From 9882be06f64497c6ca522a99683fa80887e0491b Mon Sep 17 00:00:00 2001 From: Jason Letourneau Date: Fri, 6 Sep 2013 17:34:55 -0400 Subject: [PATCH 001/179] adding update notification checking - this requires autopsy-updates.xml to be uploaded to sleuthkit.org --- Core/autopsy-updates.xml | 16 ++++++++++++++++ .../org/sleuthkit/autopsy/core/Bundle.properties | 2 ++ Core/src/org/sleuthkit/autopsy/core/layer.xml | 7 +++++++ ...uthkit_autopsy_core_update_centerSettings.xml | 13 +++++++++++++ 4 files changed, 38 insertions(+) create mode 100644 Core/autopsy-updates.xml create mode 100644 Core/src/org/sleuthkit/autopsy/core/org_sleuthkit_autopsy_core_update_centerSettings.xml diff --git a/Core/autopsy-updates.xml b/Core/autopsy-updates.xml new file mode 100644 index 0000000000..48a2898d70 --- /dev/null +++ b/Core/autopsy-updates.xml @@ -0,0 +1,16 @@ + + + + + + + + + + Visit http://sleuthkit.org/autopsy to download the latest version of Autopsy. + + + + + \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/core/Bundle.properties b/Core/src/org/sleuthkit/autopsy/core/Bundle.properties index b049fd4d82..1fef6eb2f5 100644 --- a/Core/src/org/sleuthkit/autopsy/core/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/core/Bundle.properties @@ -8,3 +8,5 @@ OpenIDE-Module-Long-Description=\ For more information, see http://www.sleuthkit.org/autopsy/ OpenIDE-Module-Name=Autopsy-Core OpenIDE-Module-Short-Description=Autopsy Core Module +org_sleuthkit_autopsy_core_update_center=http://sleuthkit.org/autopsy-updates.xml +Services/AutoupdateType/org_sleuthkit_autopsy_core_update_center.settings=Autopsy Update Center diff --git a/Core/src/org/sleuthkit/autopsy/core/layer.xml b/Core/src/org/sleuthkit/autopsy/core/layer.xml index f72ce99565..f2f926fc60 100644 --- a/Core/src/org/sleuthkit/autopsy/core/layer.xml +++ b/Core/src/org/sleuthkit/autopsy/core/layer.xml @@ -270,6 +270,13 @@ Services ====================================================== --> + + + + + + + 
diff --git a/Core/src/org/sleuthkit/autopsy/core/org_sleuthkit_autopsy_core_update_centerSettings.xml b/Core/src/org/sleuthkit/autopsy/core/org_sleuthkit_autopsy_core_update_centerSettings.xml new file mode 100644 index 0000000000..c1dbc1e41e --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/core/org_sleuthkit_autopsy_core_update_centerSettings.xml @@ -0,0 +1,13 @@ + + + + + + + + + + From df97fdf3bcda3803a04ab16393a03bab59b198a0 Mon Sep 17 00:00:00 2001 From: Jason Letourneau Date: Thu, 12 Sep 2013 10:00:56 -0400 Subject: [PATCH 002/179] adding simple event log views filters --- .../datamodel/AbstractContentChildren.java | 6 + .../autopsy/datamodel/AutopsyItemVisitor.java | 14 + .../datamodel/DisplayableItemNodeVisitor.java | 16 + .../autopsy/datamodel/EventLogs.java | 311 ++++++++++++++++++ .../autopsy/datamodel/ViewsNode.java | 1 + 5 files changed, 348 insertions(+) create mode 100644 Core/src/org/sleuthkit/autopsy/datamodel/EventLogs.java diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/AbstractContentChildren.java b/Core/src/org/sleuthkit/autopsy/datamodel/AbstractContentChildren.java index 76ad367ace..1ec27eec0f 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/AbstractContentChildren.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/AbstractContentChildren.java @@ -135,6 +135,12 @@ abstract class AbstractContentChildren extends Keys { public AbstractNode visit(DeletedContent dc) { return new DeletedContent.DeletedContentsNode(dc.getSleuthkitCase()); } + + @Override + public AbstractNode visit(EventLogs evt) { + return new EventLogs.EventLogsNode(evt.getSleuthkitCase()); + } + @Override public AbstractNode visit(FileSize dc) { diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/AutopsyItemVisitor.java b/Core/src/org/sleuthkit/autopsy/datamodel/AutopsyItemVisitor.java index 431c9adf27..64ea97057f 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/AutopsyItemVisitor.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/AutopsyItemVisitor.java @@ 
-39,6 +39,10 @@ public interface AutopsyItemVisitor { T visit(RecentFiles.RecentFilesFilter rff); T visit(DeletedContent dc); + + T visit(EventLogs.EventLogFilter evt); + + T visit(EventLogs evt); T visit(DeletedContent.DeletedContentFilter dcf); @@ -93,6 +97,16 @@ public interface AutopsyItemVisitor { public T visit(DeletedContent dc) { return defaultVisit(dc); } + + @Override + public T visit(EventLogs.EventLogFilter evt) { + return defaultVisit(evt); + } + + @Override + public T visit(EventLogs evt) { + return defaultVisit(evt); + } @Override public T visit(DeletedContent.DeletedContentFilter dcf) { diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/DisplayableItemNodeVisitor.java b/Core/src/org/sleuthkit/autopsy/datamodel/DisplayableItemNodeVisitor.java index 9072a35689..6e32215937 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/DisplayableItemNodeVisitor.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/DisplayableItemNodeVisitor.java @@ -20,6 +20,8 @@ package org.sleuthkit.autopsy.datamodel; import org.sleuthkit.autopsy.datamodel.DeletedContent.DeletedContentsChildren.DeletedContentNode; import org.sleuthkit.autopsy.datamodel.DeletedContent.DeletedContentsNode; +import org.sleuthkit.autopsy.datamodel.EventLogs.EventLogsChildren.EventLogNode; +import org.sleuthkit.autopsy.datamodel.EventLogs.EventLogsNode; import org.sleuthkit.autopsy.datamodel.EmailExtracted.EmailExtractedAccountNode; import org.sleuthkit.autopsy.datamodel.EmailExtracted.EmailExtractedFolderNode; import org.sleuthkit.autopsy.datamodel.EmailExtracted.EmailExtractedRootNode; @@ -58,6 +60,10 @@ public interface DisplayableItemNodeVisitor { T visit(DeletedContentNode dcn); T visit(DeletedContentsNode dcn); + + T visit(EventLogsNode evt); + + T visit(EventLogNode evt); T visit(FileSizeRootNode fsrn); @@ -163,6 +169,16 @@ public interface DisplayableItemNodeVisitor { public T visit(DeletedContentNode dcn) { return defaultVisit(dcn); } + + @Override + public T visit(EventLogsNode evt) { + 
return defaultVisit(evt); + } + + @Override + public T visit(EventLogNode evt) { + return defaultVisit(evt); + } @Override public T visit(DeletedContentsNode dcn) { diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/EventLogs.java b/Core/src/org/sleuthkit/autopsy/datamodel/EventLogs.java new file mode 100644 index 0000000000..00e1786000 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/datamodel/EventLogs.java @@ -0,0 +1,311 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2013 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.datamodel; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.logging.Level; +import org.openide.nodes.AbstractNode; +import org.openide.nodes.ChildFactory; +import org.openide.nodes.Children; +import org.openide.nodes.Node; +import org.openide.nodes.Sheet; +import org.openide.util.lookup.Lookups; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.ContentVisitor; +import org.sleuthkit.datamodel.Directory; +import org.sleuthkit.datamodel.File; +import org.sleuthkit.datamodel.FsContent; +import org.sleuthkit.datamodel.LayoutFile; +import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TskCoreException; + + +/** + * event logs view nodes + */ +public class EventLogs implements AutopsyVisitableItem { + + private SleuthkitCase skCase; + + public enum EventLogFilter implements AutopsyVisitableItem { + + FS_EVENT_LOG_FILTER(0, "FS_EVENT_LOG_FILTER", "Windows"), + ALL_EVENT_LOG_FILTER(1, "ALL_EVENT_LOG_FILTER", "Other"); + private int id; + private String name; + private String displayName; + + private EventLogFilter(int id, String name, String displayName) { + this.id = id; + this.name = name; + this.displayName = displayName; + + } + + public String getName() { + return this.name; + } + + public int getId() { + return this.id; + } + + public String getDisplayName() { + return this.displayName; + } + + @Override + public T accept(AutopsyItemVisitor v) { + return v.visit(this); + } + } + + public EventLogs(SleuthkitCase skCase) { + this.skCase = skCase; + } + + @Override + public T accept(AutopsyItemVisitor v) { + return v.visit(this); + } + + public SleuthkitCase getSleuthkitCase() { + return this.skCase; + } + + public static class EventLogsNode extends DisplayableItemNode { + + private static final String NAME = "Event Logs"; + private SleuthkitCase 
skCase; + + EventLogsNode(SleuthkitCase skCase) { + super(Children.create(new EventLogsChildren(skCase), true), Lookups.singleton(NAME)); + super.setName(NAME); + super.setDisplayName(NAME); + this.skCase = skCase; + this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/text-file.png"); + } + + @Override + public TYPE getDisplayableItemNodeType() { + return TYPE.META; + } + + @Override + public T accept(DisplayableItemNodeVisitor v) { + return v.visit(this); + } + + @Override + protected Sheet createSheet() { + Sheet s = super.createSheet(); + Sheet.Set ss = s.get(Sheet.PROPERTIES); + if (ss == null) { + ss = Sheet.createPropertiesSet(); + s.put(ss); + } + + ss.put(new NodeProperty("Name", + "Name", + "no description", + NAME)); + return s; + } + } + + public static class EventLogsChildren extends ChildFactory { + + private SleuthkitCase skCase; + + public EventLogsChildren(SleuthkitCase skCase) { + this.skCase = skCase; + + } + + @Override + protected boolean createKeys(List list) { + list.addAll(Arrays.asList(EventLogs.EventLogFilter.values())); + return true; + } + + @Override + protected Node createNodeForKey(EventLogs.EventLogFilter key) { + return new EventLogNode(skCase, key); + } + + public class EventLogNode extends DisplayableItemNode { + + private SleuthkitCase skCase; + private EventLogs.EventLogFilter filter; + private final Logger logger = Logger.getLogger(EventLogNode.class.getName()); + + EventLogNode(SleuthkitCase skCase, EventLogs.EventLogFilter filter) { + super(Children.create(new EventLogChildren(filter, skCase), true), Lookups.singleton(filter.getDisplayName())); + super.setName(filter.getName()); + this.skCase = skCase; + this.filter = filter; + + String tooltip = filter.getDisplayName(); + this.setShortDescription(tooltip); + this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/text-file.png.png"); + + //get count of children without preloading all children nodes + final long count = new EventLogChildren(filter, 
skCase).calculateItems(); + //final long count = getChildren().getNodesCount(true); + super.setDisplayName(filter.getDisplayName() + " (" + count + ")"); + } + + @Override + public T accept(DisplayableItemNodeVisitor v) { + return v.visit(this); + } + + @Override + protected Sheet createSheet() { + Sheet s = super.createSheet(); + Sheet.Set ss = s.get(Sheet.PROPERTIES); + if (ss == null) { + ss = Sheet.createPropertiesSet(); + s.put(ss); + } + + ss.put(new NodeProperty("Filter Type", + "Filter Type", + "no description", + filter.getDisplayName())); + + return s; + } + + @Override + public TYPE getDisplayableItemNodeType() { + return TYPE.META; + } + + @Override + public boolean isLeafTypeNode() { + return true; + } + } + + class EventLogChildren extends ChildFactory { + + private SleuthkitCase skCase; + private EventLogs.EventLogFilter filter; + private final Logger logger = Logger.getLogger(EventLogsChildren.class.getName()); + + EventLogChildren(EventLogs.EventLogFilter filter, SleuthkitCase skCase) { + this.skCase = skCase; + this.filter = filter; + } + + @Override + protected boolean createKeys(List list) { + list.addAll(runFsQuery()); + return true; + } + + private String makeQuery() { + String query = ""; + switch (filter) { + case FS_EVENT_LOG_FILTER: + query = "name like '%.evt'"; + + + break; + case ALL_EVENT_LOG_FILTER: + query = query = "name like '%.log'"; + + break; + + default: + logger.log(Level.SEVERE, "Unsupported filter type to get log content: " + filter); + + } + + return query; + } + + private List runFsQuery() { + List ret = new ArrayList(); + + String query = makeQuery(); + try { + ret = skCase.findAllFilesWhere(query); + } catch (TskCoreException e) { + logger.log(Level.SEVERE, "Error getting files for the event log content view using: " + query, e); + } + + return ret; + + } + + /** + * Get children count without actually loading all nodes + * + * @return + */ + long calculateItems() { + try { + return skCase.countFilesWhere(makeQuery()); + 
} catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Error getting event log files search view count", ex); + return 0; + } + } + + @Override + protected Node createNodeForKey(AbstractFile key) { + return key.accept(new ContentVisitor.Default() { + public FileNode visit(AbstractFile f) { + return new FileNode(f, false); + } + + public FileNode visit(FsContent f) { + return new FileNode(f, false); + } + + @Override + public FileNode visit(LayoutFile f) { + return new FileNode(f, false); + } + + @Override + public FileNode visit(File f) { + return new FileNode(f, false); + } + + @Override + public FileNode visit(Directory f) { + return new FileNode(f, false); + } + + @Override + protected AbstractNode defaultVisit(Content di) { + throw new UnsupportedOperationException("Not supported for this type of Displayable Item: " + di.toString()); + } + }); + } + } + } +} \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ViewsNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/ViewsNode.java index eea33be974..55c6447765 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/ViewsNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/ViewsNode.java @@ -37,6 +37,7 @@ public class ViewsNode extends DisplayableItemNode { new SearchFilters(sleuthkitCase), new RecentFiles(sleuthkitCase), new DeletedContent(sleuthkitCase), + new EventLogs(sleuthkitCase), new FileSize(sleuthkitCase) )), Lookups.singleton(NAME)); From 9eb8c7e5ca7378c071af8e8ff3bda998ea692098 Mon Sep 17 00:00:00 2001 From: Jason Letourneau Date: Thu, 12 Sep 2013 10:09:29 -0400 Subject: [PATCH 003/179] fixed redundant assignment --- Core/src/org/sleuthkit/autopsy/datamodel/EventLogs.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/EventLogs.java b/Core/src/org/sleuthkit/autopsy/datamodel/EventLogs.java index 00e1786000..b7e770ceda 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/EventLogs.java +++ 
b/Core/src/org/sleuthkit/autopsy/datamodel/EventLogs.java @@ -234,7 +234,7 @@ public class EventLogs implements AutopsyVisitableItem { break; case ALL_EVENT_LOG_FILTER: - query = query = "name like '%.log'"; + query = "name like '%.log'"; break; From e3940da25be2b35f4ec59e8bc2caf399c5bf9bc1 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Thu, 19 Sep 2013 15:27:13 -0400 Subject: [PATCH 004/179] Added TagsManager class skeleton integrated with Services --- .../autopsy/casemodule/services/Services.java | 11 ++++- .../casemodule/services/TagsManager.java | 40 +++++++++++++++++++ 2 files changed, 49 insertions(+), 2 deletions(-) create mode 100755 Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java index 8718406d5c..3cb8764f6f 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2012 Basis Technology Corp. + * Copyright 2012-2013 Basis Technology Corp. * * Copyright 2012 42six Solutions. * Contact: aebadirad 42six com @@ -37,21 +37,28 @@ public class Services implements Closeable { // NOTE: all new services added to Services class must be added to this list // of services. 
- private List services = new ArrayList(); + private List services = new ArrayList<>(); // services private FileManager fileManager; + private TagsManager tagsManager; public Services(SleuthkitCase tskCase) { this.tskCase = tskCase; //create and initialize FileManager as early as possibly in the new/opened Case fileManager = new FileManager(tskCase); services.add(fileManager); + tagsManager = new TagsManager(tskCase); + services.add(tagsManager); } public FileManager getFileManager() { return fileManager; } + + public TagsManager getTagsManager() { + return tagsManager; + } @Override public void close() throws IOException { diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java new file mode 100755 index 0000000000..07151aa809 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java @@ -0,0 +1,40 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2013 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.casemodule.services; + +import java.io.Closeable; +import java.io.IOException; +import org.sleuthkit.datamodel.SleuthkitCase; + +/** + * A instance of this class functions as an Autopsy service that manages the + * creation, updating, and deletion of tags applied to files and artifacts by + * users. 
+ */ +public class TagsManager implements Closeable { + private final SleuthkitCase database; + + TagsManager(SleuthkitCase database) { + this.database = database; + } + + @Override + public void close() throws IOException { + } +} From a40481f5f862d294a72fc83acc90bc0452457261 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Wed, 25 Sep 2013 16:44:56 -0400 Subject: [PATCH 005/179] Filled out new tags API for data, DTOs --- .../autopsy/casemodule/services/Services.java | 1 + .../casemodule/services/TagsManager.java | 221 +++++++++++++++++- .../directorytree/TagAbstractFileAction.java | 34 ++- .../TagBlackboardArtifactAction.java | 34 ++- 4 files changed, 273 insertions(+), 17 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java index 3cb8764f6f..069b13ef2e 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java @@ -48,6 +48,7 @@ public class Services implements Closeable { //create and initialize FileManager as early as possibly in the new/opened Case fileManager = new FileManager(tskCase); services.add(fileManager); + tagsManager = new TagsManager(tskCase); services.add(tagsManager); } diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java index 07151aa809..ef9d3f6934 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java @@ -20,21 +20,228 @@ package org.sleuthkit.autopsy.casemodule.services; import java.io.Closeable; import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.sleuthkit.autopsy.coreutils.ModuleSettings; +import 
org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardArtifactTag; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.ContentTag; import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TagType; +import org.sleuthkit.datamodel.TskCoreException; /** - * A instance of this class functions as an Autopsy service that manages the - * creation, updating, and deletion of tags applied to files and artifacts by - * users. + * A singleton instance of this class functions as an Autopsy service that + * manages the creation, updating, and deletion of tags applied to Content and + * BlackboardArtifacts objects by users. */ public class TagsManager implements Closeable { - private final SleuthkitCase database; + private static final String TAGS_SETTINGS_FILE_NAME = "tags"; + private static final String TAG_TYPES_SETTING_KEY = "tagTypes"; + private final SleuthkitCase tskCase; + private final HashMap tagTypes = new HashMap<>(); + + TagsManager(SleuthkitCase tskCase) { + this.tskCase = tskCase; + loadTagTypesFromTagSettings(); + } + + private void loadTagTypesFromTagSettings() { + // Get any tag types already added to the current case. + try { + List currentTagTypes = tskCase.getTagTypes(); + for (TagType tagType : currentTagTypes) { + tagTypes.put(tagType.getDisplayName(), tagType); + } + } + catch (TskCoreException ex) { + Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to get tag types from the current case", ex); + } + + // Read the saved tag types, if any, from the tags settings file and + // add them to the current case if they haven't already been added, e.g, + // when the case was last opened. + String setting = ModuleSettings.getConfigSetting(TAGS_SETTINGS_FILE_NAME, TAG_TYPES_SETTING_KEY); + if (null != setting && !setting.isEmpty()) { + // Read the tag types setting and break in into tag type tuples. 
+ List tagTypeTuples = Arrays.asList(setting.split(";")); - TagsManager(SleuthkitCase database) { - this.database = database; + // Parse each tuple and add the tag types to the current case, one + // at a time to gracefully discard any duplicates or corrupt tuples. + for (String tagTypeTuple : tagTypeTuples) { + String[] tagTypeAttributes = tagTypeTuple.split(","); + if (!tagTypes.containsKey(tagTypeAttributes[0])) { + TagType tagType = new TagType(tagTypeAttributes[0], tagTypeAttributes[1], TagType.HTML_COLOR.getColorByName(tagTypeAttributes[2])); + try { + tskCase.addTagType(tagType); + tagTypes.put(tagType.getDisplayName(),tagType); + } + catch(TskCoreException ex) { + Logger.getLogger(TagsManager.class.getName()).log(Level.WARNING, "Failed to add saved " + tagType.getDisplayName() + " tag type to the current case", ex); + } + } + } + + saveTagTypesToTagsSettings(); + } + } + + private void saveTagTypesToTagsSettings() { + if (!tagTypes.isEmpty()) { + StringBuilder setting = new StringBuilder(); + for (TagType tagType : tagTypes.values()) { + if (setting.length() != 0) { + setting.append(";"); + } + setting.append(tagType.getDisplayName()).append(","); + setting.append(tagType.getDescription()).append(","); + setting.append(tagType.getColor().name()); + } + + ModuleSettings.setConfigSetting(TAGS_SETTINGS_FILE_NAME, TAG_TYPES_SETTING_KEY, setting.toString()); + } + } + + /** + * Gets a list of all tag types currently available for tagging content or + * blackboard artifacts. + * @return A list, possibly empty, of TagType data transfer objects (DTOs). + * @throws TskCoreException + */ + public List getTagTypes() throws TskCoreException { + return tskCase.getTagTypes(); + } + + /** + * Adds a new tag type to the current case and to the tags settings file. + * @param displayName The display name for the new tag type. + * @return A TagType object representing the new type on success, null on failure. 
+ * @throws TskCoreException + */ + public TagType addTagType(String displayName) throws TagTypeAlreadyExistsException, TskCoreException { + return addTagType(displayName, "", TagType.HTML_COLOR.NONE); + } + + /** + * Adds a new tag type to the current case and to the tags settings file. + * @param displayName The display name for the new tag type. + * @param description The description for the new tag type. + * @return A TagType object representing the new type on success, null on failure. + * @throws TskCoreException + */ + public TagType addTagType(String displayName, String description) throws TagTypeAlreadyExistsException, TskCoreException { + return addTagType(displayName, description, TagType.HTML_COLOR.NONE); + } + + /** + * Adds a new tag type to the current case and to the tags settings file. + * @param displayName The display name for the new tag type. + * @param description The description for the new tag type. + * @param color The HTML color to associate with the new tag type. + * @return A TagType object representing the new type. + * @throws TskCoreException + */ + public synchronized TagType addTagType(String displayName, String description, TagType.HTML_COLOR color) throws TagTypeAlreadyExistsException, TskCoreException { + if (tagTypes.containsKey(displayName)) { + throw new TagTypeAlreadyExistsException(); + } + + TagType newTagType = new TagType(displayName, description, color); + tskCase.addTagType(newTagType); + tagTypes.put(newTagType.getDisplayName(), newTagType); + saveTagTypesToTagsSettings(); + return newTagType; + } + + public class TagTypeAlreadyExistsException extends Exception { + } + + /** + * Tags a Content object. + * @param content The Content to tag. + * @param tagType The type of tag to add. + * @throws TskCoreException + */ + public void addContentTag(Content content, TagType tagType) throws TskCoreException { + addContentTag(content, tagType, "", 0, content.getSize()); + } + + /** + * Tags a Content object. 
+ * @param content The Content to tag. + * @param tagType The type of tag to add. + * @param comment A comment to store with the tag. + * @throws TskCoreException + */ + public void addContentTag(Content content, TagType tagType, String comment) throws TskCoreException { + addContentTag(content, tagType, comment, 0, content.getSize() - 1); + } + + /** + * Tags a Content object or a portion of a content object. + * @param content The Content to tag. + * @param tagType The type of tag to add. + * @param comment A comment to store with the tag. + * @param beginByteOffset Designates the beginning of a tagged extent. + * @param endByteOffset Designates the end of a tagged extent. + * @throws TskCoreException + */ + public void addContentTag(Content content, TagType tagType, String comment, long beginByteOffset, long endByteOffset) throws IllegalArgumentException, TskCoreException { + if (beginByteOffset < 0) { + throw new IllegalArgumentException("Content extent incorrect: beginByteOffset < 0"); + } + + if (endByteOffset <= beginByteOffset) { + throw new IllegalArgumentException("Content extent incorrect: endByteOffset <= beginByteOffset"); + } + + if (endByteOffset > content.getSize() - 1) { + throw new IllegalArgumentException("Content extent incorrect: endByteOffset exceeds content size"); + } + + tskCase.addContentTag(new ContentTag(content, tagType, comment, beginByteOffset, endByteOffset)); + } + + /** + * Deletes a content tag. + * @param tag The tag to delete. + * @throws TskCoreException + */ + public void deleteContentTag(ContentTag tag) throws TskCoreException { + tskCase.deleteContentTag(tag); + } + + /** + * Tags a BlackboardArtifact object. + * @param artifact The BlackboardArtifact to tag. + * @param tagType The type of tag to add. 
+ * @throws TskCoreException + */ + public void addBlackboardArtifactTag(BlackboardArtifact artifact, TagType tagType) throws TskCoreException { + addBlackboardArtifactTag(artifact, tagType, ""); + } + + /** + * Tags a BlackboardArtifact object. + * @param artifact The BlackboardArtifact to tag. + * @param tagType The type of tag to add. + * @param comment A comment to store with the tag. + * @throws TskCoreException + */ + public void addBlackboardArtifactTag(BlackboardArtifact artifact, TagType tagType, String comment) throws TskCoreException { + tskCase.addBlackboardArtifactTag(new BlackboardArtifactTag(artifact, tagType, comment)); + } + + void deleteBlackboardArtifactTag(BlackboardArtifactTag tag) throws TskCoreException { + tskCase.deleteBlackboardArtifactTag(tag); } @Override - public void close() throws IOException { + public void close() throws IOException { + saveTagTypesToTagsSettings(); } } diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/TagAbstractFileAction.java b/Core/src/org/sleuthkit/autopsy/directorytree/TagAbstractFileAction.java index 657673ac74..b7d3c39e17 100755 --- a/Core/src/org/sleuthkit/autopsy/directorytree/TagAbstractFileAction.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/TagAbstractFileAction.java @@ -20,12 +20,19 @@ package org.sleuthkit.autopsy.directorytree; import java.awt.event.ActionEvent; import java.util.Collection; +import java.util.logging.Level; import javax.swing.AbstractAction; import javax.swing.JMenuItem; +import javax.swing.JOptionPane; import org.openide.util.Utilities; import org.openide.util.actions.Presenter; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.services.TagsManager; +import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.datamodel.Tags; import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.TagType; +import org.sleuthkit.datamodel.TskCoreException; public class TagAbstractFileAction extends 
AbstractAction implements Presenter.Popup { // This class is a singleton to support multi-selection of nodes, since @@ -60,11 +67,28 @@ public class TagAbstractFileAction extends AbstractAction implements Presenter.P } @Override - protected void applyTag(String tagName, String comment) { - Collection selectedFiles = Utilities.actionsGlobalContext().lookupAll(AbstractFile.class); - for (AbstractFile file : selectedFiles) { - Tags.createTag(file, tagName, comment); - } + protected void applyTag(String tagDisplayName, String comment) { + try { + TagsManager tagsManager = Case.getCurrentCase().getServices().getTagsManager(); + TagType tagType = tagsManager.addTagType(tagDisplayName); + + Collection selectedFiles = Utilities.actionsGlobalContext().lookupAll(AbstractFile.class); + for (AbstractFile file : selectedFiles) { + Tags.createTag(file, tagDisplayName, comment); + try { + tagsManager.addContentTag(file, tagType); + } + catch (TskCoreException ex) { + Logger.getLogger(TagAbstractFileMenu.class.getName()).log(Level.SEVERE, "Error tagging content", ex); + } + } + } + catch (TagsManager.TagTypeAlreadyExistsException ex) { + JOptionPane.showMessageDialog(null, "A " + tagDisplayName + " tag type has already been defined.", "Duplicate Tag Type", JOptionPane.ERROR_MESSAGE); + } + catch (TskCoreException ex) { + Logger.getLogger(TagAbstractFileMenu.class.getName()).log(Level.SEVERE, "Error adding " + tagDisplayName + " tag type", ex); + } } } } diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/TagBlackboardArtifactAction.java b/Core/src/org/sleuthkit/autopsy/directorytree/TagBlackboardArtifactAction.java index 3d1a9641b3..3f2c8fd407 100755 --- a/Core/src/org/sleuthkit/autopsy/directorytree/TagBlackboardArtifactAction.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/TagBlackboardArtifactAction.java @@ -20,12 +20,19 @@ package org.sleuthkit.autopsy.directorytree; import java.awt.event.ActionEvent; import java.util.Collection; +import java.util.logging.Level; 
import javax.swing.AbstractAction; import javax.swing.JMenuItem; +import javax.swing.JOptionPane; import org.openide.util.Utilities; import org.openide.util.actions.Presenter; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.services.TagsManager; +import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.datamodel.Tags; import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.TagType; +import org.sleuthkit.datamodel.TskCoreException; public class TagBlackboardArtifactAction extends AbstractAction implements Presenter.Popup { // This class is a singleton to support multi-selection of nodes, since @@ -61,11 +68,28 @@ public class TagBlackboardArtifactAction extends AbstractAction implements Prese } @Override - protected void applyTag(String tagName, String comment) { - Collection selectedArtifacts = Utilities.actionsGlobalContext().lookupAll(BlackboardArtifact.class); - for (BlackboardArtifact artifact : selectedArtifacts) { - Tags.createTag(artifact, tagName, comment); - } + protected void applyTag(String tagDisplayName, String comment) { + try { + TagsManager tagsManager = Case.getCurrentCase().getServices().getTagsManager(); + TagType tagType = tagsManager.addTagType(tagDisplayName); + + Collection selectedArtifacts = Utilities.actionsGlobalContext().lookupAll(BlackboardArtifact.class); + for (BlackboardArtifact artifact : selectedArtifacts) { + Tags.createTag(artifact, tagDisplayName, comment); + try { + tagsManager.addBlackboardArtifactTag(artifact, tagType); + } + catch (TskCoreException ex) { + Logger.getLogger(TagBlackboardArtifactMenu.class.getName()).log(Level.SEVERE, "Error tagging result", ex); + } + } + } + catch (TagsManager.TagTypeAlreadyExistsException ex) { + JOptionPane.showMessageDialog(null, "A " + tagDisplayName + " tag type has already been defined.", "Duplicate Tag Type", JOptionPane.ERROR_MESSAGE); + } + catch (TskCoreException ex) { + 
Logger.getLogger(TagBlackboardArtifactMenu.class.getName()).log(Level.SEVERE, "Error adding " + tagDisplayName + " tag type", ex); + } } } } From 7cf2d5406cc0a9b52a3f3b5bf6533bbe8106fec1 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Tue, 1 Oct 2013 13:07:22 -0400 Subject: [PATCH 006/179] Added infrastructure for EnCase style file reporting. --- .../autopsy/report/FILE_REPORT_INFO.java | 104 +++++++ .../autopsy/report/FileReportModule.java | 63 ++++ .../autopsy/report/FileReportText.java | 169 +++++++++++ .../report/ReportWizardFileOptionsPanel.java | 96 +++++++ .../ReportWizardFileOptionsVisualPanel.form | 110 +++++++ .../ReportWizardFileOptionsVisualPanel.java | 269 ++++++++++++++++++ 6 files changed, 811 insertions(+) create mode 100755 Core/src/org/sleuthkit/autopsy/report/FILE_REPORT_INFO.java create mode 100755 Core/src/org/sleuthkit/autopsy/report/FileReportModule.java create mode 100755 Core/src/org/sleuthkit/autopsy/report/FileReportText.java create mode 100755 Core/src/org/sleuthkit/autopsy/report/ReportWizardFileOptionsPanel.java create mode 100755 Core/src/org/sleuthkit/autopsy/report/ReportWizardFileOptionsVisualPanel.form create mode 100755 Core/src/org/sleuthkit/autopsy/report/ReportWizardFileOptionsVisualPanel.java diff --git a/Core/src/org/sleuthkit/autopsy/report/FILE_REPORT_INFO.java b/Core/src/org/sleuthkit/autopsy/report/FILE_REPORT_INFO.java new file mode 100755 index 0000000000..760ec79c5d --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/FILE_REPORT_INFO.java @@ -0,0 +1,104 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2013 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.report; + +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Represents Column Headers for FileList Reports. + * + * Encapsulates functionality for getting column values from Files. + * + * @author jwallace + */ +public enum FILE_REPORT_INFO { + + NAME("Name") { + @Override + public String getValue(AbstractFile file) { + return file.getName(); + } + }, + FILE_EXT("File Extension") { + @Override + public String getValue(AbstractFile file) { + String name = file.getName(); + int extIndex = name.lastIndexOf("."); + return (extIndex == -1 ? 
"" : name.substring(extIndex)); + } + }, + A_TIME("Last Accessed") { + @Override + public String getValue(AbstractFile file) { + return file.getAtimeAsDate(); + } + }, + CR_TIME("File Created") { + @Override + public String getValue(AbstractFile file) { + return file.getCrtimeAsDate(); + } + }, + M_TIME("Last Modified") { + @Override + public String getValue(AbstractFile file) { + return file.getMtimeAsDate(); + } + }, + SIZE("Size") { + @Override + public String getValue(AbstractFile file) { + return String.valueOf(file.getSize()); + } + }, + HASH_VALUE("Hash Value") { + @Override + public String getValue(AbstractFile file) { + return file.getMd5Hash(); + } + }, + FULL_PATH("Full Path") { + @Override + public String getValue(AbstractFile file) { + try { + return file.getUniquePath(); + } catch (TskCoreException ex) { + return ""; + } + } + }; + + private String name; + + FILE_REPORT_INFO(String name) { + this.name = name; + } + + public String getName() { + return this.name; + } + + /** + * Get the value of the column from the file. + * + * @return + */ + public abstract String getValue(AbstractFile file); +} diff --git a/Core/src/org/sleuthkit/autopsy/report/FileReportModule.java b/Core/src/org/sleuthkit/autopsy/report/FileReportModule.java new file mode 100755 index 0000000000..93494e5bf6 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/FileReportModule.java @@ -0,0 +1,63 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2013 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.report; + +import java.util.List; +import org.sleuthkit.datamodel.AbstractFile; + +/** + * A Report Module that reports information on files in a case. + * + * @author jwallace + */ +public interface FileReportModule extends ReportModule { + + public void generateReport(String reportPath, ReportProgressPanel progress, List enabled); + + /** + * Initialize the report which will be stored at the given path. + * @param path + */ + public void startReport(String path); + + /** + * End the report. + * Will be called after the entire report has been written. + */ + public void endReport(); + + /** + * Start the file list table. + * @param headers The columns that should be included in the table. + */ + public void startTable(List headers); + + /** + * Add the given AbstractFile as a row in the table. + * Guaranteed to be called between startTable and endTable. + * @param toAdd the AbstractFile to be added. + * @param columns the columns that should be included + */ + public void addRow(AbstractFile toAdd, List columns); + + /** + * Close the table. + */ + public void endTable(); +} diff --git a/Core/src/org/sleuthkit/autopsy/report/FileReportText.java b/Core/src/org/sleuthkit/autopsy/report/FileReportText.java new file mode 100755 index 0000000000..eef652b27d --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/FileReportText.java @@ -0,0 +1,169 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2013 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.report; + +import java.io.BufferedWriter; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.openide.util.Exceptions; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * A Tab-delimited text report of the files in the case. 
+ * + * @author jwallace + */ +public class FileReportText implements FileReportModule { + private static final Logger logger = Logger.getLogger(FileReportText.class.getName()); + private String reportPath; + private Writer out; + + private static FileReportText instance; + + // Get the default implementation of this report + public static synchronized FileReportText getDefault() { + if (instance == null) { + instance = new FileReportText(); + } + return instance; + } + + @Override + public void startReport(String path) { + this.reportPath = path + "report.txt"; + try { + out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(this.reportPath))); + } catch (IOException ex) { + logger.log(Level.WARNING, "Failed to create report text file", ex); + } + } + + @Override + public void endReport() { + if (out != null) { + try { + out.close(); + } catch (IOException ex) { + logger.log(Level.WARNING, "Could not close output writer when ending report.", ex); + } + } + } + + private String getTabDelimitedList(List list) { + StringBuilder output = new StringBuilder(); + Iterator it = list.iterator(); + while(it.hasNext()) { + output.append(it.next()).append((it.hasNext() ? 
"\t" : System.lineSeparator())); + } + return output.toString(); + } + + @Override + public void startTable(List headers) { + List titles = new ArrayList<>(); + for(FILE_REPORT_INFO col : headers) { + titles.add(col.getName()); + } + try { + out.write(getTabDelimitedList(titles)); + } catch (IOException ex) { + logger.log(Level.WARNING, "Error when writing headers to report file: {0}", ex); + } + } + + @Override + public void addRow(AbstractFile toAdd, List columns) { + List cells = new ArrayList<>(); + for(FILE_REPORT_INFO type : columns) { + cells.add(type.getValue(toAdd)); + } + try { + out.write(getTabDelimitedList(cells)); + } catch (IOException ex) { + logger.log(Level.WARNING, "Error when writing row to report file: {0}", ex); + } + } + + @Override + public void endTable() { + try { + out.write(System.lineSeparator()); + } catch (IOException ex) { + logger.log(Level.WARNING, "Error when closing table: {0}", ex); + } + } + + @Override + public String getName() { + return "File Report"; + } + + @Override + public String getDescription() { + return "A tab delimited text file containing information about files in the case."; + } + + @Override + public String getExtension() { + return ".txt"; + } + + @Override + public String getFilePath() { + return "report.txt"; + } + + @Override + public void generateReport(String reportPath, ReportProgressPanel progress, List enabled) { + progress.setIndeterminate(false); + progress.start(); + progress.updateStatusLabel("querying database"); + List absFiles; + try { + SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase(); + absFiles = skCase.findAllFilesWhere("NOT meta_type = 2"); + } catch (TskCoreException ex) { + Exceptions.printStackTrace(ex); + return; + } + progress.setMaximumProgress(absFiles.size()); + startReport(reportPath); + startTable(enabled); + for (AbstractFile file: absFiles) { + progress.increment(); + progress.updateStatusLabel("Now processing " + file.getName()); + addRow(file, enabled); + } + 
endTable(); + endReport(); + progress.complete(); + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportWizardFileOptionsPanel.java b/Core/src/org/sleuthkit/autopsy/report/ReportWizardFileOptionsPanel.java new file mode 100755 index 0000000000..2e09b0d5bd --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/ReportWizardFileOptionsPanel.java @@ -0,0 +1,96 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2013 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.report; + +import java.awt.event.ActionEvent; +import java.awt.event.ActionListener; +import javax.swing.JButton; +import javax.swing.event.ChangeListener; +import org.openide.WizardDescriptor; +import org.openide.util.HelpCtx; + +/** + * Wizard panel that allows configuration of File Report options. 
+ * + * @author jwallace + */ +public class ReportWizardFileOptionsPanel implements WizardDescriptor.FinishablePanel{ + private WizardDescriptor wiz; + private ReportWizardFileOptionsVisualPanel component; + private JButton finishButton; + + ReportWizardFileOptionsPanel() { + finishButton = new JButton("Finish"); + finishButton.setEnabled(false); + + finishButton.addActionListener(new ActionListener() { + @Override + public void actionPerformed(ActionEvent e) { + wiz.doFinishClick(); + }; + }); + } + + public void setFinish(boolean enable) { + finishButton.setEnabled(enable); + } + + @Override + public boolean isFinishPanel() { + return true; + } + + @Override + public ReportWizardFileOptionsVisualPanel getComponent() { + if (component == null) { + component = new ReportWizardFileOptionsVisualPanel(this); + } + return component; + } + + @Override + public HelpCtx getHelp() { + return HelpCtx.DEFAULT_HELP; + } + + @Override + public void readSettings(WizardDescriptor data) { + this.wiz = data; + wiz.setOptions(new Object[] {WizardDescriptor.PREVIOUS_OPTION, WizardDescriptor.NEXT_OPTION, finishButton, WizardDescriptor.CANCEL_OPTION}); + } + + @Override + public void storeSettings(WizardDescriptor data) { + data.putProperty("fileReportOptions", getComponent().getFileReportOptions()); + } + + @Override + public boolean isValid() { + return true; + } + + @Override + public void addChangeListener(ChangeListener cl) { + } + + @Override + public void removeChangeListener(ChangeListener cl) { + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportWizardFileOptionsVisualPanel.form b/Core/src/org/sleuthkit/autopsy/report/ReportWizardFileOptionsVisualPanel.form new file mode 100755 index 0000000000..dedff72c78 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/ReportWizardFileOptionsVisualPanel.form @@ -0,0 +1,110 @@ + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportWizardFileOptionsVisualPanel.java b/Core/src/org/sleuthkit/autopsy/report/ReportWizardFileOptionsVisualPanel.java new file mode 100755 index 0000000000..759b923608 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/ReportWizardFileOptionsVisualPanel.java @@ -0,0 +1,269 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2013 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.report; + +import java.awt.Component; +import java.awt.event.MouseAdapter; +import java.awt.event.MouseEvent; +import java.util.Arrays; +import java.util.EnumMap; +import java.util.List; +import java.util.Map; +import javax.swing.JCheckBox; +import javax.swing.JLabel; +import javax.swing.JList; +import javax.swing.ListCellRenderer; +import javax.swing.ListModel; +import javax.swing.event.ListDataListener; + +/** + * Visual component of the File Report Configuration panel of the Report Wizard. 
+ * + * @author jwallace + */ +public class ReportWizardFileOptionsVisualPanel extends javax.swing.JPanel { + private List options; + private Map optionStates = new EnumMap<>(FILE_REPORT_INFO.class); + private ListModel model; + private ReportWizardFileOptionsPanel wizPanel; + + + public ReportWizardFileOptionsVisualPanel(ReportWizardFileOptionsPanel wizPanel) { + this.wizPanel = wizPanel; + initComponents(); + initOptionsList(); + } + + @Override + public String getName() { + return "Configure File Report"; + } + + /** + * Populate the list of File Report Information that can be selected. + */ + private void initOptionsList() { + options = Arrays.asList(FILE_REPORT_INFO.values()); + for(FILE_REPORT_INFO col : options) { + optionStates.put(col, Boolean.FALSE); + } + + model = new OptionsListModel(); + optionsList.setModel(model); + optionsList.setCellRenderer(new OptionsListRenderer()); + optionsList.setVisibleRowCount(-1); + + selectAllButton.setEnabled(true); + deselectAllButton.setEnabled(false); + + // Add the ability to enable and disable Tag checkboxes to the list + optionsList.addMouseListener(new MouseAdapter() { + @Override + public void mousePressed(MouseEvent evt) { + JList list = (JList) evt.getSource(); + int index = list.locationToIndex(evt.getPoint()); + FILE_REPORT_INFO value = (FILE_REPORT_INFO) model.getElementAt(index); + optionStates.put(value, !optionStates.get(value)); + list.repaint(); + boolean anySelected = anySelected(); + deselectAllButton.setEnabled(anySelected); + wizPanel.setFinish(anySelected); + selectAllButton.setEnabled(notAllSelected()); + } + }); + } + + /** + * Are any options selected? + * @return + */ + private boolean anySelected() { + for (Boolean b : optionStates.values()) { + if (b) { + return true; + } + } + return false; + } + + /** + * Are no options selected? 
+ * @return + */ + private boolean notAllSelected() { + for (Boolean b : optionStates.values()) { + if (!b) { + return true; + } + } + return false; + } + + /** + * Get the user-selected settings. + * @return + */ + Map getFileReportOptions() { + return optionStates; + } + /** + * This method is called from within the constructor to initialize the form. + * WARNING: Do NOT modify this code. The content of this method is always + * regenerated by the Form Editor. + */ + @SuppressWarnings("unchecked") + // //GEN-BEGIN:initComponents + private void initComponents() { + + jScrollPane1 = new javax.swing.JScrollPane(); + optionsList = new javax.swing.JList(); + selectAllButton = new javax.swing.JButton(); + deselectAllButton = new javax.swing.JButton(); + jLabel1 = new javax.swing.JLabel(); + + optionsList.setModel(new javax.swing.AbstractListModel() { + String[] strings = { "Item 1", "Item 2", "Item 3", "Item 4", "Item 5" }; + public int getSize() { return strings.length; } + public Object getElementAt(int i) { return strings[i]; } + }); + jScrollPane1.setViewportView(optionsList); + + org.openide.awt.Mnemonics.setLocalizedText(selectAllButton, org.openide.util.NbBundle.getMessage(ReportWizardFileOptionsVisualPanel.class, "ReportWizardFileOptionsVisualPanel.selectAllButton.text")); // NOI18N + selectAllButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + selectAllButtonActionPerformed(evt); + } + }); + + org.openide.awt.Mnemonics.setLocalizedText(deselectAllButton, org.openide.util.NbBundle.getMessage(ReportWizardFileOptionsVisualPanel.class, "ReportWizardFileOptionsVisualPanel.deselectAllButton.text")); // NOI18N + deselectAllButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + deselectAllButtonActionPerformed(evt); + } + }); + + org.openide.awt.Mnemonics.setLocalizedText(jLabel1, 
org.openide.util.NbBundle.getMessage(ReportWizardFileOptionsVisualPanel.class, "ReportWizardFileOptionsVisualPanel.jLabel1.text")); // NOI18N + + javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); + this.setLayout(layout); + layout.setHorizontalGroup( + layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(layout.createSequentialGroup() + .addContainerGap() + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(layout.createSequentialGroup() + .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 297, javax.swing.GroupLayout.PREFERRED_SIZE) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) + .addComponent(deselectAllButton, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(selectAllButton, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))) + .addGroup(layout.createSequentialGroup() + .addComponent(jLabel1) + .addGap(0, 0, Short.MAX_VALUE))) + .addContainerGap()) + ); + layout.setVerticalGroup( + layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(layout.createSequentialGroup() + .addGap(10, 10, 10) + .addComponent(jLabel1) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(layout.createSequentialGroup() + .addComponent(selectAllButton) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(deselectAllButton) + .addGap(0, 202, Short.MAX_VALUE)) + .addComponent(jScrollPane1)) + .addContainerGap()) + ); + }// //GEN-END:initComponents + + private void selectAllButtonActionPerformed(java.awt.event.ActionEvent evt) 
{//GEN-FIRST:event_selectAllButtonActionPerformed + for (FILE_REPORT_INFO option : options) { + optionStates.put(option, Boolean.TRUE); + } + optionsList.repaint(); + selectAllButton.setEnabled(false); + deselectAllButton.setEnabled(true); + wizPanel.setFinish(true); + }//GEN-LAST:event_selectAllButtonActionPerformed + + private void deselectAllButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_deselectAllButtonActionPerformed + for (FILE_REPORT_INFO option : options) { + optionStates.put(option, Boolean.FALSE); + } + optionsList.repaint(); + selectAllButton.setEnabled(true); + deselectAllButton.setEnabled(false); + wizPanel.setFinish(false); + }//GEN-LAST:event_deselectAllButtonActionPerformed + + // Variables declaration - do not modify//GEN-BEGIN:variables + private javax.swing.JButton deselectAllButton; + private javax.swing.JLabel jLabel1; + private javax.swing.JScrollPane jScrollPane1; + private javax.swing.JList optionsList; + private javax.swing.JButton selectAllButton; + // End of variables declaration//GEN-END:variables + + private class OptionsListModel implements ListModel { + + @Override + public int getSize() { + return options.size(); + } + + @Override + public Object getElementAt(int index) { + return options.get(index); + } + + @Override + public void addListDataListener(ListDataListener l) { + } + + @Override + public void removeListDataListener(ListDataListener l) { + } + } + + /** + * Render each item in the list to be a selectable check box. 
+ */ + private class OptionsListRenderer extends JCheckBox implements ListCellRenderer { + + @Override + public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) { + if (value != null) { + FILE_REPORT_INFO col = (FILE_REPORT_INFO) value; + setEnabled(list.isEnabled()); + setSelected(optionStates.get(col)); + setFont(list.getFont()); + setBackground(list.getBackground()); + setForeground(list.getForeground()); + setText(col.getName()); + return this; + } + return new JLabel(); + } + + } + +} From 9b9e3c6f90bd474c6980d258b31365de5d1dc029 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Tue, 1 Oct 2013 13:08:56 -0400 Subject: [PATCH 007/179] Updated configuration wizard to include File Report configuration. --- .../autopsy/report/ReportGenerator.java | 111 +++++++++++++++++- .../autopsy/report/ReportVisualPanel1.java | 42 +++++-- .../autopsy/report/ReportVisualPanel2.form | 18 +-- .../autopsy/report/ReportVisualPanel2.java | 16 +-- .../autopsy/report/ReportWizardAction.java | 5 +- .../autopsy/report/ReportWizardIterator.java | 1 + .../autopsy/report/ReportWizardPanel1.java | 1 + .../autopsy/report/ReportWizardPanel2.java | 11 ++ 8 files changed, 177 insertions(+), 28 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java b/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java index 2d027b0bd8..cb09bd33ff 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java @@ -47,6 +47,7 @@ import javax.swing.JDialog; import javax.swing.JFrame; import javax.swing.SwingWorker; import org.openide.filesystems.FileUtil; +import org.openide.util.Exceptions; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.EscapeUtil; import org.sleuthkit.autopsy.coreutils.Logger; @@ -77,13 +78,14 @@ public class ReportGenerator { private Map tableProgress; private Map generalProgress; + 
private Map fileProgress; private String reportPath; private ReportGenerationPanel panel = new ReportGenerationPanel(); static final String REPORTS_DIR = "Reports"; - ReportGenerator(Map tableModuleStates, Map generalModuleStates) { + ReportGenerator(Map tableModuleStates, Map generalModuleStates, Map fileListModuleStates) { // Create the root reports directory path of the form: /Reports/ / DateFormat dateFormat = new SimpleDateFormat("MM-dd-yyyy-HH-mm-ss"); Date date = new Date(); @@ -100,7 +102,8 @@ public class ReportGenerator { // Initialize the progress panels generalProgress = new HashMap<>(); tableProgress = new HashMap<>(); - setupProgressPanels(tableModuleStates, generalModuleStates); + fileProgress = new HashMap<>(); + setupProgressPanels(tableModuleStates, generalModuleStates, fileListModuleStates); } /** @@ -109,7 +112,7 @@ public class ReportGenerator { * @param tableModuleStates the enabled/disabled state of each TableReportModule * @param generalModuleStates the enabled/disabled state of each GeneralReportModule */ - private void setupProgressPanels(Map tableModuleStates, Map generalModuleStates) { + private void setupProgressPanels(Map tableModuleStates, Map generalModuleStates, Map fileListModuleStates) { if (null != tableModuleStates) { for (Entry entry : tableModuleStates.entrySet()) { if (entry.getValue()) { @@ -127,6 +130,15 @@ public class ReportGenerator { } } } + + if (null != fileListModuleStates) { + for(Entry entry : fileListModuleStates.entrySet()) { + if (entry.getValue()) { + FileReportModule module = entry.getKey(); + fileProgress.put(module, panel.addReport(module.getName(), reportPath + module.getFilePath())); + } + } + } } /** @@ -183,6 +195,23 @@ public class ReportGenerator { } } + /** + * Generate the FileReportModule reports in a new SwingWorker. + * + * @param enabledInfo the Information that should be included about each file + * in the report. 
+ */ + public void generateFileListReports(Map enabledInfo) { + List enabled = new ArrayList<>(); + for (Entry e : enabledInfo.entrySet()) { + if(e.getValue()) { + enabled.add(e.getKey()); + } + } + FileReportsWorker worker = new FileReportsWorker(enabled); + worker.execute(); + } + /** * SwingWorker to generate a report on all GeneralReportModules. */ @@ -201,6 +230,82 @@ public class ReportGenerator { } + /** + * SwingWorker to generate a FileReport. + */ + private class FileReportsWorker extends SwingWorker { + private List enabledInfo = Arrays.asList(FILE_REPORT_INFO.values()); + private List fileModules = new ArrayList<>(); + + FileReportsWorker(List enabled) { + enabledInfo = enabled; + for (Entry entry : fileProgress.entrySet()) { + fileModules.add(entry.getKey()); + } + } + + @Override + protected Integer doInBackground() throws Exception { + for (FileReportModule module : fileModules) { + ReportProgressPanel progress = fileProgress.get(module); + if (progress.getStatus() != ReportStatus.CANCELED) { + progress.start(); + progress.setIndeterminate(false); + progress.updateStatusLabel("Querying database..."); + } + } + + List files = getFiles(); + int numFiles = files.size(); + for (FileReportModule module : fileModules) { + module.startReport(reportPath); + module.startTable(enabledInfo); + fileProgress.get(module).setMaximumProgress(numFiles); + } + + // Add files to report. + for (AbstractFile file : files) { + // Check to see if any reports have been cancelled. + if (fileModules.isEmpty()) { + break; + } + // Remove cancelled reports, add files to report otherwise. 
+ Iterator iter = fileModules.iterator(); + while (iter.hasNext()) { + FileReportModule module = iter.next(); + ReportProgressPanel progress = fileProgress.get(module); + if (progress.getStatus() == ReportStatus.CANCELED) { + iter.remove(); + } else { + progress.updateStatusLabel("Now processing " + file.getName()); + module.addRow(file, enabledInfo); + progress.increment(); + } + } + } + + for (FileReportModule module : fileModules) { + module.endTable(); + module.endReport(); + fileProgress.get(module).complete(); + } + + return 0; + } + + private List getFiles() { + List absFiles; + try { + SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase(); + absFiles = skCase.findAllFilesWhere("NOT meta_type = 2"); + return absFiles; + } catch (TskCoreException ex) { + // TODO + return Collections.EMPTY_LIST; + } + } + } + /** * SwingWorker to generate reports on blackboard artifacts. */ diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel1.java b/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel1.java index fca94c5202..426ff8e300 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel1.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel1.java @@ -39,8 +39,10 @@ public final class ReportVisualPanel1 extends JPanel { private Map tableModuleStates = new LinkedHashMap(); private Map generalModuleStates = new LinkedHashMap(); + private Map fileListModuleStates = new LinkedHashMap(); private List tableModules = new ArrayList(); private List generalModules = new ArrayList(); + private List fileListModules = new ArrayList(); private ModulesTableModel modulesModel; private ModuleSelectionListener modulesListener; @@ -71,6 +73,10 @@ public final class ReportVisualPanel1 extends JPanel { generalModuleStates.put(module, Boolean.FALSE); generalModules.add(module); } + for(FileReportModule module : Lookup.getDefault().lookupAll(FileReportModule.class)) { + fileListModuleStates.put(module, Boolean.FALSE); + 
fileListModules.add(module); + } modulesModel = new ModulesTableModel(); modulesListener = new ModuleSelectionListener(); @@ -109,7 +115,14 @@ public final class ReportVisualPanel1 extends JPanel { Map getGeneralModuleStates() { return generalModuleStates; } - + + /** + * @return the enabled/disabled states of all FileListReportModules + */ + Map getFileListModuleStates() { + return fileListModuleStates; + } + /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always @@ -204,7 +217,7 @@ public final class ReportVisualPanel1 extends JPanel { @Override public int getRowCount() { - return tableModules.size() + generalModules.size(); + return tableModules.size() + generalModules.size() + fileListModules.size(); } @Override @@ -217,13 +230,17 @@ public final class ReportVisualPanel1 extends JPanel { ReportModule module; if (rowIndex < tableModules.size()) { module = tableModules.get(rowIndex); - } else { + } else if (rowIndex >= tableModules.size() && rowIndex < tableModules.size() + generalModules.size()){ module = generalModules.get(rowIndex - tableModules.size()); + } else { + module = fileListModules.get(rowIndex - tableModules.size() - generalModules.size()); } if (columnIndex == 0 && rowIndex < tableModules.size()) { return tableModuleStates.get(tableModules.get(rowIndex)); - } else if (columnIndex == 0 && rowIndex >= tableModules.size()) { + } else if (columnIndex == 0 && rowIndex >= tableModules.size() && rowIndex < tableModules.size() + generalModules.size()) { return generalModuleStates.get(generalModules.get(rowIndex - tableModules.size())); + } else if (columnIndex == 0 && rowIndex >= tableModules.size() + generalModules.size()) { + return fileListModuleStates.get(fileListModules.get(rowIndex - tableModules.size() - generalModules.size())); } else { return module.getName(); } @@ -238,8 +255,10 @@ public final class ReportVisualPanel1 extends JPanel { public void 
setValueAt(Object aValue, int rowIndex, int columnIndex) { if (columnIndex == 0 && rowIndex < tableModules.size()) { tableModuleStates.put(tableModules.get(rowIndex), (Boolean) aValue); - } else if (columnIndex == 0 && rowIndex >= tableModules.size()) { + } else if (columnIndex == 0 && rowIndex >= tableModules.size() && rowIndex < tableModules.size() + generalModules.size()) { generalModuleStates.put(generalModules.get(rowIndex - tableModules.size()), (Boolean) aValue); + } else if (columnIndex == 0 && rowIndex >= tableModules.size() + generalModules.size()) { + fileListModuleStates.put(fileListModules.get(rowIndex - tableModules.size() - generalModules.size()), (Boolean) aValue); } // Check if there are any TableReportModules enabled boolean tableModuleEnabled = false; @@ -254,7 +273,13 @@ public final class ReportVisualPanel1 extends JPanel { generalModuleEnabled = true; } } - if(tableModuleEnabled) { + boolean fileListModuleEnabled = false; + for (Entry module : fileListModuleStates.entrySet()) { + if (module.getValue()) { + fileListModuleEnabled = true; + } + } + if(tableModuleEnabled || fileListModuleEnabled) { wizPanel.setNext(true); wizPanel.setFinish(false); } else if(generalModuleEnabled) { @@ -281,13 +306,16 @@ public final class ReportVisualPanel1 extends JPanel { if (rowIndex < tableModules.size()) { configurationPanel.add(new DefaultReportConfigurationPanel(), BorderLayout.CENTER); descriptionTextPane.setText(tableModules.get(rowIndex).getDescription()); - } else { + } else if (rowIndex >= tableModules.size() && rowIndex < tableModules.size() + generalModules.size()) { GeneralReportModule module = generalModules.get(rowIndex - tableModules.size()); JPanel panel = module.getConfigurationPanel(); descriptionTextPane.setText(module.getDescription()); if (panel != null) { configurationPanel.add(panel, BorderLayout.CENTER); } + } else { + configurationPanel.add(new DefaultReportConfigurationPanel(), BorderLayout.CENTER); + 
descriptionTextPane.setText(fileListModules.get(rowIndex - tableModules.size() - generalModules.size()).getDescription()); } configurationPanel.revalidate(); configurationPanel.repaint(); diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.form b/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.form index 7acec67ed7..bd00c6e2eb 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.form +++ b/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.form @@ -29,23 +29,23 @@ - - - - - - - - - + + + + + + + + + diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.java b/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.java index d77e1438c1..cabaf3db9b 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.java @@ -240,19 +240,19 @@ public final class ReportVisualPanel2 extends JPanel { .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(taggedResultsRadioButton) - .addComponent(allResultsRadioButton) - .addComponent(dataLabel)) - .addGap(0, 481, Short.MAX_VALUE)) - .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addGap(21, 21, 21) .addComponent(tagsScrollPane) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) - .addComponent(advancedButton, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(advancedButton, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, 89, Short.MAX_VALUE) .addComponent(deselectAllButton, javax.swing.GroupLayout.DEFAULT_SIZE, 
javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) - .addComponent(selectAllButton, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)))) + .addComponent(selectAllButton, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))) + .addGroup(layout.createSequentialGroup() + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(taggedResultsRadioButton) + .addComponent(dataLabel) + .addComponent(allResultsRadioButton)) + .addGap(0, 481, Short.MAX_VALUE))) .addContainerGap()) ); layout.setVerticalGroup( diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportWizardAction.java b/Core/src/org/sleuthkit/autopsy/report/ReportWizardAction.java index 11201aaaed..5ee1efde80 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportWizardAction.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportWizardAction.java @@ -68,8 +68,11 @@ public final class ReportWizardAction extends CallableSystemAction implements P wiz.setTitleFormat(new MessageFormat("{0} {1}")); wiz.setTitle("Generate Report"); if (DialogDisplayer.getDefault().notify(wiz) == WizardDescriptor.FINISH_OPTION) { - ReportGenerator generator = new ReportGenerator((Map)wiz.getProperty("tableModuleStates"), (Map)wiz.getProperty("generalModuleStates")); + ReportGenerator generator = new ReportGenerator((Map)wiz.getProperty("tableModuleStates"), + (Map)wiz.getProperty("generalModuleStates"), + (Map)wiz.getProperty("fileListModuleStates")); generator.generateArtifactTableReports((Map)wiz.getProperty("artifactStates"), (Map)wiz.getProperty("tagStates")); + generator.generateFileListReports((Map)wiz.getProperty("fileReportOptions")); generator.generateGeneralReports(); generator.displayProgressPanels(); } diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportWizardIterator.java b/Core/src/org/sleuthkit/autopsy/report/ReportWizardIterator.java index bdd593559e..766059d8eb 100644 --- 
a/Core/src/org/sleuthkit/autopsy/report/ReportWizardIterator.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportWizardIterator.java @@ -35,6 +35,7 @@ public final class ReportWizardIterator implements WizardDescriptor.Iterator>(); panels.add(new ReportWizardPanel1()); panels.add(new ReportWizardPanel2()); + panels.add(new ReportWizardFileOptionsPanel()); String[] steps = new String[panels.size()]; for (int i = 0; i < panels.size(); i++) { Component c = panels.get(i).getComponent(); diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportWizardPanel1.java b/Core/src/org/sleuthkit/autopsy/report/ReportWizardPanel1.java index 9cc18bad91..f533546e87 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportWizardPanel1.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportWizardPanel1.java @@ -104,5 +104,6 @@ public class ReportWizardPanel1 implements WizardDescriptor.FinishablePanel { private ReportVisualPanel2 component; private JButton finishButton; + private JButton nextButton; private WizardDescriptor wiz; ReportWizardPanel2() { finishButton = new JButton("Finish"); finishButton.setEnabled(true); + nextButton = new JButton("Next >"); + nextButton.setEnabled(true); + + nextButton.addActionListener(new ActionListener() { + @Override + public void actionPerformed(ActionEvent e) { + wiz.doNextClick(); + } + }); + finishButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { From 8002058cd3f7a6e7c592f87575c29ad3f8293139 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Tue, 1 Oct 2013 15:06:38 -0400 Subject: [PATCH 008/179] Added guard to not run FileReport if no report modules were selected. 
--- .../autopsy/report/ReportGenerator.java | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java b/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java index cb09bd33ff..38da7534ea 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java @@ -202,14 +202,16 @@ public class ReportGenerator { * in the report. */ public void generateFileListReports(Map enabledInfo) { - List enabled = new ArrayList<>(); - for (Entry e : enabledInfo.entrySet()) { - if(e.getValue()) { - enabled.add(e.getKey()); + if (!fileProgress.isEmpty() && null != enabledInfo) { + List enabled = new ArrayList<>(); + for (Entry e : enabledInfo.entrySet()) { + if(e.getValue()) { + enabled.add(e.getKey()); + } } + FileReportsWorker worker = new FileReportsWorker(enabled); + worker.execute(); } - FileReportsWorker worker = new FileReportsWorker(enabled); - worker.execute(); } /** @@ -250,7 +252,6 @@ public class ReportGenerator { ReportProgressPanel progress = fileProgress.get(module); if (progress.getStatus() != ReportStatus.CANCELED) { progress.start(); - progress.setIndeterminate(false); progress.updateStatusLabel("Querying database..."); } } @@ -260,6 +261,7 @@ public class ReportGenerator { for (FileReportModule module : fileModules) { module.startReport(reportPath); module.startTable(enabledInfo); + fileProgress.get(module).setIndeterminate(false); fileProgress.get(module).setMaximumProgress(numFiles); } From 3aa3a82b21b03046302a2b2a22351af43eddef82 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Tue, 1 Oct 2013 15:12:59 -0400 Subject: [PATCH 009/179] Added support for dynamic reporting wizard. 
--- .../autopsy/report/ReportWizardIterator.java | 63 +++++++++++++++++-- .../autopsy/report/ReportWizardPanel1.java | 30 ++++++++- .../autopsy/report/ReportWizardPanel2.java | 11 +++- 3 files changed, 95 insertions(+), 9 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportWizardIterator.java b/Core/src/org/sleuthkit/autopsy/report/ReportWizardIterator.java index 766059d8eb..f4ac4cb67c 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportWizardIterator.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportWizardIterator.java @@ -19,23 +19,43 @@ package org.sleuthkit.autopsy.report; import java.awt.Component; -import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.NoSuchElementException; import javax.swing.JComponent; import javax.swing.event.ChangeListener; import org.openide.WizardDescriptor; +import org.openide.util.NbPreferences; public final class ReportWizardIterator implements WizardDescriptor.Iterator { private int index; + + private ReportWizardPanel1 firstPanel; + private ReportWizardPanel2 tableConfigPanel; + private ReportWizardFileOptionsPanel fileConfigPanel; + private List> panels; + + private WizardDescriptor.Panel[] allConfigPanels; + private String[] allConfigIndex; + private WizardDescriptor.Panel[] tableConfigPanels; + private String[] tableConfigIndex; + private WizardDescriptor.Panel[] fileConfigPanels; + private String[] fileConfigIndex; + + ReportWizardIterator() { + firstPanel = new ReportWizardPanel1(); + tableConfigPanel = new ReportWizardPanel2(); + fileConfigPanel = new ReportWizardFileOptionsPanel(); + + allConfigPanels = new WizardDescriptor.Panel[]{firstPanel, tableConfigPanel, fileConfigPanel}; + tableConfigPanels = new WizardDescriptor.Panel[]{firstPanel, tableConfigPanel}; + fileConfigPanels = new WizardDescriptor.Panel[]{firstPanel, fileConfigPanel}; + } private List> getPanels() { if (panels == null) { - panels = new ArrayList>(); - panels.add(new 
ReportWizardPanel1()); - panels.add(new ReportWizardPanel2()); - panels.add(new ReportWizardFileOptionsPanel()); + panels = Arrays.asList(allConfigPanels); String[] steps = new String[panels.size()]; for (int i = 0; i < panels.size(); i++) { Component c = panels.get(i).getComponent(); @@ -50,9 +70,32 @@ public final class ReportWizardIterator implements WizardDescriptor.Iterator current() { @@ -79,6 +122,16 @@ public final class ReportWizardIterator implements WizardDescriptor.Iterator { private WizardDescriptor wiz; @@ -102,8 +106,30 @@ public class ReportWizardPanel1 implements WizardDescriptor.FinishablePanel tables = getComponent().getTableModuleStates(); + Map files = getComponent().getFileListModuleStates(); + wiz.putProperty("tableModuleStates", tables); wiz.putProperty("generalModuleStates", getComponent().getGeneralModuleStates()); - wiz.putProperty("fileListModuleStates", getComponent().getFileListModuleStates()); + wiz.putProperty("fileListModuleStates", files); + + // Store preferences that WizardIterator will use to determine what + // panels need to be shown + Preferences prefs = NbPreferences.forModule(ReportWizardPanel1.class); + prefs.putBoolean("tableConfig", any(tables.values())); + prefs.putBoolean("fileConfig", any(files.values())); + } + + /** + * Are any of the given booleans true? 
+ * @param bools + * @return + */ + private boolean any(Collection bools) { + for (Boolean b : bools) { + if (b) { + return true; + } + } + return false; } } diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportWizardPanel2.java b/Core/src/org/sleuthkit/autopsy/report/ReportWizardPanel2.java index 7eefd77bf5..346ae627c1 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportWizardPanel2.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportWizardPanel2.java @@ -24,6 +24,7 @@ import javax.swing.JButton; import javax.swing.event.ChangeListener; import org.openide.WizardDescriptor; import org.openide.util.HelpCtx; +import org.openide.util.NbPreferences; public class ReportWizardPanel2 implements WizardDescriptor.Panel { private ReportVisualPanel2 component; @@ -33,7 +34,7 @@ public class ReportWizardPanel2 implements WizardDescriptor.Panel"); nextButton.setEnabled(true); @@ -80,14 +81,20 @@ public class ReportWizardPanel2 implements WizardDescriptor.Panel Date: Tue, 1 Oct 2013 15:13:42 -0400 Subject: [PATCH 010/179] Registered new Reporting module in layer.xml. Added resource strings to Bundle.properties. --- Core/src/org/sleuthkit/autopsy/core/layer.xml | 7 ++++++- .../src/org/sleuthkit/autopsy/report/Bundle.properties | 10 ++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/core/layer.xml b/Core/src/org/sleuthkit/autopsy/core/layer.xml index f43b8285d6..54bec76816 100644 --- a/Core/src/org/sleuthkit/autopsy/core/layer.xml +++ b/Core/src/org/sleuthkit/autopsy/core/layer.xml @@ -310,6 +310,11 @@
+ + + + + - + @@ -183,10 +183,17 @@ + + + + + + + - + From a713b75db77a0a23ac4fa38f703e164d927ea536 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Tue, 15 Oct 2013 15:49:09 -0400 Subject: [PATCH 045/179] Fixed ant error. --- build-windows.xml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/build-windows.xml b/build-windows.xml index af2dc259ef..91e67fe68c 100644 --- a/build-windows.xml +++ b/build-windows.xml @@ -186,8 +186,7 @@ - - + From d74fa2e89482dcd54fd6167a30d30ea3880f27a4 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 15 Oct 2013 16:10:11 -0400 Subject: [PATCH 046/179] New tags API extended to properly handle blackboard artifact and content nodes --- .../casemodule/services/TagsManager.java | 2 +- .../datamodel/BlackboardArtifactTagNode.java | 30 +++++++++++++------ .../autopsy/datamodel/ContentTagNode.java | 9 +++--- 3 files changed, 26 insertions(+), 15 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java index 29de1a0e29..2f113dec83 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java @@ -201,7 +201,7 @@ public class TagsManager implements Closeable { * @throws TskCoreException */ public void addBlackboardArtifactTag(BlackboardArtifact artifact, TagName tagName, String comment) throws TskCoreException { - tskCase.addBlackboardArtifactTag(new BlackboardArtifactTag(artifact, tagName, comment)); + tskCase.addBlackboardArtifactTag(new BlackboardArtifactTag(artifact, tskCase.getContentById(artifact.getObjectID()), tagName, comment)); } void deleteBlackboardArtifactTag(BlackboardArtifactTag tag) throws TskCoreException { diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactTagNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactTagNode.java index a3eae02719..5a603bbcf5 100755 --- 
a/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactTagNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactTagNode.java @@ -18,10 +18,13 @@ */ package org.sleuthkit.autopsy.datamodel; +import java.util.logging.Level; +import java.util.logging.Logger; import org.openide.nodes.Children; import org.openide.nodes.Sheet; import org.openide.util.lookup.Lookups; import org.sleuthkit.datamodel.BlackboardArtifactTag; +import org.sleuthkit.datamodel.TskCoreException; /** * Instances of this class wrap BlackboardArtifactTag objects. In the Autopsy @@ -31,18 +34,19 @@ import org.sleuthkit.datamodel.BlackboardArtifactTag; * either content or blackboard artifact tag nodes. */ public class BlackboardArtifactTagNode extends DisplayableItemNode { - private static final String ICON_PATH = "org/sleuthkit/autopsy/images/tag-folder-blue-icon-16.png"; // RJCTODO: Want better icons? + private static final String ICON_PATH = "org/sleuthkit/autopsy/images/tag-folder-blue-icon-16.png"; + private final BlackboardArtifactTag tag; public BlackboardArtifactTagNode(BlackboardArtifactTag tag) { - super(Children.LEAF, Lookups.fixed(tag, tag.getArtifact())); - super.setName(tag.getArtifact().getDisplayName()); - super.setDisplayName(tag.getArtifact().getDisplayName()); + super(Children.LEAF, Lookups.fixed(tag, tag.getArtifact(), tag.getContent())); + super.setName(tag.getContent().getName()); + super.setDisplayName(tag.getContent().getName()); this.setIconBaseWithExtension(ICON_PATH); + this.tag = tag; } @Override protected Sheet createSheet() { - // RJCTODO: Make additional properties as needed for DataResultViewers Sheet propertySheet = super.createSheet(); Sheet.Set properties = propertySheet.get(Sheet.PROPERTIES); if (properties == null) { @@ -50,15 +54,23 @@ public class BlackboardArtifactTagNode extends DisplayableItemNode { propertySheet.put(properties); } - properties.put(new NodeProperty("Name", "Name", "", getName())); - + properties.put(new 
NodeProperty("Source File", "Source File", "", tag.getContent().getName())); + String contentPath; + try { + contentPath = tag.getContent().getUniquePath(); + } + catch (TskCoreException ex) { + Logger.getLogger(ContentTagNode.class.getName()).log(Level.SEVERE, "Failed to get path for content (id = " + tag.getContent().getId() + ")", ex); + contentPath = "Unavailable"; + } + properties.put(new NodeProperty("Source File Path", "Source File Path", "", contentPath)); + properties.put(new NodeProperty("Result Type", "Result Type", "", tag.getArtifact().getDisplayName())); + return propertySheet; } @Override public T accept(DisplayableItemNodeVisitor v) { - // See classes derived from DisplayableItemNodeVisitor - // for behavior added using the Visitor pattern. return v.visit(this); } diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagNode.java index 940fc6ab9d..2c893b3fb5 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagNode.java @@ -19,6 +19,8 @@ package org.sleuthkit.autopsy.datamodel; +import java.util.logging.Level; +import java.util.logging.Logger; import org.openide.nodes.Children; import org.openide.nodes.Sheet; import org.openide.util.lookup.Lookups; @@ -45,7 +47,6 @@ public class ContentTagNode extends DisplayableItemNode { @Override protected Sheet createSheet() { - // RJCTODO: Make additional properties as needed for DataResultViewers Sheet propertySheet = super.createSheet(); Sheet.Set properties = propertySheet.get(Sheet.PROPERTIES); if (properties == null) { @@ -53,13 +54,13 @@ public class ContentTagNode extends DisplayableItemNode { propertySheet.put(properties); } - properties.put(new NodeProperty("Source File", "Source File", "", getName())); + properties.put(new NodeProperty("Source File", "Source File", "", tag.getContent().getName())); String contentPath; try { contentPath = 
tag.getContent().getUniquePath(); } catch (TskCoreException ex) { - // RJCTODO: Add to log + Logger.getLogger(ContentTagNode.class.getName()).log(Level.SEVERE, "Failed to get path for content (id = " + tag.getContent().getId() + ")", ex); contentPath = "Unavailable"; } properties.put(new NodeProperty("Source File Path", "Source File Path", "", contentPath)); @@ -69,8 +70,6 @@ public class ContentTagNode extends DisplayableItemNode { @Override public T accept(DisplayableItemNodeVisitor v) { - // See classes derived from DisplayableItemNodeVisitor - // for behavior added using the Visitor pattern. return v.visit(this); } From decd0840de4649bdd4c4e29bd65c6663ed204831 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Tue, 15 Oct 2013 16:24:00 -0400 Subject: [PATCH 047/179] Fixed ant error. --- build-windows.xml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/build-windows.xml b/build-windows.xml index 91e67fe68c..bdcfdec3ab 100644 --- a/build-windows.xml +++ b/build-windows.xml @@ -183,10 +183,11 @@ - + - + + From e7a3b496f7433299167d853468325385ddfdc449 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Tue, 15 Oct 2013 17:01:18 -0400 Subject: [PATCH 048/179] Fixed discrepancy between fileReportPath and actual file name for file reports. 
--- Core/src/org/sleuthkit/autopsy/report/FileReportText.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/report/FileReportText.java b/Core/src/org/sleuthkit/autopsy/report/FileReportText.java index 856bdfb90b..05192e6910 100755 --- a/Core/src/org/sleuthkit/autopsy/report/FileReportText.java +++ b/Core/src/org/sleuthkit/autopsy/report/FileReportText.java @@ -39,6 +39,7 @@ public class FileReportText implements FileReportModule { private static final Logger logger = Logger.getLogger(FileReportText.class.getName()); private String reportPath; private Writer out; + private static final String FILE_NAME = "file-report.txt"; private static FileReportText instance; @@ -52,7 +53,7 @@ public class FileReportText implements FileReportModule { @Override public void startReport(String path) { - this.reportPath = path + "report.txt"; + this.reportPath = path + FILE_NAME; try { out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(this.reportPath))); } catch (IOException ex) { @@ -132,6 +133,6 @@ public class FileReportText implements FileReportModule { @Override public String getFilePath() { - return "file-report.txt"; + return FILE_NAME; } } From ee3578fa0a68b5c13a553b5aff32210c18457af5 Mon Sep 17 00:00:00 2001 From: df-test-runner Date: Tue, 15 Oct 2013 18:57:41 -0400 Subject: [PATCH 049/179] fixed ant error. --- build-windows.xml | 32 +++++++++++++++++++++----------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/build-windows.xml b/build-windows.xml index bdcfdec3ab..0e084d2572 100644 --- a/build-windows.xml +++ b/build-windows.xml @@ -84,6 +84,14 @@ + + + + + + + + @@ -92,7 +100,7 @@ - + @@ -102,6 +110,14 @@ + + + + + + + + @@ -111,7 +127,7 @@ - + @@ -182,20 +198,14 @@ - - - - - - - - + From 72add0128adf50e6174f99ed73311e5b93e7d789 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Wed, 16 Oct 2013 08:26:05 -0400 Subject: [PATCH 050/179] Removed dead code. 
--- build-windows.xml | 9 --------- 1 file changed, 9 deletions(-) diff --git a/build-windows.xml b/build-windows.xml index 0e084d2572..09795298a4 100644 --- a/build-windows.xml +++ b/build-windows.xml @@ -197,15 +197,6 @@ - - From 2a6363757105752d3a30058c486483c77e14dd60 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Wed, 16 Oct 2013 09:23:52 -0400 Subject: [PATCH 051/179] Updated NEWS and version --- NEWS.txt | 4 ++++ nbproject/project.properties | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/NEWS.txt b/NEWS.txt index 0fd41075bf..cbcfd8338c 100644 --- a/NEWS.txt +++ b/NEWS.txt @@ -1,3 +1,7 @@ +---------------- VERSION 3.0.8 -------------- +Bug Fixes: +- Fixed installer bug on Windows. No other code changes. + ---------------- VERSION 3.0.7 -------------- New features: diff --git a/nbproject/project.properties b/nbproject/project.properties index e70fffba4b..6446b47619 100644 --- a/nbproject/project.properties +++ b/nbproject/project.properties @@ -4,7 +4,7 @@ app.title=Autopsy ### lowercase version of above app.name=autopsy ### if left unset, version will default to today's date -app.version=3.0.7 +app.version=3.0.8 ### Build type isn't used at this point, but it may be useful ### Must be one of: DEVELOPMENT, RELEASE build.type=RELEASE From 214bd028c886a01586451131fbfa3c66e6782d4c Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Wed, 16 Oct 2013 10:37:48 -0400 Subject: [PATCH 052/179] Added additional data types. 
--- .../autopsy/report/FileReportDataTypes.java | 46 ++++++++++++++----- 1 file changed, 34 insertions(+), 12 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/report/FileReportDataTypes.java b/Core/src/org/sleuthkit/autopsy/report/FileReportDataTypes.java index d9e74474b9..8cc0b63049 100755 --- a/Core/src/org/sleuthkit/autopsy/report/FileReportDataTypes.java +++ b/Core/src/org/sleuthkit/autopsy/report/FileReportDataTypes.java @@ -20,6 +20,7 @@ package org.sleuthkit.autopsy.report; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.TskData; /** * Represents Column Headers for FileList Reports. @@ -44,6 +45,21 @@ public enum FileReportDataTypes { return (extIndex == -1 ? "" : name.substring(extIndex)); } }, + FILE_TYPE("File Type") { + @Override + public String getValue(AbstractFile file) { + return file.getMetaTypeAsString(); + } + }, + DELETED("Is Deleted") { + @Override + public String getValue(AbstractFile file) { + if (file.getMetaFlagsAsString().equals(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC.toString())) { + return "yes"; + } + return ""; + } + }, A_TIME("Last Accessed") { @Override public String getValue(AbstractFile file) { @@ -68,12 +84,30 @@ public enum FileReportDataTypes { return String.valueOf(file.getSize()); } }, + ADDRESS("Address") { + @Override + public String getValue(AbstractFile file) { + return String.valueOf(file.getMetaAddr()); + } + }, HASH_VALUE("Hash Value") { @Override public String getValue(AbstractFile file) { return file.getMd5Hash(); } }, + KNOWN_STATUS("Known Status") { + @Override + public String getValue(AbstractFile file) { + return file.getKnown().getName(); + } + }, + PERMISSIONS("Permissions") { + @Override + public String getValue(AbstractFile file) { + return file.getModesAsString(); + } + }, FULL_PATH("Full Path") { @Override public String getValue(AbstractFile file) { @@ -83,18 +117,6 @@ public enum FileReportDataTypes { return ""; } } - }, - 
PERMISSIONS("Permissions") { - @Override - public String getValue(AbstractFile file) { - return file.getModesAsString(); - } - }, - ADDRESS("Address") { - @Override - public String getValue(AbstractFile file) { - return String.valueOf(file.getMetaAddr()); - } }; private String name; From b65a9e6d5a61b2bbd5c46bed886d42d9f6d9807f Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Wed, 16 Oct 2013 18:26:32 -0400 Subject: [PATCH 053/179] Added delete tag capability to new tags api --- .../AddBlackboardArtifactTagAction.java | 2 +- .../autopsy/actions/AddContentTagAction.java | 2 +- .../autopsy/actions/AddTagAction.java | 24 +- .../DeleteBlackboardArtifactTagAction.java | 67 +++ .../actions/DeleteContentTagAction.java | 66 +++ .../sleuthkit/autopsy/actions/TagAction.java | 62 +++ .../casemodule/services/TagsManager.java | 336 +++++++------- .../datamodel/AbstractContentChildren.java | 5 - .../autopsy/datamodel/AutopsyItemVisitor.java | 7 - .../datamodel/BlackboardArtifactTagNode.java | 11 + .../autopsy/datamodel/ContentTagNode.java | 11 + .../autopsy/datamodel/ContentTagTypeNode.java | 2 +- .../datamodel/DisplayableItemNodeVisitor.java | 24 - .../autopsy/datamodel/ResultsNode.java | 1 - .../datamodel/RootContentChildren.java | 17 +- .../org/sleuthkit/autopsy/datamodel/Tags.java | 411 +----------------- .../sleuthkit/autopsy/datamodel/TagsNode.java | 6 +- .../BlackboardArtifactTagTypeNode.java | 2 +- .../directorytree/DataResultFilterNode.java | 12 - 19 files changed, 432 insertions(+), 636 deletions(-) create mode 100755 Core/src/org/sleuthkit/autopsy/actions/DeleteBlackboardArtifactTagAction.java create mode 100755 Core/src/org/sleuthkit/autopsy/actions/DeleteContentTagAction.java create mode 100755 Core/src/org/sleuthkit/autopsy/actions/TagAction.java diff --git a/Core/src/org/sleuthkit/autopsy/actions/AddBlackboardArtifactTagAction.java b/Core/src/org/sleuthkit/autopsy/actions/AddBlackboardArtifactTagAction.java index 303a773e17..d81e978bc1 100755 --- 
a/Core/src/org/sleuthkit/autopsy/actions/AddBlackboardArtifactTagAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/AddBlackboardArtifactTagAction.java @@ -46,6 +46,7 @@ public class AddBlackboardArtifactTagAction extends AddTagAction { } private AddBlackboardArtifactTagAction() { + super(""); } @Override @@ -57,7 +58,6 @@ public class AddBlackboardArtifactTagAction extends AddTagAction { protected void addTag(TagName tagName, String comment) { Collection selectedArtifacts = Utilities.actionsGlobalContext().lookupAll(BlackboardArtifact.class); for (BlackboardArtifact artifact : selectedArtifacts) { - Tags.createTag(artifact, tagName.getDisplayName(), comment); //RJCTODO: Jettision this try { Case.getCurrentCase().getServices().getTagsManager().addBlackboardArtifactTag(artifact, tagName, comment); } diff --git a/Core/src/org/sleuthkit/autopsy/actions/AddContentTagAction.java b/Core/src/org/sleuthkit/autopsy/actions/AddContentTagAction.java index 813fde02af..1483ce36c9 100755 --- a/Core/src/org/sleuthkit/autopsy/actions/AddContentTagAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/AddContentTagAction.java @@ -46,6 +46,7 @@ public class AddContentTagAction extends AddTagAction { } private AddContentTagAction() { + super(""); } @Override @@ -57,7 +58,6 @@ public class AddContentTagAction extends AddTagAction { protected void addTag(TagName tagName, String comment) { Collection selectedFiles = Utilities.actionsGlobalContext().lookupAll(AbstractFile.class); for (AbstractFile file : selectedFiles) { - Tags.createTag(file, tagName.getDisplayName(), comment); //RJCTODO: Jettision this try { Case.getCurrentCase().getServices().getTagsManager().addContentTag(file, tagName, comment); } diff --git a/Core/src/org/sleuthkit/autopsy/actions/AddTagAction.java b/Core/src/org/sleuthkit/autopsy/actions/AddTagAction.java index a98b94b0c4..1532a573d2 100755 --- a/Core/src/org/sleuthkit/autopsy/actions/AddTagAction.java +++ 
b/Core/src/org/sleuthkit/autopsy/actions/AddTagAction.java @@ -35,16 +35,20 @@ import org.sleuthkit.datamodel.TagName; * An abstract base class for Actions that allow users to tag SleuthKit data * model objects. */ -abstract class AddTagAction extends AbstractAction implements Presenter.Popup { +abstract class AddTagAction extends TagAction implements Presenter.Popup { private static final String NO_COMMENT = ""; + AddTagAction(String menuText) { + super(menuText); + } + @Override public JMenuItem getPopupPresenter() { return new TagMenu(); } @Override - public void actionPerformed(ActionEvent e) { + protected void doAction(ActionEvent event) { } /** @@ -59,14 +63,6 @@ abstract class AddTagAction extends AbstractAction implements Presenter.Popup { */ abstract protected void addTag(TagName tagName, String comment); - private void refreshDirectoryTree() { - //TODO instead should send event to node children, which will call its refresh() / refreshKeys() - // RJCTODO: Explain what is going on here and pare to one refreshTree() call. - DirectoryTreeTopComponent viewer = DirectoryTreeTopComponent.findInstance(); - viewer.refreshTree(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE); - viewer.refreshTree(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT); - } - /** * Instances of this class implement a context menu user interface for * creating or selecting a tag name for a tag and specifying an optional tag @@ -90,7 +86,7 @@ abstract class AddTagAction extends AbstractAction implements Presenter.Popup { // Each tag name in the current set of tags gets its own menu item in // the "Quick Tags" sub-menu. Selecting one of these menu items adds // a tag with the associated tag name. 
- if (tagNames.isEmpty()) { + if (!tagNames.isEmpty()) { for (final TagName tagName : tagNames) { JMenuItem tagNameItem = new JMenuItem(tagName.getDisplayName()); tagNameItem.addActionListener(new ActionListener() { @@ -114,7 +110,7 @@ abstract class AddTagAction extends AbstractAction implements Presenter.Popup { // The "Quick Tag" menu also gets an "Choose Tag..." menu item. // Selecting this item initiates a dialog that can be used to create // or select a tag name and adds a tag with the resulting name. - JMenuItem newTagMenuItem = new JMenuItem("Choose Tag..."); + JMenuItem newTagMenuItem = new JMenuItem("New Tag..."); newTagMenuItem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { @@ -127,10 +123,10 @@ abstract class AddTagAction extends AbstractAction implements Presenter.Popup { }); quickTagMenu.add(newTagMenuItem); - // Create a "Choose Tag and Comment..." menu item. Selecting this itme initiates + // Create a "Choose Tag and Comment..." menu item. Selecting this item initiates // a dialog that can be used to create or select a tag name with an // optional comment and adds a tag with the resulting name. - JMenuItem tagAndCommentItem = new JMenuItem("Choose Tag and Comment..."); + JMenuItem tagAndCommentItem = new JMenuItem("Tag and Comment..."); tagAndCommentItem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { diff --git a/Core/src/org/sleuthkit/autopsy/actions/DeleteBlackboardArtifactTagAction.java b/Core/src/org/sleuthkit/autopsy/actions/DeleteBlackboardArtifactTagAction.java new file mode 100755 index 0000000000..67891305da --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/actions/DeleteBlackboardArtifactTagAction.java @@ -0,0 +1,67 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2013 Basis Technology Corp. 
+ * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.actions; + +import java.awt.event.ActionEvent; +import java.util.Collection; +import java.util.logging.Level; +import java.util.logging.Logger; +import javax.swing.JOptionPane; +import org.openide.util.Utilities; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.datamodel.BlackboardArtifactTag; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Instances of this Action allow users to delete tags applied to blackboard artifacts. + */ +public class DeleteBlackboardArtifactTagAction extends TagAction { + private static final String MENU_TEXT = "Delete Tag(s)"; + + // This class is a singleton to support multi-selection of nodes, since + // org.openide.nodes.NodeOp.findActions(Node[] nodes) will only pick up an Action if every + // node in the array returns a reference to the same action object from Node.getActions(boolean). 
+ private static DeleteBlackboardArtifactTagAction instance; + + public static synchronized DeleteBlackboardArtifactTagAction getInstance() { + if (null == instance) { + instance = new DeleteBlackboardArtifactTagAction(); + } + return instance; + } + + private DeleteBlackboardArtifactTagAction() { + super(MENU_TEXT); + } + + @Override + protected void doAction(ActionEvent event) { + Collection selectedTags = Utilities.actionsGlobalContext().lookupAll(BlackboardArtifactTag.class); + for (BlackboardArtifactTag tag : selectedTags) { + try { + Case.getCurrentCase().getServices().getTagsManager().deleteBlackboardArtifactTag(tag); + } + catch (TskCoreException ex) { + Logger.getLogger(AddContentTagAction.class.getName()).log(Level.SEVERE, "Error deleting tag", ex); + JOptionPane.showMessageDialog(null, "Unable to delete tag " + tag.getName() + ".", "Tag Deletion Error", JOptionPane.ERROR_MESSAGE); + } + } + } +} + diff --git a/Core/src/org/sleuthkit/autopsy/actions/DeleteContentTagAction.java b/Core/src/org/sleuthkit/autopsy/actions/DeleteContentTagAction.java new file mode 100755 index 0000000000..fd0f6a41dc --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/actions/DeleteContentTagAction.java @@ -0,0 +1,66 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2013 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.actions; + +import java.awt.event.ActionEvent; +import java.util.Collection; +import java.util.logging.Level; +import java.util.logging.Logger; +import javax.swing.JOptionPane; +import org.openide.util.Utilities; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.datamodel.ContentTag; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Instances of this Action allow users to delete tags applied to content. + */ +public class DeleteContentTagAction extends TagAction { + private static final String MENU_TEXT = "Delete Tag(s)"; + + // This class is a singleton to support multi-selection of nodes, since + // org.openide.nodes.NodeOp.findActions(Node[] nodes) will only pick up an Action if every + // node in the array returns a reference to the same action object from Node.getActions(boolean). + private static DeleteContentTagAction instance; + + public static synchronized DeleteContentTagAction getInstance() { + if (null == instance) { + instance = new DeleteContentTagAction(); + } + return instance; + } + + private DeleteContentTagAction() { + super(MENU_TEXT); + } + + @Override + protected void doAction(ActionEvent e) { + Collection selectedTags = Utilities.actionsGlobalContext().lookupAll(ContentTag.class); + for (ContentTag tag : selectedTags) { + try { + Case.getCurrentCase().getServices().getTagsManager().deleteContentTag(tag); + } + catch (TskCoreException ex) { + Logger.getLogger(AddContentTagAction.class.getName()).log(Level.SEVERE, "Error deleting tag", ex); + JOptionPane.showMessageDialog(null, "Unable to delete tag " + tag.getName() + ".", "Tag Deletion Error", JOptionPane.ERROR_MESSAGE); + } + } + } +} diff --git a/Core/src/org/sleuthkit/autopsy/actions/TagAction.java b/Core/src/org/sleuthkit/autopsy/actions/TagAction.java new file mode 100755 index 0000000000..d5d78550f1 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/actions/TagAction.java @@ -0,0 +1,62 @@ +/* + * Autopsy Forensic Browser 
+ * + * Copyright 2013 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.actions; + +import java.awt.event.ActionEvent; +import javax.swing.AbstractAction; +import org.sleuthkit.autopsy.directorytree.DirectoryTreeTopComponent; +import org.sleuthkit.datamodel.BlackboardArtifact; + +/** + * Abstract base class for Actions involving tags. + */ +public abstract class TagAction extends AbstractAction { + public TagAction(String menuText) { + super(menuText); + } + + @Override + public void actionPerformed(ActionEvent event) { + doAction(event); + refreshDirectoryTree(); + } + + /** + * Derived classes must implement this Template Method for actionPerformed(). + * @param event ActionEvent object passed to actionPerformed() + */ + abstract protected void doAction(ActionEvent event); + + /** + * Derived classes should call this method any time a tag is created, updated + * or deleted outside of an actionPerformed() call. + */ + protected void refreshDirectoryTree() { + // The way the "directory tree" currently works, a new tags sub-tree + // needs to be made to reflect the results of invoking tag Actions. The + // way to do this is to call DirectoryTreeTopComponent.refreshTree(), + // which calls RootContentChildren.refreshKeys(BlackboardArtifact.ARTIFACT_TYPE... 
types) + // for the RootContentChildren object that is the child factory for the + // ResultsNode that is the root of the tags sub-tree. There is a switch + // statement in RootContentChildren.refreshKeys() that maps both + // BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE and BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT + // to making a call to refreshKey(TagsNodeKey). + DirectoryTreeTopComponent.findInstance().refreshTree(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java index 2f113dec83..afab0a237b 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java @@ -37,20 +37,105 @@ import org.sleuthkit.datamodel.TskCoreException; /** * A singleton instance of this class functions as an Autopsy service that - * manages the creation, updating, and deletion of tags applied to Content and - * BlackboardArtifacts objects by users. + * manages the creation, updating, and deletion of tags applied to content and + * blackboard artifacts by users. */ public class TagsManager implements Closeable { - private static final String TAGS_SETTINGS_FILE_NAME = "tags"; - private static final String TAG_NAMES_SETTING_KEY = "tagNames"; + private static final String TAGS_SETTINGS_NAME = "Tags"; + private static final String TAG_NAMES_SETTING_KEY = "TagNames"; + private static final TagName[] predefinedTagNames = new TagName[]{new TagName("Bookmark", "", TagName.HTML_COLOR.NONE)}; private final SleuthkitCase tskCase; private final HashMap tagNames = new HashMap<>(); + private final Object lock = new Object(); + // Use this exception and the member hash map to manage uniqueness of hash + // names. 
This is deemed more proactive and informative than leaving this to + // the UNIQUE constraint on the display_name field of the tag_names table in + // the case database. + public class TagNameAlreadyExistsException extends Exception { + } + + /** + * Package-scope constructor for use of Services class. An instance of + * TagsManager should be created for each case that is opened. + * @param [in] tskCase The SleuthkitCase object for the current case. + */ TagsManager(SleuthkitCase tskCase) { this.tskCase = tskCase; - loadTagNamesFromTagSettings(); + getExistingTagNames(); + saveTagNamesToTagsSettings(); } - + + private void getExistingTagNames() { + getTagNamesFromCurrentCase(); + getTagNamesFromTagsSettings(); + getPredefinedTagNames(); + } + + private void getTagNamesFromCurrentCase() { + try { + ArrayList currentTagNames = new ArrayList<>(); + tskCase.getAllTagNames(currentTagNames); + for (TagName tagName : currentTagNames) { + tagNames.put(tagName.getDisplayName(), tagName); + } + } + catch (TskCoreException ex) { + Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to get tag types from the current case", ex); + } + } + + private void getTagNamesFromTagsSettings() { + String setting = ModuleSettings.getConfigSetting(TAGS_SETTINGS_NAME, TAG_NAMES_SETTING_KEY); + if (null != setting && !setting.isEmpty()) { + // Read the tag name setting and break it into tag name tuples. + List tagNameTuples = Arrays.asList(setting.split(";")); + + // Parse each tuple and add the tag names to the current case, one + // at a time to gracefully discard any duplicates or corrupt tuples. 
+ for (String tagNameTuple : tagNameTuples) { + String[] tagNameAttributes = tagNameTuple.split(","); + if (!tagNames.containsKey(tagNameAttributes[0])) { + TagName tagName = new TagName(tagNameAttributes[0], tagNameAttributes[1], TagName.HTML_COLOR.getColorByName(tagNameAttributes[2])); + addTagName(tagName, "Failed to add " + tagName.getDisplayName() + " tag name from tag settings to the current case"); + } + } + } + } + + private void getPredefinedTagNames() { + for (TagName tagName : predefinedTagNames) { + if (!tagNames.containsKey(tagName.getDisplayName())) { + addTagName(tagName, "Failed to add predefined " + tagName.getDisplayName() + " tag name to the current case"); + } + } + } + + private void addTagName(TagName tagName, String errorMessage) { + try { + tskCase.addTagName(tagName); + tagNames.put(tagName.getDisplayName(), tagName); + } + catch(TskCoreException ex) { + Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, errorMessage, ex); + } + } + + private void saveTagNamesToTagsSettings() { + if (!tagNames.isEmpty()) { + StringBuilder setting = new StringBuilder(); + for (TagName tagName : tagNames.values()) { + if (setting.length() != 0) { + setting.append(";"); + } + setting.append(tagName.getDisplayName()).append(","); + setting.append(tagName.getDescription()).append(","); + setting.append(tagName.getColor().name()); + } + ModuleSettings.setConfigSetting(TAGS_SETTINGS_NAME, TAG_NAMES_SETTING_KEY, setting.toString()); + } + } + /** * Gets a list of all tag names currently available for tagging content or * blackboard artifacts. @@ -67,27 +152,35 @@ public class TagsManager implements Closeable { } /** - * RJCTODO: Discard or properly comment + * Gets a list of all tag names currently used for tagging content or + * blackboard artifacts. + * @return [out] A list, possibly empty, of TagName data transfer objects (DTOs). 
+ */ + public void getTagNamesInUse(List tagNames) { + try { + tagNames.clear(); + tskCase.getTagNamesInUse(tagNames); + } + catch (TskCoreException ex) { + Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to get tag names from the current case", ex); + } + } + + /** + * Checks whether a tag name with a given display name exists. + * @param [in] tagDisplayName The display name for which to check. + * @return True if the tag name exists, false otherwise. */ public boolean tagNameExists(String tagDisplayName) { - return tagNames.containsKey(tagDisplayName); + synchronized(lock) { + return tagNames.containsKey(tagDisplayName); + } } - /** - * RJCTODO: Discard or properly comment - */ - public TagName getTagName(String tagDisplayName) { - if (!tagNames.containsKey(tagDisplayName)) { - // RJCTODO: Throw exception - } - - return tagNames.get(tagDisplayName); - } - /** * Adds a new tag name to the current case and to the tags settings file. - * @param displayName The display name for the new tag name. - * @return A TagName object representing the new tag name on success, null on failure. + * @param [in] displayName The display name for the new tag name. + * @return A TagName data transfer object (DTO) representing the new tag name. * @throws TskCoreException */ public TagName addTagName(String displayName) throws TagNameAlreadyExistsException, TskCoreException { @@ -96,9 +189,9 @@ public class TagsManager implements Closeable { /** * Adds a new tag name to the current case and to the tags settings file. - * @param displayName The display name for the new tag name. - * @param description The description for the new tag name. - * @return A TagName object representing the new tag name on success, null on failure. + * @param [in] displayName The display name for the new tag name. + * @param [in] description The description for the new tag name. + * @return A TagName data transfer object (DTO) representing the new tag name. 
* @throws TskCoreException */ public TagName addTagName(String displayName, String description) throws TagNameAlreadyExistsException, TskCoreException { @@ -107,31 +200,34 @@ public class TagsManager implements Closeable { /** * Adds a new tag name to the current case and to the tags settings file. - * @param displayName The display name for the new tag name. - * @param description The description for the new tag name. - * @param color The HTML color to associate with the new tag name. - * @return A TagName object representing the new tag name. + * @param [in] displayName The display name for the new tag name. + * @param [in] description The description for the new tag name. + * @param [in] color The HTML color to associate with the new tag name. + * @return A TagName data transfer object (DTO) representing the new tag name. * @throws TskCoreException */ public synchronized TagName addTagName(String displayName, String description, TagName.HTML_COLOR color) throws TagNameAlreadyExistsException, TskCoreException { - if (tagNames.containsKey(displayName)) { - throw new TagNameAlreadyExistsException(); - } + synchronized(lock) { + if (tagNames.containsKey(displayName)) { + throw new TagNameAlreadyExistsException(); + } + + // Add the tag name to the case. + TagName newTagName = new TagName(displayName, description, color); + tskCase.addTagName(newTagName); + + // Add the tag name to the tags settings. + tagNames.put(newTagName.getDisplayName(), newTagName); + saveTagNamesToTagsSettings(); + + return newTagName; + } + } - TagName newTagName = new TagName(displayName, description, color); - tskCase.addTagName(newTagName); - tagNames.put(newTagName.getDisplayName(), newTagName); - saveTagNamesToTagsSettings(); - return newTagName; - } - - public class TagNameAlreadyExistsException extends Exception { - } - /** - * Tags a Content object. - * @param content The Content to tag. - * @param tagName The type of tag to add. + * Tags a content object. 
+ * @param [in] content The content to tag. + * @param [in] tagName The name to use for the tag. * @throws TskCoreException */ public void addContentTag(Content content, TagName tagName) throws TskCoreException { @@ -139,10 +235,10 @@ public class TagsManager implements Closeable { } /** - * Tags a Content object. - * @param content The Content to tag. - * @param tagName The name to use for the tag. - * @param comment A comment to store with the tag. + * Tags a content object. + * @param [in] content The content to tag. + * @param [in] tagName The name to use for the tag. + * @param [in] comment A comment to store with the tag. * @throws TskCoreException */ public void addContentTag(Content content, TagName tagName, String comment) throws TskCoreException { @@ -150,12 +246,12 @@ public class TagsManager implements Closeable { } /** - * Tags a Content object or a portion of a content object. - * @param content The Content to tag. - * @param tagName The name to use for the tag. - * @param comment A comment to store with the tag. - * @param beginByteOffset Designates the beginning of a tagged extent. - * @param endByteOffset Designates the end of a tagged extent. + * Tags a content object or a portion of a content object. + * @param [in] content The content to tag. + * @param [in] tagName The name to use for the tag. + * @param [in] comment A comment to store with the tag. + * @param [in] beginByteOffset Designates the beginning of a tagged extent. + * @param [in] endByteOffset Designates the end of a tagged extent. * @throws TskCoreException */ public void addContentTag(Content content, TagName tagName, String comment, long beginByteOffset, long endByteOffset) throws IllegalArgumentException, TskCoreException { @@ -176,7 +272,7 @@ public class TagsManager implements Closeable { /** * Deletes a content tag. - * @param tag The tag to delete. + * @param [in] tag The tag to delete. 
* @throws TskCoreException */ public void deleteContentTag(ContentTag tag) throws TskCoreException { @@ -184,36 +280,11 @@ public class TagsManager implements Closeable { } /** - * Tags a BlackboardArtifact object. - * @param artifact The BlackboardArtifact to tag. - * @param tagName The name to use for the tag. - * @throws TskCoreException + * Gets content tags by tag name. + * @param [in] tagName The tag name of interest. + * @return A list, possibly empty, of the content tags with the specified tag name. */ - public void addBlackboardArtifactTag(BlackboardArtifact artifact, TagName tagName) throws TskCoreException { - addBlackboardArtifactTag(artifact, tagName, ""); - } - - /** - * Tags a BlackboardArtifact object. - * @param artifact The BlackboardArtifact to tag. - * @param tagName The name to use for the tag. - * @param comment A comment to store with the tag. - * @throws TskCoreException - */ - public void addBlackboardArtifactTag(BlackboardArtifact artifact, TagName tagName, String comment) throws TskCoreException { - tskCase.addBlackboardArtifactTag(new BlackboardArtifactTag(artifact, tskCase.getContentById(artifact.getObjectID()), tagName, comment)); - } - - void deleteBlackboardArtifactTag(BlackboardArtifactTag tag) throws TskCoreException { - tskCase.deleteBlackboardArtifactTag(tag); - } - - /** - * RJCTODO - * @param tagName - * @return - */ - public void getContentTags(TagName tagName, List tags) { + public void getContentTagsByTagName(TagName tagName, List tags) { try { tskCase.getContentTagsByTagName(tagName, tags); } @@ -221,13 +292,43 @@ public class TagsManager implements Closeable { Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to get content tags from the current case", ex); } } - + /** - * RJCTODO - * @param tagName - * @return + * Tags a blackboard artifact object. + * @param [in] artifact The blackboard artifact to tag. + * @param [in] tagName The name to use for the tag. 
+ * @throws TskCoreException */ - public void getBlackboardArtifactTags(TagName tagName, List tags) { + public void addBlackboardArtifactTag(BlackboardArtifact artifact, TagName tagName) throws TskCoreException { + addBlackboardArtifactTag(artifact, tagName, ""); + } + + /** + * Tags a blackboard artifact object. + * @param [in] artifact The blackboard artifact to tag. + * @param [in] tagName The name to use for the tag. + * @param [in] comment A comment to store with the tag. + * @throws TskCoreException + */ + public void addBlackboardArtifactTag(BlackboardArtifact artifact, TagName tagName, String comment) throws TskCoreException { + tskCase.addBlackboardArtifactTag(new BlackboardArtifactTag(artifact, tskCase.getContentById(artifact.getObjectID()), tagName, comment)); + } + + /** + * Deletes a blackboard artifact tag. + * @param [in] tag The tag to delete. + * @throws TskCoreException + */ + public void deleteBlackboardArtifactTag(BlackboardArtifactTag tag) throws TskCoreException { + tskCase.deleteBlackboardArtifactTag(tag); + } + + /** + * Gets blackboard artifact tags by tag name. + * @param [in] tagName The tag name of interest. + * @return A list, possibly empty, of the content tags with the specified tag name. + */ + public void getBlackboardArtifactTagsByTagName(TagName tagName, List tags) { try { tskCase.getBlackboardArtifactTagsByTagName(tagName, tags); } @@ -239,62 +340,5 @@ public class TagsManager implements Closeable { @Override public void close() throws IOException { saveTagNamesToTagsSettings(); - } - - private void loadTagNamesFromTagSettings() { - // Get any tag names already defined for the current case. 
- try { - ArrayList currentTagNames = new ArrayList<>(); - tskCase.getAllTagNames(currentTagNames); - for (TagName tagName : currentTagNames) { - tagNames.put(tagName.getDisplayName(), tagName); - } - } - catch (TskCoreException ex) { - Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to get tag types from the current case", ex); - } - - // Read the saved tag names, if any, from the tags settings file and - // add them to the current case if they haven't already been added, e.g, - // when the case was last opened. - String setting = ModuleSettings.getConfigSetting(TAGS_SETTINGS_FILE_NAME, TAG_NAMES_SETTING_KEY); - if (null != setting && !setting.isEmpty()) { - // Read the tag types setting and break in into tag type tuples. - List tagNameTuples = Arrays.asList(setting.split(";")); - - // Parse each tuple and add the tag types to the current case, one - // at a time to gracefully discard any duplicates or corrupt tuples. - for (String tagNameTuple : tagNameTuples) { - String[] tagNameAttributes = tagNameTuple.split(","); - if (!tagNames.containsKey(tagNameAttributes[0])) { - TagName tagName = new TagName(tagNameAttributes[0], tagNameAttributes[1], TagName.HTML_COLOR.getColorByName(tagNameAttributes[2])); - try { - tskCase.addTagName(tagName); - tagNames.put(tagName.getDisplayName(),tagName); - } - catch(TskCoreException ex) { - Logger.getLogger(TagsManager.class.getName()).log(Level.WARNING, "Failed to add saved " + tagName.getDisplayName() + " tag name to the current case", ex); - } - } - } - - saveTagNamesToTagsSettings(); - } - } - - private void saveTagNamesToTagsSettings() { - if (!tagNames.isEmpty()) { - StringBuilder setting = new StringBuilder(); - for (TagName tagName : tagNames.values()) { - if (setting.length() != 0) { - setting.append(";"); - } - setting.append(tagName.getDisplayName()).append(","); - setting.append(tagName.getDescription()).append(","); - setting.append(tagName.getColor().name()); - } - - 
ModuleSettings.setConfigSetting(TAGS_SETTINGS_FILE_NAME, TAG_NAMES_SETTING_KEY, setting.toString()); - } } } diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/AbstractContentChildren.java b/Core/src/org/sleuthkit/autopsy/datamodel/AbstractContentChildren.java index 5df4ad782e..eaccbc35e2 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/AbstractContentChildren.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/AbstractContentChildren.java @@ -155,11 +155,6 @@ abstract class AbstractContentChildren extends Keys { return ee.new EmailExtractedRootNode(); } - @Override - public AbstractNode visit(Tags t) { - return t.new TagsRootNode(); - } - @Override public AbstractNode visit(TagsNodeKey tagsNodeKey) { return new TagsNode(); diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/AutopsyItemVisitor.java b/Core/src/org/sleuthkit/autopsy/datamodel/AutopsyItemVisitor.java index 0a3133a018..f57388ae65 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/AutopsyItemVisitor.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/AutopsyItemVisitor.java @@ -52,8 +52,6 @@ public interface AutopsyItemVisitor { T visit(EmailExtracted ee); - T visit(Tags t); - T visit(TagsNodeKey tagsNodeKey); T visit(DataSources i); @@ -136,11 +134,6 @@ public interface AutopsyItemVisitor { return defaultVisit(ee); } - @Override - public T visit(Tags t) { - return defaultVisit(t); - } - @Override public T visit(TagsNodeKey tagsNodeKey) { return defaultVisit(tagsNodeKey); diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactTagNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactTagNode.java index 5a603bbcf5..e4e61680b3 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactTagNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactTagNode.java @@ -18,11 +18,15 @@ */ package org.sleuthkit.autopsy.datamodel; +import java.util.ArrayList; +import java.util.List; import java.util.logging.Level; import 
java.util.logging.Logger; +import javax.swing.Action; import org.openide.nodes.Children; import org.openide.nodes.Sheet; import org.openide.util.lookup.Lookups; +import org.sleuthkit.autopsy.actions.DeleteBlackboardArtifactTagAction; import org.sleuthkit.datamodel.BlackboardArtifactTag; import org.sleuthkit.datamodel.TskCoreException; @@ -69,6 +73,13 @@ public class BlackboardArtifactTagNode extends DisplayableItemNode { return propertySheet; } + @Override + public Action[] getActions(boolean context) { + List actions = new ArrayList<>(); + actions.add(DeleteBlackboardArtifactTagAction.getInstance()); + return actions.toArray(new Action[0]); + } + @Override public T accept(DisplayableItemNodeVisitor v) { return v.visit(this); diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagNode.java index 2c893b3fb5..b2e5c1d08e 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagNode.java @@ -19,11 +19,15 @@ package org.sleuthkit.autopsy.datamodel; +import java.util.ArrayList; +import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; +import javax.swing.Action; import org.openide.nodes.Children; import org.openide.nodes.Sheet; import org.openide.util.lookup.Lookups; +import org.sleuthkit.autopsy.actions.DeleteContentTagAction; import org.sleuthkit.datamodel.ContentTag; import org.sleuthkit.datamodel.TskCoreException; @@ -68,6 +72,13 @@ public class ContentTagNode extends DisplayableItemNode { return propertySheet; } + @Override + public Action[] getActions(boolean context) { + List actions = new ArrayList<>(); + actions.add(DeleteContentTagAction.getInstance()); + return actions.toArray(new Action[0]); + } + @Override public T accept(DisplayableItemNodeVisitor v) { return v.visit(this); diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java 
b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java index 123d21944b..956ac6d69d 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java @@ -77,7 +77,7 @@ public class ContentTagTypeNode extends DisplayableItemNode { @Override protected boolean createKeys(List keys) { // Use the content tags bearing the specified tag name as the keys. - Case.getCurrentCase().getServices().getTagsManager().getContentTags(tagName, keys); + Case.getCurrentCase().getServices().getTagsManager().getContentTagsByTagName(tagName, keys); return true; } diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/DisplayableItemNodeVisitor.java b/Core/src/org/sleuthkit/autopsy/datamodel/DisplayableItemNodeVisitor.java index b4d820bec2..e2cef3846e 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/DisplayableItemNodeVisitor.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/DisplayableItemNodeVisitor.java @@ -30,9 +30,6 @@ import org.sleuthkit.autopsy.datamodel.HashsetHits.HashsetHitsSetNode; import org.sleuthkit.autopsy.datamodel.KeywordHits.KeywordHitsKeywordNode; import org.sleuthkit.autopsy.datamodel.KeywordHits.KeywordHitsListNode; import org.sleuthkit.autopsy.datamodel.KeywordHits.KeywordHitsRootNode; -import org.sleuthkit.autopsy.datamodel.Tags.TagNodeRoot; -import org.sleuthkit.autopsy.datamodel.Tags.TagsNodeRoot; -import org.sleuthkit.autopsy.datamodel.Tags.TagsRootNode; import org.sleuthkit.autopsy.directorytree.BlackboardArtifactTagTypeNode; /** @@ -86,12 +83,6 @@ public interface DisplayableItemNodeVisitor { T visit(EmailExtractedFolderNode eefn); - T visit(TagsRootNode bksrn); - - T visit(TagsNodeRoot bksrn); - - T visit(TagNodeRoot tnr); - T visit(TagsNode node); T visit(TagNameNode node); @@ -277,21 +268,6 @@ public interface DisplayableItemNodeVisitor { return defaultVisit(ldn); } - @Override - public T visit(TagsRootNode bksrn) { - return defaultVisit(bksrn); - } - - 
@Override - public T visit(TagsNodeRoot bksnr) { - return defaultVisit(bksnr); - } - - @Override - public T visit(TagNodeRoot tnr) { - return defaultVisit(tnr); - } - @Override public T visit(TagsNode node) { return defaultVisit(node); diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ResultsNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/ResultsNode.java index ab5159440b..d87f1f2670 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/ResultsNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/ResultsNode.java @@ -35,7 +35,6 @@ public class ResultsNode extends DisplayableItemNode { new KeywordHits(sleuthkitCase), new HashsetHits(sleuthkitCase), new EmailExtracted(sleuthkitCase), - new Tags(sleuthkitCase), //TODO move to the top of the tree new TagsNodeKey() )), Lookups.singleton(NAME)); setName(NAME); diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/RootContentChildren.java b/Core/src/org/sleuthkit/autopsy/datamodel/RootContentChildren.java index 27b1a5ac84..ff91e44112 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/RootContentChildren.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/RootContentChildren.java @@ -79,23 +79,12 @@ public class RootContentChildren extends AbstractContentChildren { case TSK_EMAIL_MSG: if (o instanceof EmailExtracted) this.refreshKey(o); - break; - - //TODO check + break; case TSK_TAG_FILE: - if (o instanceof Tags) - this.refreshKey(o); + case TSK_TAG_ARTIFACT: if (o instanceof TagsNodeKey) this.refreshKey(o); - break; - - //TODO check - case TSK_TAG_ARTIFACT: - if (o instanceof Tags) - this.refreshKey(o); - if (o instanceof TagsNodeKey) - this.refreshKey(o); - break; + break; default: if (o instanceof ExtractedContent) this.refreshKey(o); diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/Tags.java b/Core/src/org/sleuthkit/autopsy/datamodel/Tags.java index 74f6fca42b..dce6ecb3ad 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/Tags.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/Tags.java @@ 
-18,30 +18,15 @@ */ package org.sleuthkit.autopsy.datamodel; -import java.awt.event.ActionEvent; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; -import java.util.Arrays; -import java.util.EnumMap; -import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.Map; import java.util.TreeSet; import java.util.logging.Level; -import javax.swing.AbstractAction; -import javax.swing.Action; -import org.openide.nodes.ChildFactory; -import org.openide.nodes.Children; -import org.openide.nodes.Node; -import org.openide.nodes.Sheet; -import org.openide.util.Lookup; -import org.openide.util.lookup.Lookups; import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.corecomponentinterfaces.BlackboardResultViewer; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.coreutils.ModuleSettings; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; @@ -50,382 +35,12 @@ import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; -/** - * - * Support for tags in the directory tree. Tag nodes representing file and - * result tags, encapsulate TSK_TAG_FILE and TSK_TAG_ARTIFACT typed artifacts. - * - * The class implements querying of data model and populating node hierarchy - * using child factories. 
- * - */ -public class Tags implements AutopsyVisitableItem { +public class Tags { private static final Logger logger = Logger.getLogger(Tags.class.getName()); - private static final String FILE_TAG_LABEL_NAME = "File Tags"; - private static final String RESULT_TAG_LABEL_NAME = "Result Tags"; - private SleuthkitCase skCase; - public static final String NAME = "Tags"; - private static final String TAG_ICON_PATH = "org/sleuthkit/autopsy/images/tag-folder-blue-icon-16.png"; - //bookmarks are specializations of tags public static final String BOOKMARK_TAG_NAME = "Bookmark"; - private static final String BOOKMARK_ICON_PATH = "org/sleuthkit/autopsy/images/star-bookmark-icon-16.png"; - private Map>> tags; private static final String EMPTY_COMMENT = ""; - private static final String APP_SETTINGS_FILE_NAME = "app"; // @@@ TODO: Need a general app settings or user preferences file, this will do for now. - private static final String TAG_NAMES_SETTING_KEY = "tag_names"; - private static final HashSet appSettingTagNames = new HashSet<>(); - private static final StringBuilder tagNamesAppSetting = new StringBuilder(); - - // When this class is loaded, either create an new app settings file or - // get the tag names setting from the existing app settings file. - static { - String setting = ModuleSettings.getConfigSetting(APP_SETTINGS_FILE_NAME, TAG_NAMES_SETTING_KEY); - if (null != setting && !setting.isEmpty()) { - // Make a speedy lookup for the tag names in the setting to aid in the - // detection of new tag names. - List tagNamesFromAppSettings = Arrays.asList(setting.split(",")); - for (String tagName : tagNamesFromAppSettings) { - appSettingTagNames.add(tagName); - } - - // Load the raw comma separated values list from the setting into a - // string builder to facilitate adding new tag names to the list and writing - // it back to the app settings file. 
- tagNamesAppSetting.append(setting); - } - } - Tags(SleuthkitCase skCase) { - this.skCase = skCase; - } - - @Override - public T accept(AutopsyItemVisitor v) { - return v.visit(this); - } - - /** - * Root of all Tag nodes. This node is shown directly under Results in the - * directory tree. - */ - public class TagsRootNode extends DisplayableItemNode { - - public TagsRootNode() { - super(Children.create(new Tags.TagsRootChildren(), true), Lookups.singleton(NAME)); - super.setName(NAME); - super.setDisplayName(NAME); - this.setIconBaseWithExtension(TAG_ICON_PATH); - initData(); - } - - private void initData() { - try { - // Get all file and artifact tags - - //init data - tags = new EnumMap<>(BlackboardArtifact.ARTIFACT_TYPE.class); - tags.put(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE, new HashMap>()); - tags.put(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT, new HashMap>()); - - //populate - for (BlackboardArtifact.ARTIFACT_TYPE artType : tags.keySet()) { - final Map> artTags = tags.get(artType); - for (BlackboardArtifact artifact : skCase.getBlackboardArtifacts(artType)) { - for (BlackboardAttribute attribute : artifact.getAttributes()) { - if (attribute.getAttributeTypeID() == ATTRIBUTE_TYPE.TSK_TAG_NAME.getTypeID()) { - String tagName = attribute.getValueString(); - if (artTags.containsKey(tagName)) { - List artifacts = artTags.get(tagName); - artifacts.add(artifact); - } else { - List artifacts = new ArrayList<>(); - artifacts.add(artifact); - artTags.put(tagName, artifacts); - } - break; - } - } - } - } - - - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Count not initialize tag nodes", ex); - } - } - - @Override - public boolean isLeafTypeNode() { - return false; - } - - @Override - public T accept(DisplayableItemNodeVisitor v) { - return v.visit(this); - } - - @Override - protected Sheet createSheet() { - Sheet s = super.createSheet(); - Sheet.Set ss = s.get(Sheet.PROPERTIES); - if (ss == null) { - ss = Sheet.createPropertiesSet(); - 
s.put(ss); - } - - ss.put(new NodeProperty("Name", - "Name", - "no description", - getName())); - - return s; - } - } - - /** - * bookmarks root child node creating types of bookmarks nodes - */ - private class TagsRootChildren extends ChildFactory { - - @Override - protected boolean createKeys(List list) { - for (BlackboardArtifact.ARTIFACT_TYPE artType : tags.keySet()) { - list.add(artType); - } - - return true; - } - - @Override - protected Node createNodeForKey(BlackboardArtifact.ARTIFACT_TYPE key) { - return new TagsNodeRoot(key, tags.get(key)); - } - } - - /** - * Tag node representation (file or result) - */ - public class TagsNodeRoot extends DisplayableItemNode { - - TagsNodeRoot(BlackboardArtifact.ARTIFACT_TYPE tagType, Map> subTags) { - super(Children.create(new TagRootChildren(tagType, subTags), true), Lookups.singleton(tagType.getDisplayName())); - - String name = null; - if (tagType.equals(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE)) { - name = FILE_TAG_LABEL_NAME; - } else if (tagType.equals(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT)) { - name = RESULT_TAG_LABEL_NAME; - } - - super.setName(name); - super.setDisplayName(name + " (" + subTags.values().size() + ")"); - - this.setIconBaseWithExtension(TAG_ICON_PATH); - } - - @Override - protected Sheet createSheet() { - Sheet s = super.createSheet(); - Sheet.Set ss = s.get(Sheet.PROPERTIES); - if (ss == null) { - ss = Sheet.createPropertiesSet(); - s.put(ss); - } - - ss.put(new NodeProperty("Name", - "Name", - "no description", - getName())); - - return s; - } - - @Override - public T accept(DisplayableItemNodeVisitor v) { - return v.visit(this); - } - - @Override - public boolean isLeafTypeNode() { - return false; - } - } - - /** - * Child factory to add all the Tag artifacts to a TagsRootNode with the tag - * name. 
- */ - private class TagRootChildren extends ChildFactory { - - private Map> subTags; - private BlackboardArtifact.ARTIFACT_TYPE tagType; - - TagRootChildren(BlackboardArtifact.ARTIFACT_TYPE tagType, Map> subTags) { - super(); - this.tagType = tagType; - this.subTags = subTags; - } - - @Override - protected boolean createKeys(List list) { - list.addAll(subTags.keySet()); - - return true; - } - - @Override - protected Node createNodeForKey(String key) { - return new Tags.TagNodeRoot(tagType, key, subTags.get(key)); - } - } - - /** - * Node for each unique tag name. Shown directly under Results > Tags. - */ - public class TagNodeRoot extends DisplayableItemNode { - - TagNodeRoot(BlackboardArtifact.ARTIFACT_TYPE tagType, String tagName, List artifacts) { - super(Children.create(new Tags.TagsChildrenNode(tagType, tagName, artifacts), true), Lookups.singleton(tagName)); - - super.setName(tagName); - super.setDisplayName(tagName + " (" + artifacts.size() + ")"); - - if (tagName.equals(BOOKMARK_TAG_NAME)) { - this.setIconBaseWithExtension(BOOKMARK_ICON_PATH); - } else { - this.setIconBaseWithExtension(TAG_ICON_PATH); - } - } - - @Override - protected Sheet createSheet() { - Sheet s = super.createSheet(); - Sheet.Set ss = s.get(Sheet.PROPERTIES); - if (ss == null) { - ss = Sheet.createPropertiesSet(); - s.put(ss); - } - - ss.put(new NodeProperty("Name", - "Name", - "no description", - getName())); - - return s; - } - - @Override - public T accept(DisplayableItemNodeVisitor v) { - return v.visit(this); - } - - @Override - public boolean isLeafTypeNode() { - return true; - } - } - - /** - * Node representing an individual Tag artifact. For each TagsNodeRoot under - * Results > Tags, this is one of the nodes listed in the result viewer. 
- */ - private class TagsChildrenNode extends ChildFactory { - - private List artifacts; - private BlackboardArtifact.ARTIFACT_TYPE tagType; - private String tagName; - - private TagsChildrenNode(BlackboardArtifact.ARTIFACT_TYPE tagType, String tagName, List artifacts) { - super(); - this.tagType = tagType; - this.tagName = tagName; - this.artifacts = artifacts; - } - - @Override - protected boolean createKeys(List list) { - list.addAll(artifacts); - return true; - } - - @Override - protected Node createNodeForKey(final BlackboardArtifact artifact) { - //create node with action - BlackboardArtifactNode tagNode = null; - - String iconPath; - if (tagName.equals(BOOKMARK_TAG_NAME)) { - iconPath = BOOKMARK_ICON_PATH; - } else { - iconPath = TAG_ICON_PATH; - } - - //create actions here where Tag logic belongs - //instead of DataResultFilterNode w/visitors, which is much less pluggable and cluttered - if (tagType.equals(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT)) { - //in case of result tag, add a action by sublcassing bb art node - //this action will be merged with other actions set DataResultFIlterNode - //otherwise in case of - tagNode = new BlackboardArtifactNode(artifact, iconPath) { - @Override - public Action[] getActions(boolean bln) { - //Action [] actions = super.getActions(bln); //To change body of generated methods, choose Tools | Templates. 
- Action[] actions = new Action[1]; - actions[0] = new AbstractAction("View Source Result") { - @Override - public void actionPerformed(ActionEvent e) { - //open the source artifact in dir tree - BlackboardArtifact sourceArt = Tags.getArtifactFromTag(artifact.getArtifactID()); - if (sourceArt != null) { - BlackboardResultViewer v = Lookup.getDefault().lookup(BlackboardResultViewer.class); - v.viewArtifact(sourceArt); - } - } - }; - return actions; - } - }; - } else { - //for file tag, don't subclass to add the additional actions - tagNode = new BlackboardArtifactNode(artifact, iconPath); - } - - //add some additional node properties - int artifactTypeID = artifact.getArtifactTypeID(); - final String NO_DESCR = "no description"; - if (artifactTypeID == BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getTypeID()) { - BlackboardArtifact sourceResult = Tags.getArtifactFromTag(artifact.getArtifactID()); - String resultType = sourceResult.getDisplayName(); - - NodeProperty resultTypeProp = new NodeProperty("Source Result Type", - "Result Type", - NO_DESCR, - resultType); - - - tagNode.addNodeProperty(resultTypeProp); - - } - try { - //add source path property - final AbstractFile sourceFile = skCase.getAbstractFileById(artifact.getObjectID()); - final String sourcePath = sourceFile.getUniquePath(); - NodeProperty sourcePathProp = new NodeProperty("Source File Path", - "Source File Path", - NO_DESCR, - sourcePath); - - - tagNode.addNodeProperty(sourcePathProp); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error getting a file from artifact to get source file path for a tag, ", ex); - } - - return tagNode; - } - } - /** * Create a tag for a file with TSK_TAG_NAME as tagName. 
* @@ -448,9 +63,7 @@ public class Tags implements AutopsyVisitableItem { "", comment); attrs.add(attr2); } - bookArt.addAttributes(attrs); - - updateTagNamesAppSetting(tagName); + bookArt.addAttributes(attrs); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Failed to create tag for " + file.getName(), ex); @@ -488,30 +101,13 @@ public class Tags implements AutopsyVisitableItem { attrs.add(attr1); attrs.add(attr3); - bookArt.addAttributes(attrs); - - updateTagNamesAppSetting(tagName); + bookArt.addAttributes(attrs); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Failed to create tag for artifact " + artifact.getArtifactID(), ex); } } - private static void updateTagNamesAppSetting(String tagName) { - // If this tag name is not in the current tag names app setting... - if (!appSettingTagNames.contains(tagName)) { - // Add it to the lookup. - appSettingTagNames.add(tagName); - - // Add it to the setting and write the setting back to the app settings file. - if (tagNamesAppSetting.length() != 0) { - tagNamesAppSetting.append(","); - } - tagNamesAppSetting.append(tagName); - ModuleSettings.setConfigSetting(APP_SETTINGS_FILE_NAME, TAG_NAMES_SETTING_KEY, tagNamesAppSetting.toString()); - } - } - /** * Create a bookmark tag for a file. * @@ -557,7 +153,6 @@ public class Tags implements AutopsyVisitableItem { public static TreeSet getAllTagNames() { // Use a TreeSet<> so the union of the tag names from the two sources will be sorted. TreeSet tagNames = getTagNamesFromCurrentCase(); - tagNames.addAll(appSettingTagNames); // Make sure the book mark tag is always included. 
tagNames.add(BOOKMARK_TAG_NAME); diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java index e5d121ada6..3f1813a647 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java @@ -43,6 +43,10 @@ public class TagsNode extends DisplayableItemNode { this.setIconBaseWithExtension(ICON_PATH); } + public static String getNodeName() { + return DISPLAY_NAME; + } + @Override public boolean isLeafTypeNode() { return false; @@ -70,7 +74,7 @@ public class TagsNode extends DisplayableItemNode { private static class TagNameNodeFactory extends ChildFactory { @Override protected boolean createKeys(List keys) { - Case.getCurrentCase().getServices().getTagsManager().getAllTagNames(keys); // RJCTODO: Change this call to filtered call + Case.getCurrentCase().getServices().getTagsManager().getAllTagNames(keys); return true; } diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java b/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java index 7eb1e21a4e..b7acf5dd72 100755 --- a/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java @@ -81,7 +81,7 @@ public class BlackboardArtifactTagTypeNode extends DisplayableItemNode { @Override protected boolean createKeys(List keys) { // Use the blackboard artifact tags bearing the specified tag name as the keys. 
- Case.getCurrentCase().getServices().getTagsManager().getBlackboardArtifactTags(tagName, keys); + Case.getCurrentCase().getServices().getTagsManager().getBlackboardArtifactTagsByTagName(tagName, keys); return true; } diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/DataResultFilterNode.java b/Core/src/org/sleuthkit/autopsy/directorytree/DataResultFilterNode.java index 7303734772..1619c18ccf 100755 --- a/Core/src/org/sleuthkit/autopsy/directorytree/DataResultFilterNode.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/DataResultFilterNode.java @@ -63,8 +63,6 @@ import org.sleuthkit.autopsy.datamodel.LayoutFileNode; import org.sleuthkit.autopsy.datamodel.RecentFilesFilterNode; import org.sleuthkit.autopsy.datamodel.RecentFilesNode; import org.sleuthkit.autopsy.datamodel.FileTypesNode; -import org.sleuthkit.autopsy.datamodel.Tags.TagNodeRoot; -import org.sleuthkit.autopsy.datamodel.Tags.TagsNodeRoot; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardAttribute; @@ -405,16 +403,6 @@ public class DataResultFilterNode extends FilterNode { return openChild(atn); } - @Override - public AbstractAction visit(TagNodeRoot tnr) { - return openChild(tnr); - } - - @Override - public AbstractAction visit(TagsNodeRoot tnr) { - return openChild(tnr); - } - @Override public AbstractAction visit(DirectoryNode dn) { if (dn.getDisplayName().equals(DirectoryNode.DOTDOTDIR)) { From 18d47f3f39ecfdfa050107bb4254ad875270cd65 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Thu, 17 Oct 2013 11:45:53 -0400 Subject: [PATCH 054/179] New tags API work --- .../casemodule/services/TagsManager.java | 19 +- .../autopsy/datamodel/Bookmarks.java | 298 ------------------ .../org/sleuthkit/autopsy/datamodel/Tags.java | 226 ------------- .../report/ArtifactSelectionDialog.java | 2 + .../autopsy/report/ReportGenerator.java | 15 +- .../sleuthkit/autopsy/report/ReportHTML.java | 2 +- 
.../autopsy/report/ReportVisualPanel2.java | 22 +- 7 files changed, 40 insertions(+), 544 deletions(-) delete mode 100644 Core/src/org/sleuthkit/autopsy/datamodel/Bookmarks.java diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java index afab0a237b..4e8d5ec869 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java @@ -328,7 +328,7 @@ public class TagsManager implements Closeable { * @param [in] tagName The tag name of interest. * @return A list, possibly empty, of the content tags with the specified tag name. */ - public void getBlackboardArtifactTagsByTagName(TagName tagName, List tags) { + public void getBlackboardArtifactTagsByTagName(TagName tagName, List tags) { try { tskCase.getBlackboardArtifactTagsByTagName(tagName, tags); } @@ -336,7 +336,22 @@ public class TagsManager implements Closeable { Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to get blackboard artifact tags from the current case", ex); } } - + + /** + * Gets blackboard artifact tags for a particular blackboard artifact. + * @param [in] artifact The blackboard artifact of interest. + * @param [out] tags A list, possibly empty, of the tags that have been applied to the artifact. 
+ * @throws TskCoreException + */ + public void getBlackboardArtifactTagsByArtifact(BlackboardArtifact artifact, List tags) { + try { + tskCase.getBlackboardArtifactTagsByArtifact(artifact, tags); + } + catch (TskCoreException ex) { + Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to get blackboard artifact tags from the current case", ex); + } + } + @Override public void close() throws IOException { saveTagNamesToTagsSettings(); diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/Bookmarks.java b/Core/src/org/sleuthkit/autopsy/datamodel/Bookmarks.java deleted file mode 100644 index e3055f22b7..0000000000 --- a/Core/src/org/sleuthkit/autopsy/datamodel/Bookmarks.java +++ /dev/null @@ -1,298 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2012 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.sleuthkit.autopsy.datamodel; - -import java.awt.event.ActionEvent; -import java.util.EnumMap; -import java.util.List; -import java.util.Map; -import java.util.logging.Level; -import javax.swing.AbstractAction; -import javax.swing.Action; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.openide.nodes.ChildFactory; -import org.openide.nodes.Children; -import org.openide.nodes.Node; -import org.openide.nodes.Sheet; -import org.openide.util.Lookup; -import org.openide.util.lookup.Lookups; -import org.sleuthkit.autopsy.corecomponentinterfaces.BlackboardResultViewer; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardAttribute; -import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; -import org.sleuthkit.datamodel.SleuthkitCase; -import org.sleuthkit.datamodel.TskCoreException; - -/** - * Support for bookmark (file and result/artifact) nodes and displaying - * bookmarks in the directory tree Bookmarks are divided into file and result - * children bookmarks. 
- * - * Bookmarks are specialized tags - TSK_TAG_NAME starts with File Bookmark or - * Result Bookmark - * - * @deprecated cosolidated under Tags - * - * TODO bookmark hierarchy support (TSK_TAG_NAME with slashes) - */ -@Deprecated -public class Bookmarks implements AutopsyVisitableItem { - - public static final String NAME = "Bookmarks"; - private static final String FILE_BOOKMARKS_LABEL_NAME = "File Bookmarks"; - private static final String RESULT_BOOKMARKS_LABEL_NAME = "Result Bookmarks"; - //bookmarks are specializations of tags - public static final String BOOKMARK_TAG_NAME = "Bookmark"; - private static final String BOOKMARK_ICON_PATH = "org/sleuthkit/autopsy/images/star-bookmark-icon-16.png"; - private static final Logger logger = Logger.getLogger(Bookmarks.class.getName()); - private SleuthkitCase skCase; - private final Map> data = - new EnumMap>(BlackboardArtifact.ARTIFACT_TYPE.class); - - public Bookmarks(SleuthkitCase skCase) { - this.skCase = skCase; - - } - - @Override - public T accept(AutopsyItemVisitor v) { - return null; //v.visit(this); - } - - /** - * bookmarks root node with file/result bookmarks - */ - public class BookmarksRootNode extends DisplayableItemNode { - - public BookmarksRootNode() { - super(Children.create(new BookmarksRootChildren(), true), Lookups.singleton(NAME)); - super.setName(NAME); - super.setDisplayName(NAME); - this.setIconBaseWithExtension(BOOKMARK_ICON_PATH); - initData(); - } - - private void initData() { - data.put(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE, null); - data.put(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT, null); - - try { - - //filter out tags that are not bookmarks - //we get bookmarks that have tag names that start with predefined names, preserving the bookmark hierarchy - List tagFiles = skCase.getBlackboardArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE, - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAG_NAME, - BOOKMARK_TAG_NAME); - List tagArtifacts = 
skCase.getBlackboardArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT, - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAG_NAME, - BOOKMARK_TAG_NAME); - - data.put(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE, tagFiles); - data.put(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT, tagArtifacts); - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Count not initialize bookmark nodes, ", ex); - } - - - } - - @Override - public boolean isLeafTypeNode() { - return false; - } - - @Override - public T accept(DisplayableItemNodeVisitor v) { - return null; // v.visit(this); - } - - @Override - protected Sheet createSheet() { - Sheet s = super.createSheet(); - Sheet.Set ss = s.get(Sheet.PROPERTIES); - if (ss == null) { - ss = Sheet.createPropertiesSet(); - s.put(ss); - } - - ss.put(new NodeProperty("Name", - "Name", - "no description", - getName())); - - return s; - } - } - - /** - * bookmarks root child node creating types of bookmarks nodes - */ - private class BookmarksRootChildren extends ChildFactory { - - @Override - protected boolean createKeys(List list) { - for (BlackboardArtifact.ARTIFACT_TYPE artType : data.keySet()) { - list.add(artType); - } - - return true; - } - - @Override - protected Node createNodeForKey(BlackboardArtifact.ARTIFACT_TYPE key) { - return new BookmarksNodeRoot(key, data.get(key)); - } - } - - /** - * Bookmarks node representation (file or result) - */ - public class BookmarksNodeRoot extends DisplayableItemNode { - - public BookmarksNodeRoot(BlackboardArtifact.ARTIFACT_TYPE bookType, List bookmarks) { - super(Children.create(new BookmarksChildrenNode(bookmarks), true), Lookups.singleton(bookType.getDisplayName())); - - String name = null; - if (bookType.equals(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE)) { - name = FILE_BOOKMARKS_LABEL_NAME; - } else if (bookType.equals(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT)) { - name = RESULT_BOOKMARKS_LABEL_NAME; - } - - super.setName(name); - super.setDisplayName(name + " (" 
+ bookmarks.size() + ")"); - - this.setIconBaseWithExtension(BOOKMARK_ICON_PATH); - } - - @Override - protected Sheet createSheet() { - Sheet s = super.createSheet(); - Sheet.Set ss = s.get(Sheet.PROPERTIES); - if (ss == null) { - ss = Sheet.createPropertiesSet(); - s.put(ss); - } - - ss.put(new NodeProperty("Name", - "Name", - "no description", - getName())); - - return s; - } - - @Override - public boolean isLeafTypeNode() { - return false; - } - - @Override - public T accept(DisplayableItemNodeVisitor v) { - return null; //v.visit(this); - } - } - - /** - * Node representing mail folder content (mail messages) - */ - private class BookmarksChildrenNode extends ChildFactory { - - private List bookmarks; - - private BookmarksChildrenNode(List bookmarks) { - super(); - this.bookmarks = bookmarks; - } - - @Override - protected boolean createKeys(List list) { - list.addAll(bookmarks); - return true; - } - - @Override - protected Node createNodeForKey(BlackboardArtifact artifact) { - BlackboardArtifactNode bookmarkNode = null; - - int artifactTypeID = artifact.getArtifactTypeID(); - if (artifactTypeID == BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getTypeID()) { - final BlackboardArtifact sourceResult = Tags.getArtifactFromTag(artifact.getArtifactID()); - bookmarkNode = new BlackboardArtifactNode(artifact, BOOKMARK_ICON_PATH) { - @Override - public Action[] getActions(boolean bln) { - //Action [] actions = super.getActions(bln); //To change body of generated methods, choose Tools | Templates. 
- Action[] actions = new Action[1]; - actions[0] = new AbstractAction("View Source Result") { - @Override - public void actionPerformed(ActionEvent e) { - //open the source artifact in dir tree - if (sourceResult != null) { - BlackboardResultViewer v = Lookup.getDefault().lookup(BlackboardResultViewer.class); - v.viewArtifact(sourceResult); - } - } - }; - return actions; - } - }; - - //add custom property - final String NO_DESCR = "no description"; - String resultType = sourceResult.getDisplayName(); - NodeProperty resultTypeProp = new NodeProperty("Source Result Type", - "Result Type", - NO_DESCR, - resultType); - bookmarkNode.addNodeProperty(resultTypeProp); - - } else { - //file bookmark, no additional action - bookmarkNode = new BlackboardArtifactNode(artifact, BOOKMARK_ICON_PATH); - - } - return bookmarkNode; - } - } - - /** - * Links existing blackboard artifact (a tag) to this artifact. Linkage is - * made using TSK_TAGGED_ARTIFACT attribute. - */ - void addArtifactTag(BlackboardArtifact art, BlackboardArtifact tag) throws TskCoreException { - if (art.equals(tag)) { - throw new TskCoreException("Cannot tag the same artifact: id" + art.getArtifactID()); - } - BlackboardAttribute attrLink = new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TAGGED_ARTIFACT.getTypeID(), - "", art.getArtifactID()); - tag.addAttribute(attrLink); - } - - /** - * Get tag artifacts linked to the artifact - * - * @param art artifact to get tags for - * @return list of children artifacts or an empty list - * @throws TskCoreException exception thrown if a critical error occurs - * within tsk core and child artifact could not be queried - */ - List getTagArtifacts(BlackboardArtifact art) throws TskCoreException { - return skCase.getBlackboardArtifacts(ATTRIBUTE_TYPE.TSK_TAGGED_ARTIFACT, art.getArtifactID()); - } -} \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/Tags.java b/Core/src/org/sleuthkit/autopsy/datamodel/Tags.java index dce6ecb3ad..03a749cfb3 100644 
--- a/Core/src/org/sleuthkit/autopsy/datamodel/Tags.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/Tags.java @@ -18,249 +18,23 @@ */ package org.sleuthkit.autopsy.datamodel; -import java.sql.ResultSet; -import java.sql.SQLException; import java.util.ArrayList; import java.util.HashSet; import java.util.List; -import java.util.TreeSet; import java.util.logging.Level; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; -import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; public class Tags { private static final Logger logger = Logger.getLogger(Tags.class.getName()); public static final String BOOKMARK_TAG_NAME = "Bookmark"; - private static final String EMPTY_COMMENT = ""; - /** - * Create a tag for a file with TSK_TAG_NAME as tagName. 
- * - * @param file to create tag for - * @param tagName TSK_TAG_NAME - * @param comment the tag comment, or null if not present - */ - public static void createTag(AbstractFile file, String tagName, String comment) { - try { - final BlackboardArtifact bookArt = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE); - List attrs = new ArrayList<>(); - - - BlackboardAttribute attr1 = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAG_NAME.getTypeID(), - "", tagName); - attrs.add(attr1); - - if (comment != null && !comment.isEmpty()) { - BlackboardAttribute attr2 = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID(), - "", comment); - attrs.add(attr2); - } - bookArt.addAttributes(attrs); - } - catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Failed to create tag for " + file.getName(), ex); - } - } - - /** - * Create a tag for an artifact with TSK_TAG_NAME as tagName. - * - * @param artifact to create tag for - * @param tagName TSK_TAG_NAME - * @param comment the tag comment or null if not present - */ - public static void createTag(BlackboardArtifact artifact, String tagName, String comment) { - try { - Case currentCase = Case.getCurrentCase(); - SleuthkitCase skCase = currentCase.getSleuthkitCase(); - - AbstractFile file = skCase.getAbstractFileById(artifact.getObjectID()); - final BlackboardArtifact bookArt = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT); - List attrs = new ArrayList<>(); - - - BlackboardAttribute attr1 = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAG_NAME.getTypeID(), - "", tagName); - - if (comment != null && !comment.isEmpty()) { - BlackboardAttribute attr2 = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID(), - "", comment); - attrs.add(attr2); - } - - BlackboardAttribute attr3 = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAGGED_ARTIFACT.getTypeID(), - "", artifact.getArtifactID()); - 
attrs.add(attr1); - - attrs.add(attr3); - bookArt.addAttributes(attrs); - } - catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Failed to create tag for artifact " + artifact.getArtifactID(), ex); - } - } - - /** - * Create a bookmark tag for a file. - * - * @param file to create bookmark tag for - * @param comment the bookmark comment - */ - public static void createBookmark(AbstractFile file, String comment) { - createTag(file, Tags.BOOKMARK_TAG_NAME, comment); - } - - /** - * Create a bookmark tag for an artifact. - * - * @param artifact to create bookmark tag for - * @param comment the bookmark comment - */ - public static void createBookmark(BlackboardArtifact artifact, String comment) { - createTag(artifact, Tags.BOOKMARK_TAG_NAME, comment); - } - - /** - * Get a list of all the bookmarks. - * - * @return a list of all bookmark artifacts - */ - static List getBookmarks() { - try { - Case currentCase = Case.getCurrentCase(); - SleuthkitCase skCase = currentCase.getSleuthkitCase(); - return skCase.getBlackboardArtifacts(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAG_NAME, Tags.BOOKMARK_TAG_NAME); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Failed to get list of artifacts from the case", ex); - } - return new ArrayList<>(); - } - - /** - * Get a list of all the unique tag names associated with the current case plus any - * tag names stored in the application settings file. - * - * @return A collection of tag names. - */ - public static TreeSet getAllTagNames() { - // Use a TreeSet<> so the union of the tag names from the two sources will be sorted. - TreeSet tagNames = getTagNamesFromCurrentCase(); - - // Make sure the book mark tag is always included. - tagNames.add(BOOKMARK_TAG_NAME); - - return tagNames; - } - - /** - * Get a list of all the unique tag names associated with the current case. - * Uses a custom query for speed when dealing with thousands of tags. - * - * @return A collection of tag names. 
- */ - @SuppressWarnings("deprecation") - public static TreeSet getTagNamesFromCurrentCase() { - TreeSet tagNames = new TreeSet<>(); - - ResultSet rs = null; - SleuthkitCase skCase = null; - try { - skCase = Case.getCurrentCase().getSleuthkitCase(); - rs = skCase.runQuery("SELECT value_text" - + " FROM blackboard_attributes" - + " WHERE attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAG_NAME.getTypeID() - + " GROUP BY value_text" - + " ORDER BY value_text"); - while (rs.next()) { - tagNames.add(rs.getString("value_text")); - } - } - catch (IllegalStateException ex) { - // Case.getCurrentCase() throws IllegalStateException if there is no current autopsy case. - } - catch (SQLException ex) { - logger.log(Level.SEVERE, "Failed to query the blackboard for tag names", ex); - } - finally { - if (null != skCase && null != rs) { - try { - skCase.closeRunQuery(rs); - } catch (SQLException ex) { - logger.log(Level.SEVERE, "Failed to close the query for blackboard for tag names", ex); - } - } - } - - // Make sure the book mark tag is always included. - tagNames.add(BOOKMARK_TAG_NAME); - - return tagNames; - } - - /** - * Get the tag comment for a specified tag. 
- * - * @param tagArtifactId artifact id of the tag - * @return the tag comment - */ - static String getCommentFromTag(long tagArtifactId) { - try { - Case currentCase = Case.getCurrentCase(); - SleuthkitCase skCase = currentCase.getSleuthkitCase(); - - BlackboardArtifact artifact = skCase.getBlackboardArtifact(tagArtifactId); - if (artifact.getArtifactTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE.getTypeID() - || artifact.getArtifactTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getTypeID()) { - List attributes = artifact.getAttributes(); - for (BlackboardAttribute att : attributes) { - if (att.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID()) { - return att.getValueString(); - } - } - } - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Failed to get artifact " + tagArtifactId + " from case", ex); - } - - return EMPTY_COMMENT; - } - - /** - * Get the artifact for a result tag. - * - * @param tagArtifactId artifact id of the tag - * @return the tag's artifact - */ - static BlackboardArtifact getArtifactFromTag(long tagArtifactId) { - try { - Case currentCase = Case.getCurrentCase(); - SleuthkitCase skCase = currentCase.getSleuthkitCase(); - - BlackboardArtifact artifact = skCase.getBlackboardArtifact(tagArtifactId); - if (artifact.getArtifactTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE.getTypeID() - || artifact.getArtifactTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getTypeID()) { - List attributes = artifact.getAttributes(); - for (BlackboardAttribute att : attributes) { - if (att.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAGGED_ARTIFACT.getTypeID()) { - return skCase.getBlackboardArtifact(att.getValueLong()); - } - } - } - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Failed to get artifact " + tagArtifactId + " from case."); - } - - return null; - } - /** * Looks up the tag names associated with either a tagged artifact or a 
tag artifact. * diff --git a/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.java b/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.java index 24ed36f302..96f417f72b 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.java +++ b/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.java @@ -66,6 +66,8 @@ public class ArtifactSelectionDialog extends javax.swing.JDialog { try { ArrayList doNotReport = new ArrayList(); doNotReport.add(BlackboardArtifact.ARTIFACT_TYPE.TSK_GEN_INFO); + doNotReport.add(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE); // Obsolete artifact type + doNotReport.add(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT); // Obsolete artifact type artifacts = Case.getCurrentCase().getSleuthkitCase().getBlackboardArtifactTypesInUse(); diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java b/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java index 889ca07328..9064532933 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java @@ -34,7 +34,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.Comparator; import java.util.Date; import java.util.HashMap; import java.util.Iterator; @@ -56,9 +55,9 @@ import org.sleuthkit.autopsy.report.ReportProgressPanel.ReportStatus; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +import org.sleuthkit.datamodel.BlackboardArtifactTag; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; -import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; @@ -512,14 +511,18 @@ public class ReportGenerator { private List 
getFilteredArtifacts(ARTIFACT_TYPE type, HashSet tagNamesFilter) { List artifacts = new ArrayList<>(); try { - // For every artifact of the current type, add it and it's attributes to a list for (BlackboardArtifact artifact : skCase.getBlackboardArtifacts(type)) { - HashSet tags = Tags.getUniqueTagNamesForArtifact(artifact); - if(failsTagFilter(tags, tagNamesFilter)) { + ArrayList tags = new ArrayList<>(); + Case.getCurrentCase().getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact, tags); + HashSet uniqueTagNames = new HashSet<>(); + for (BlackboardArtifactTag tag : tags) { + uniqueTagNames.add(tag.getName().getDisplayName()); + } + if(failsTagFilter(uniqueTagNames, tagNamesFilter)) { continue; } try { - artifacts.add(new ArtifactData(artifact, skCase.getBlackboardAttributes(artifact), tags)); + artifacts.add(new ArtifactData(artifact, skCase.getBlackboardAttributes(artifact), uniqueTagNames)); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Failed to get Blackboard Attributes when generating report.", ex); } diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java b/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java index aa48b125f0..4b5492ebec 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java @@ -599,7 +599,7 @@ public class ReportHTML implements TableReportModule { // Make a folder for the local file with the same name as the tag. 
StringBuilder localFilePath = new StringBuilder(); - localFilePath.append(path); + localFilePath.append(path); HashSet tagNames = Tags.getUniqueTagNamesForArtifact(sourceArtifact); if (!tagNames.isEmpty()) { localFilePath.append(tagNames.iterator().next()); diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.java b/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.java index e374135aa9..074892d84c 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.java @@ -38,13 +38,12 @@ import javax.swing.ListModel; import javax.swing.event.ListDataListener; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.datamodel.Tags; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +import org.sleuthkit.datamodel.TagName; import org.sleuthkit.datamodel.TskCoreException; public final class ReportVisualPanel2 extends JPanel { - private static final Logger logger = Logger.getLogger(ReportVisualPanel2.class.getName()); private ReportWizardPanel2 wizPanel; private Map tagStates = new LinkedHashMap<>(); @@ -73,8 +72,11 @@ public final class ReportVisualPanel2 extends JPanel { // Initialize the list of Tags private void initTags() { - for(String tag : Tags.getTagNamesFromCurrentCase()) { - tagStates.put(tag, Boolean.FALSE); + ArrayList tagNamesInUse = new ArrayList<>(); + Case.getCurrentCase().getServices().getTagsManager().getTagNamesInUse(tagNamesInUse); + + for(TagName tagName : tagNamesInUse) { + tagStates.put(tagName.getDisplayName(), Boolean.FALSE); } tags.addAll(tagStates.keySet()); @@ -95,16 +97,17 @@ public final class ReportVisualPanel2 extends JPanel { list.repaint(); updateFinishButton(); } - }); - + }); } // Initialize the list of Artifacts private void initArtifactTypes() { try { - ArrayList doNotReport = new ArrayList(); + ArrayList 
doNotReport = new ArrayList(); doNotReport.add(BlackboardArtifact.ARTIFACT_TYPE.TSK_GEN_INFO); + doNotReport.add(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE); // Obsolete artifact type + doNotReport.add(BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT); // Obsolete artifact type artifacts = Case.getCurrentCase().getSleuthkitCase().getBlackboardArtifactTypesInUse(); @@ -116,7 +119,6 @@ public final class ReportVisualPanel2 extends JPanel { } } catch (TskCoreException ex) { Logger.getLogger(ReportVisualPanel2.class.getName()).log(Level.SEVERE, "Error getting list of artifacts in use: " + ex.getLocalizedMessage(), ex); - return; } } @@ -355,8 +357,6 @@ public final class ReportVisualPanel2 extends JPanel { return this; } return new JLabel(); - } - + } } - } From e393c317899f3494ebca757e6435259a5e97b907 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Thu, 17 Oct 2013 11:47:55 -0400 Subject: [PATCH 055/179] Fixed errors during parsing of regripper output. --- .../recentactivity/ExtractRegistry.java | 74 +++++++++++-------- 1 file changed, 42 insertions(+), 32 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java index 5643a8e0c1..23225f2ca8 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java @@ -171,42 +171,53 @@ public class ExtractRegistry extends Extract { logger.log(Level.INFO, moduleName + "- Now getting registry information from " + regFileNameLocal); RegOutputFiles regOutputFiles = executeRegRip(regFileNameLocal, outputPathBase); - if (parseReg(regOutputFiles.autopsyPlugins, regFile.getId(), extrctr) == false) { - continue; + + if (regOutputFiles.autopsyPlugins.isEmpty() == false) { + if (parseReg(regOutputFiles.autopsyPlugins, regFile.getId(), extrctr) == false) { + this.addErrorMessage(this.getName() + ": Failed 
parsing registry file results " + regFileName); + continue; + } } - try { - BlackboardArtifact art = regFile.newArtifact(ARTIFACT_TYPE.TSK_TOOL_OUTPUT.getTypeID()); - BlackboardAttribute att = new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "RegRipper"); - art.addAttribute(att); - - FileReader fread = new FileReader(regOutputFiles.fullPlugins); - BufferedReader input = new BufferedReader(fread); - - StringBuilder sb = new StringBuilder(); - while (true) { - + if (regOutputFiles.fullPlugins.isEmpty() == false) { + try { + BlackboardArtifact art = regFile.newArtifact(ARTIFACT_TYPE.TSK_TOOL_OUTPUT.getTypeID()); + BlackboardAttribute att = new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "RegRipper"); + art.addAttribute(att); + + FileReader fread = new FileReader(regOutputFiles.fullPlugins); + BufferedReader input = new BufferedReader(fread); + + StringBuilder sb = new StringBuilder(); try { - String s = input.readLine(); - if (s == null) { - break; + while (true) { + String s = input.readLine(); + if (s == null) { + break; + } + sb.append(s).append("\n"); } - sb.append(s).append("\n"); } catch (IOException ex) { java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); - break; + } finally { + try { + input.close(); + } catch (IOException ex) { + logger.log(Level.WARNING, "Failed to close reader.", ex); + } } + + att = new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID(), "RecentActivity", sb.toString()); + art.addAttribute(att); + } catch (FileNotFoundException ex) { + this.addErrorMessage(this.getName() + ": Error reading registry file - " + regOutputFiles.fullPlugins); + java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); + } catch (TskCoreException ex) { + // TODO - add error message here? 
+ java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); } - - att = new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID(), "RecentActivity", sb.toString()); - art.addAttribute(att); - } catch (FileNotFoundException ex) { - java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); - } catch (TskCoreException ex) { - java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); - } - - regFileNameLocalFile.delete(); + regFileNameLocalFile.delete(); + } } try { if (logFile != null) { @@ -277,8 +288,7 @@ public class ExtractRegistry extends Extract { } } } - } - else { + } else { logger.log(Level.INFO, "Not running Autopsy-only modules on hive"); } @@ -303,8 +313,7 @@ public class ExtractRegistry extends Extract { } } } - } - else { + } else { logger.log(Level.INFO, "Not running original RR modules on hive"); } return regOutputFiles; @@ -321,6 +330,7 @@ public class ExtractRegistry extends Extract { // Read the file in and create a Document and elements File regfile = new File(regRecord); fstream = new FileInputStream(regfile); + //InputStreamReader fstreamReader = new InputStreamReader(fstream, "UTF-8"); //BufferedReader input = new BufferedReader(fstreamReader); //logger.log(Level.INFO, "using encoding " + fstreamReader.getEncoding()); From 79126cbe2beb8c0be153feca974a28e78c0c1e4a Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Thu, 17 Oct 2013 13:57:30 -0400 Subject: [PATCH 056/179] Added another error message to user on pasco failure. 
--- .../sleuthkit/autopsy/recentactivity/ExtractIE.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java index bbf876a7c6..300cfcb6d6 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java @@ -284,7 +284,8 @@ public class ExtractIE extends Extract { } catch (Exception e) { //TODO should throw a specific checked exception - logger.log(Level.SEVERE, "Error lnk parsing the file to get recent files" + recentFile); + logger.log(Level.SEVERE, "Error lnk parsing the file to get recent files" + recentFile, e); + this.addErrorMessage(this.getName() + ": Error parsing Recent File " + recentFile.getName()); continue; } String path = lnk.getBestPath(); @@ -364,14 +365,15 @@ public class ExtractIE extends Extract { String filename = "pasco2Result." + indexFile.getId() + ".txt"; boolean bPascProcSuccess = executePasco(temps, filename); - pascoResults.add(filename); //At this point pasco2 proccessed the index files. //Now fetch the results, parse them and the delete the files. 
if (bPascProcSuccess) { - + pascoResults.add(filename); //Delete index.dat file since it was succcessfully by Pasco datFile.delete(); + } else { + this.addErrorMessage(this.getName() + ": Error processing Internet Explorer history."); } } } @@ -597,4 +599,4 @@ public class ExtractIE extends Extract { public boolean hasBackgroundJobsRunning() { return false; } -} \ No newline at end of file +} From 98913f22ee65555e624372d5c8f6b6ad9a8f3c50 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 18 Oct 2013 14:02:18 -0400 Subject: [PATCH 057/179] Added work around for database writes on EDT to TagsManager --- .../autopsy/actions/AddTagAction.java | 13 +- .../actions/GetTagNameAndCommentDialog.java | 10 +- .../autopsy/actions/GetTagNameDialog.java | 7 +- .../casemodule/services/TagsManager.java | 332 ++++++++++-------- .../autopsy/datamodel/ContentTagTypeNode.java | 10 +- .../sleuthkit/autopsy/datamodel/TagsNode.java | 11 +- .../BlackboardArtifactTagTypeNode.java | 13 +- .../autopsy/report/ReportVisualPanel2.java | 9 +- 8 files changed, 243 insertions(+), 162 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/actions/AddTagAction.java b/Core/src/org/sleuthkit/autopsy/actions/AddTagAction.java index 1532a573d2..0224421771 100755 --- a/Core/src/org/sleuthkit/autopsy/actions/AddTagAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/AddTagAction.java @@ -21,15 +21,15 @@ package org.sleuthkit.autopsy.actions; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.util.ArrayList; -import javax.swing.AbstractAction; +import java.util.logging.Level; import javax.swing.JMenu; import javax.swing.JMenuItem; import org.openide.util.actions.Presenter; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.services.TagsManager; -import org.sleuthkit.autopsy.directorytree.DirectoryTreeTopComponent; -import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.TagName; +import 
org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.autopsy.coreutils.Logger; /** * An abstract base class for Actions that allow users to tag SleuthKit data @@ -77,7 +77,12 @@ abstract class AddTagAction extends TagAction implements Presenter.Popup { // Get the current set of tag names. TagsManager tagsManager = Case.getCurrentCase().getServices().getTagsManager(); ArrayList tagNames = new ArrayList<>(); - tagsManager.getAllTagNames(tagNames); + try { + tagsManager.getAllTagNames(tagNames); + } + catch (TskCoreException ex) { + Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to get tag names", ex); + } // Create a "Quick Tag" sub-menu. JMenu quickTagMenu = new JMenu("Quick Tag"); diff --git a/Core/src/org/sleuthkit/autopsy/actions/GetTagNameAndCommentDialog.java b/Core/src/org/sleuthkit/autopsy/actions/GetTagNameAndCommentDialog.java index 2398b59081..34aee2c7df 100644 --- a/Core/src/org/sleuthkit/autopsy/actions/GetTagNameAndCommentDialog.java +++ b/Core/src/org/sleuthkit/autopsy/actions/GetTagNameAndCommentDialog.java @@ -22,6 +22,7 @@ import java.awt.event.ActionEvent; import java.awt.event.KeyEvent; import java.util.ArrayList; import java.util.HashMap; +import java.util.logging.Level; import javax.swing.AbstractAction; import javax.swing.ActionMap; import javax.swing.InputMap; @@ -32,7 +33,9 @@ import javax.swing.KeyStroke; import org.openide.windows.WindowManager; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.services.TagsManager; +import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.TagName; +import org.sleuthkit.datamodel.TskCoreException; public class GetTagNameAndCommentDialog extends JDialog { private static final String NO_TAG_NAMES_MESSAGE = "No Tags"; // RJCTODO: ?? @@ -83,7 +86,12 @@ public class GetTagNameAndCommentDialog extends JDialog { // Save the tag names to be enable to return the one the user selects. 
TagsManager tagsManager = Case.getCurrentCase().getServices().getTagsManager(); ArrayList currentTagNames = new ArrayList<>(); - tagsManager.getAllTagNames(currentTagNames); + try { + tagsManager.getAllTagNames(currentTagNames); + } + catch (TskCoreException ex) { + Logger.getLogger(GetTagNameAndCommentDialog.class.getName()).log(Level.SEVERE, "Failed to get tag names", ex); + } if (currentTagNames.isEmpty()) { tagCombo.addItem(NO_TAG_NAMES_MESSAGE); } diff --git a/Core/src/org/sleuthkit/autopsy/actions/GetTagNameDialog.java b/Core/src/org/sleuthkit/autopsy/actions/GetTagNameDialog.java index d576d7c274..637effab00 100644 --- a/Core/src/org/sleuthkit/autopsy/actions/GetTagNameDialog.java +++ b/Core/src/org/sleuthkit/autopsy/actions/GetTagNameDialog.java @@ -54,7 +54,12 @@ public class GetTagNameDialog extends JDialog { // case the user chooses an existing tag name from the tag names table. TagsManager tagsManager = Case.getCurrentCase().getServices().getTagsManager(); ArrayList currentTagNames = new ArrayList<>(); - tagsManager.getAllTagNames(currentTagNames); + try { + tagsManager.getAllTagNames(currentTagNames); + } + catch (TskCoreException ex) { + Logger.getLogger(GetTagNameDialog.class.getName()).log(Level.SEVERE, "Failed to get tag names", ex); + } for (TagName name : currentTagNames) { this.tagNames.put(name.getDisplayName(), name); } diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java index 4e8d5ec869..411d6ec33b 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java @@ -36,7 +36,7 @@ import org.sleuthkit.datamodel.TagName; import org.sleuthkit.datamodel.TskCoreException; /** - * A singleton instance of this class functions as an Autopsy service that + * A per case instance of this class functions as an Autopsy service that * manages the creation, updating, and 
deletion of tags applied to content and * blackboard artifacts by users. */ @@ -45,8 +45,8 @@ public class TagsManager implements Closeable { private static final String TAG_NAMES_SETTING_KEY = "TagNames"; private static final TagName[] predefinedTagNames = new TagName[]{new TagName("Bookmark", "", TagName.HTML_COLOR.NONE)}; private final SleuthkitCase tskCase; - private final HashMap tagNames = new HashMap<>(); - private final Object lock = new Object(); + private final HashMap uniqueTagNames = new HashMap<>(); + private boolean tagNamesInitialized = false; // @@@ This is part of a work around to be removed when database access on the EDT is correctly synchronized. // Use this exception and the member hash map to manage uniqueness of hash // names. This is deemed more proactive and informative than leaving this to @@ -56,114 +56,46 @@ public class TagsManager implements Closeable { } /** - * Package-scope constructor for use of Services class. An instance of + * Package-scope constructor for use of the Services class. An instance of * TagsManager should be created for each case that is opened. * @param [in] tskCase The SleuthkitCase object for the current case. */ TagsManager(SleuthkitCase tskCase) { this.tskCase = tskCase; - getExistingTagNames(); - saveTagNamesToTagsSettings(); + // @@@ The removal of this call is a work around until database access on the EDT is correctly synchronized. 
+ // getExistingTagNames(); } - private void getExistingTagNames() { - getTagNamesFromCurrentCase(); - getTagNamesFromTagsSettings(); - getPredefinedTagNames(); - } - - private void getTagNamesFromCurrentCase() { - try { - ArrayList currentTagNames = new ArrayList<>(); - tskCase.getAllTagNames(currentTagNames); - for (TagName tagName : currentTagNames) { - tagNames.put(tagName.getDisplayName(), tagName); - } - } - catch (TskCoreException ex) { - Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to get tag types from the current case", ex); - } - } - - private void getTagNamesFromTagsSettings() { - String setting = ModuleSettings.getConfigSetting(TAGS_SETTINGS_NAME, TAG_NAMES_SETTING_KEY); - if (null != setting && !setting.isEmpty()) { - // Read the tag name setting and break it into tag name tuples. - List tagNameTuples = Arrays.asList(setting.split(";")); - - // Parse each tuple and add the tag names to the current case, one - // at a time to gracefully discard any duplicates or corrupt tuples. 
- for (String tagNameTuple : tagNameTuples) { - String[] tagNameAttributes = tagNameTuple.split(","); - if (!tagNames.containsKey(tagNameAttributes[0])) { - TagName tagName = new TagName(tagNameAttributes[0], tagNameAttributes[1], TagName.HTML_COLOR.getColorByName(tagNameAttributes[2])); - addTagName(tagName, "Failed to add " + tagName.getDisplayName() + " tag name from tag settings to the current case"); - } - } - } - } - - private void getPredefinedTagNames() { - for (TagName tagName : predefinedTagNames) { - if (!tagNames.containsKey(tagName.getDisplayName())) { - addTagName(tagName, "Failed to add predefined " + tagName.getDisplayName() + " tag name to the current case"); - } - } - } - - private void addTagName(TagName tagName, String errorMessage) { - try { - tskCase.addTagName(tagName); - tagNames.put(tagName.getDisplayName(), tagName); - } - catch(TskCoreException ex) { - Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, errorMessage, ex); - } - } - - private void saveTagNamesToTagsSettings() { - if (!tagNames.isEmpty()) { - StringBuilder setting = new StringBuilder(); - for (TagName tagName : tagNames.values()) { - if (setting.length() != 0) { - setting.append(";"); - } - setting.append(tagName.getDisplayName()).append(","); - setting.append(tagName.getDescription()).append(","); - setting.append(tagName.getColor().name()); - } - ModuleSettings.setConfigSetting(TAGS_SETTINGS_NAME, TAG_NAMES_SETTING_KEY, setting.toString()); - } - } - /** * Gets a list of all tag names currently available for tagging content or * blackboard artifacts. - * @return [out] A list, possibly empty, of TagName data transfer objects (DTOs). + * @param [out] A list, possibly empty, of TagName data transfer objects (DTOs). 
+ * @throws TskCoreException */ - public void getAllTagNames(List tagNames) { - try { - tagNames.clear(); - tskCase.getAllTagNames(tagNames); - } - catch (TskCoreException ex) { - Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to get tag names from the current case", ex); + public synchronized void getAllTagNames(List tagNames) throws TskCoreException { + // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. + if (!tagNamesInitialized) { + getExistingTagNames(); } + + tagNames.clear(); + tskCase.getAllTagNames(tagNames); } /** * Gets a list of all tag names currently used for tagging content or * blackboard artifacts. - * @return [out] A list, possibly empty, of TagName data transfer objects (DTOs). + * @param [out] A list, possibly empty, of TagName data transfer objects (DTOs). + * @throws TskCoreException */ - public void getTagNamesInUse(List tagNames) { - try { - tagNames.clear(); - tskCase.getTagNamesInUse(tagNames); - } - catch (TskCoreException ex) { - Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to get tag names from the current case", ex); + public synchronized void getTagNamesInUse(List tagNames) throws TskCoreException { + // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. + if (!tagNamesInitialized) { + getExistingTagNames(); } + + tagNames.clear(); + tskCase.getTagNamesInUse(tagNames); } /** @@ -171,57 +103,63 @@ public class TagsManager implements Closeable { * @param [in] tagDisplayName The display name for which to check. * @return True if the tag name exists, false otherwise. */ - public boolean tagNameExists(String tagDisplayName) { - synchronized(lock) { - return tagNames.containsKey(tagDisplayName); + public synchronized boolean tagNameExists(String tagDisplayName) { + // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. 
+ if (!tagNamesInitialized) { + getExistingTagNames(); } + + return uniqueTagNames.containsKey(tagDisplayName); } /** - * Adds a new tag name to the current case and to the tags settings file. + * Adds a new tag name to the current case and to the tags settings. * @param [in] displayName The display name for the new tag name. * @return A TagName data transfer object (DTO) representing the new tag name. - * @throws TskCoreException + * @throws TagNameAlreadyExistsException, TskCoreException */ public TagName addTagName(String displayName) throws TagNameAlreadyExistsException, TskCoreException { return addTagName(displayName, "", TagName.HTML_COLOR.NONE); } /** - * Adds a new tag name to the current case and to the tags settings file. + * Adds a new tag name to the current case and to the tags settings. * @param [in] displayName The display name for the new tag name. * @param [in] description The description for the new tag name. * @return A TagName data transfer object (DTO) representing the new tag name. - * @throws TskCoreException + * @throws TagNameAlreadyExistsException, TskCoreException */ public TagName addTagName(String displayName, String description) throws TagNameAlreadyExistsException, TskCoreException { return addTagName(displayName, description, TagName.HTML_COLOR.NONE); } /** - * Adds a new tag name to the current case and to the tags settings file. + * Adds a new tag name to the current case and to the tags settings. * @param [in] displayName The display name for the new tag name. * @param [in] description The description for the new tag name. * @param [in] color The HTML color to associate with the new tag name. * @return A TagName data transfer object (DTO) representing the new tag name. 
- * @throws TskCoreException + * @throws TagNameAlreadyExistsException, TskCoreException */ public synchronized TagName addTagName(String displayName, String description, TagName.HTML_COLOR color) throws TagNameAlreadyExistsException, TskCoreException { - synchronized(lock) { - if (tagNames.containsKey(displayName)) { - throw new TagNameAlreadyExistsException(); - } + // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. + if (!tagNamesInitialized) { + getExistingTagNames(); + } + + if (uniqueTagNames.containsKey(displayName)) { + throw new TagNameAlreadyExistsException(); + } - // Add the tag name to the case. - TagName newTagName = new TagName(displayName, description, color); - tskCase.addTagName(newTagName); + // Add the tag name to the case. + TagName newTagName = new TagName(displayName, description, color); + tskCase.addTagName(newTagName); - // Add the tag name to the tags settings. - tagNames.put(newTagName.getDisplayName(), newTagName); - saveTagNamesToTagsSettings(); - - return newTagName; - } + // Add the tag name to the tags settings. + uniqueTagNames.put(newTagName.getDisplayName(), newTagName); + saveTagNamesToTagsSettings(); + + return newTagName; } /** @@ -246,27 +184,32 @@ public class TagsManager implements Closeable { } /** - * Tags a content object or a portion of a content object. + * Tags a content object or a section of a content object. * @param [in] content The content to tag. * @param [in] tagName The name to use for the tag. * @param [in] comment A comment to store with the tag. - * @param [in] beginByteOffset Designates the beginning of a tagged extent. - * @param [in] endByteOffset Designates the end of a tagged extent. - * @throws TskCoreException + * @param [in] beginByteOffset Designates the beginning of a tagged section. + * @param [in] endByteOffset Designates the end of a tagged section. 
+ * @throws IllegalArgumentException, TskCoreException */ - public void addContentTag(Content content, TagName tagName, String comment, long beginByteOffset, long endByteOffset) throws IllegalArgumentException, TskCoreException { - if (beginByteOffset < 0) { - throw new IllegalArgumentException("Content extent incorrect: beginByteOffset < 0"); - } - - if (endByteOffset <= beginByteOffset) { - throw new IllegalArgumentException("Content extent incorrect: endByteOffset <= beginByteOffset"); - } - - if (endByteOffset > content.getSize() - 1) { - throw new IllegalArgumentException("Content extent incorrect: endByteOffset exceeds content size"); + public synchronized void addContentTag(Content content, TagName tagName, String comment, long beginByteOffset, long endByteOffset) throws IllegalArgumentException, TskCoreException { + // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. + if (!tagNamesInitialized) { + getExistingTagNames(); } + if (beginByteOffset < 0 || beginByteOffset > content.getSize() - 1) { + throw new IllegalArgumentException("beginByteOffset = " + beginByteOffset + " out of content size range (0 - " + (content.getSize() - 1) + ")"); + } + + if (endByteOffset < 0 || endByteOffset > content.getSize() - 1) { + throw new IllegalArgumentException("endByteOffset = " + endByteOffset + " out of content size range (0 - " + (content.getSize() - 1) + ")"); + } + + if (endByteOffset < beginByteOffset) { + throw new IllegalArgumentException("endByteOffset < beginByteOffset"); + } + tskCase.addContentTag(new ContentTag(content, tagName, comment, beginByteOffset, endByteOffset)); } @@ -275,7 +218,12 @@ public class TagsManager implements Closeable { * @param [in] tag The tag to delete. 
* @throws TskCoreException */ - public void deleteContentTag(ContentTag tag) throws TskCoreException { + public synchronized void deleteContentTag(ContentTag tag) throws TskCoreException { + // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. + if (!tagNamesInitialized) { + getExistingTagNames(); + } + tskCase.deleteContentTag(tag); } @@ -283,14 +231,15 @@ public class TagsManager implements Closeable { * Gets content tags by tag name. * @param [in] tagName The tag name of interest. * @return A list, possibly empty, of the content tags with the specified tag name. + * @throws TskCoreException */ - public void getContentTagsByTagName(TagName tagName, List tags) { - try { - tskCase.getContentTagsByTagName(tagName, tags); - } - catch (TskCoreException ex) { - Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to get content tags from the current case", ex); + public synchronized void getContentTagsByTagName(TagName tagName, List tags) throws TskCoreException { + // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. + if (!tagNamesInitialized) { + getExistingTagNames(); } + + tskCase.getContentTagsByTagName(tagName, tags); } /** @@ -310,7 +259,12 @@ public class TagsManager implements Closeable { * @param [in] comment A comment to store with the tag. * @throws TskCoreException */ - public void addBlackboardArtifactTag(BlackboardArtifact artifact, TagName tagName, String comment) throws TskCoreException { + public synchronized void addBlackboardArtifactTag(BlackboardArtifact artifact, TagName tagName, String comment) throws TskCoreException { + // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. 
+ if (!tagNamesInitialized) { + getExistingTagNames(); + } + tskCase.addBlackboardArtifactTag(new BlackboardArtifactTag(artifact, tskCase.getContentById(artifact.getObjectID()), tagName, comment)); } @@ -319,7 +273,12 @@ public class TagsManager implements Closeable { * @param [in] tag The tag to delete. * @throws TskCoreException */ - public void deleteBlackboardArtifactTag(BlackboardArtifactTag tag) throws TskCoreException { + public synchronized void deleteBlackboardArtifactTag(BlackboardArtifactTag tag) throws TskCoreException { + // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. + if (!tagNamesInitialized) { + getExistingTagNames(); + } + tskCase.deleteBlackboardArtifactTag(tag); } @@ -327,14 +286,15 @@ public class TagsManager implements Closeable { * Gets blackboard artifact tags by tag name. * @param [in] tagName The tag name of interest. * @return A list, possibly empty, of the content tags with the specified tag name. + * @throws TskCoreException */ - public void getBlackboardArtifactTagsByTagName(TagName tagName, List tags) { - try { - tskCase.getBlackboardArtifactTagsByTagName(tagName, tags); - } - catch (TskCoreException ex) { - Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to get blackboard artifact tags from the current case", ex); + public synchronized void getBlackboardArtifactTagsByTagName(TagName tagName, List tags) throws TskCoreException { + // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. + if (!tagNamesInitialized) { + getExistingTagNames(); } + + tskCase.getBlackboardArtifactTagsByTagName(tagName, tags); } /** @@ -343,17 +303,89 @@ public class TagsManager implements Closeable { * @param [out] tags A list, possibly empty, of the tags that have been applied to the artifact. 
* @throws TskCoreException */ - public void getBlackboardArtifactTagsByArtifact(BlackboardArtifact artifact, List tags) { - try { - tskCase.getBlackboardArtifactTagsByArtifact(artifact, tags); - } - catch (TskCoreException ex) { - Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to get blackboard artifact tags from the current case", ex); + public synchronized void getBlackboardArtifactTagsByArtifact(BlackboardArtifact artifact, List tags) throws TskCoreException { + // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. + if (!tagNamesInitialized) { + getExistingTagNames(); } + + tskCase.getBlackboardArtifactTagsByArtifact(artifact, tags); } @Override public void close() throws IOException { saveTagNamesToTagsSettings(); } + + private void addTagName(TagName tagName, String errorMessage) { + try { + tskCase.addTagName(tagName); + uniqueTagNames.put(tagName.getDisplayName(), tagName); + } + catch(TskCoreException ex) { + Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, errorMessage, ex); + } + } + + private void getExistingTagNames() { + getTagNamesFromCurrentCase(); + getTagNamesFromTagsSettings(); + getPredefinedTagNames(); + saveTagNamesToTagsSettings(); + tagNamesInitialized = true; // @@@ This is part of a work around to be removed when database access on the EDT is correctly synchronized. 
+ } + + private void getTagNamesFromCurrentCase() { + try { + ArrayList currentTagNames = new ArrayList<>(); + tskCase.getAllTagNames(currentTagNames); + for (TagName tagName : currentTagNames) { + uniqueTagNames.put(tagName.getDisplayName(), tagName); + } + } + catch (TskCoreException ex) { + Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to get tag types from the current case", ex); + } + } + + private void getTagNamesFromTagsSettings() { + String setting = ModuleSettings.getConfigSetting(TAGS_SETTINGS_NAME, TAG_NAMES_SETTING_KEY); + if (null != setting && !setting.isEmpty()) { + // Read the tag name setting and break it into tag name tuples. + List tagNameTuples = Arrays.asList(setting.split(";")); + + // Parse each tuple and add the tag names to the current case, one + // at a time to gracefully discard any duplicates or corrupt tuples. + for (String tagNameTuple : tagNameTuples) { + String[] tagNameAttributes = tagNameTuple.split(","); + if (!uniqueTagNames.containsKey(tagNameAttributes[0])) { + TagName tagName = new TagName(tagNameAttributes[0], tagNameAttributes[1], TagName.HTML_COLOR.getColorByName(tagNameAttributes[2])); + addTagName(tagName, "Failed to add " + tagName.getDisplayName() + " tag name from tag settings to the current case"); + } + } + } + } + + private void getPredefinedTagNames() { + for (TagName tagName : predefinedTagNames) { + if (!uniqueTagNames.containsKey(tagName.getDisplayName())) { + addTagName(tagName, "Failed to add predefined " + tagName.getDisplayName() + " tag name to the current case"); + } + } + } + + private void saveTagNamesToTagsSettings() { + if (!uniqueTagNames.isEmpty()) { + StringBuilder setting = new StringBuilder(); + for (TagName tagName : uniqueTagNames.values()) { + if (setting.length() != 0) { + setting.append(";"); + } + setting.append(tagName.getDisplayName()).append(","); + setting.append(tagName.getDescription()).append(","); + setting.append(tagName.getColor().name()); + } + 
ModuleSettings.setConfigSetting(TAGS_SETTINGS_NAME, TAG_NAMES_SETTING_KEY, setting.toString()); + } + } } diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java index 956ac6d69d..92dfa50243 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java @@ -19,13 +19,16 @@ package org.sleuthkit.autopsy.datamodel; import java.util.List; +import java.util.logging.Level; import org.openide.nodes.ChildFactory; import org.openide.nodes.Children; import org.openide.nodes.Node; import org.openide.nodes.Sheet; import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.ContentTag; import org.sleuthkit.datamodel.TagName; +import org.sleuthkit.datamodel.TskCoreException; /** * Instances of this class are are elements of a directory tree sub-tree @@ -77,7 +80,12 @@ public class ContentTagTypeNode extends DisplayableItemNode { @Override protected boolean createKeys(List keys) { // Use the content tags bearing the specified tag name as the keys. 
- Case.getCurrentCase().getServices().getTagsManager().getContentTagsByTagName(tagName, keys); + try { + Case.getCurrentCase().getServices().getTagsManager().getContentTagsByTagName(tagName, keys); + } + catch (TskCoreException ex) { + Logger.getLogger(ContentTagTypeNode.ContentTagNodeFactory.class.getName()).log(Level.SEVERE, "Failed to get tag names", ex); + } return true; } diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java index 3f1813a647..908252a95f 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java @@ -19,12 +19,16 @@ package org.sleuthkit.autopsy.datamodel; import java.util.List; +import java.util.logging.Level; import org.openide.nodes.ChildFactory; import org.openide.nodes.Children; import org.openide.nodes.Node; import org.openide.nodes.Sheet; +import org.sleuthkit.autopsy.actions.GetTagNameAndCommentDialog; import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.TagName; +import org.sleuthkit.datamodel.TskCoreException; /** * Instances of this class are the root nodes of tree that is a sub-tree of the @@ -74,7 +78,12 @@ public class TagsNode extends DisplayableItemNode { private static class TagNameNodeFactory extends ChildFactory { @Override protected boolean createKeys(List keys) { - Case.getCurrentCase().getServices().getTagsManager().getAllTagNames(keys); + try { + Case.getCurrentCase().getServices().getTagsManager().getAllTagNames(keys); + } + catch (TskCoreException ex) { + Logger.getLogger(TagNameNodeFactory.class.getName()).log(Level.SEVERE, "Failed to get tag names", ex); + } return true; } diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java b/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java index b7acf5dd72..8c2e904aed 100755 --- 
a/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java @@ -19,17 +19,21 @@ package org.sleuthkit.autopsy.directorytree; import java.util.List; +import java.util.logging.Level; import org.openide.nodes.ChildFactory; import org.openide.nodes.Children; import org.openide.nodes.Node; import org.openide.nodes.Sheet; import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.datamodel.BlackboardArtifactTagNode; +import org.sleuthkit.autopsy.datamodel.ContentTagTypeNode; import org.sleuthkit.autopsy.datamodel.DisplayableItemNode; import org.sleuthkit.autopsy.datamodel.DisplayableItemNodeVisitor; import org.sleuthkit.autopsy.datamodel.NodeProperty; import org.sleuthkit.datamodel.BlackboardArtifactTag; import org.sleuthkit.datamodel.TagName; +import org.sleuthkit.datamodel.TskCoreException; /** * Instances of this class are elements in a sub-tree of the Autopsy @@ -80,8 +84,13 @@ public class BlackboardArtifactTagTypeNode extends DisplayableItemNode { @Override protected boolean createKeys(List keys) { - // Use the blackboard artifact tags bearing the specified tag name as the keys. - Case.getCurrentCase().getServices().getTagsManager().getBlackboardArtifactTagsByTagName(tagName, keys); + try { + // Use the blackboard artifact tags bearing the specified tag name as the keys. 
+ Case.getCurrentCase().getServices().getTagsManager().getBlackboardArtifactTagsByTagName(tagName, keys); + } + catch (TskCoreException ex) { + Logger.getLogger(BlackboardArtifactTagTypeNode.BlackboardArtifactTagNodeFactory.class.getName()).log(Level.SEVERE, "Failed to get tag names", ex); + } return true; } diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.java b/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.java index 074892d84c..d63c65f73a 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.java @@ -73,8 +73,13 @@ public final class ReportVisualPanel2 extends JPanel { // Initialize the list of Tags private void initTags() { ArrayList tagNamesInUse = new ArrayList<>(); - Case.getCurrentCase().getServices().getTagsManager().getTagNamesInUse(tagNamesInUse); - + try { + Case.getCurrentCase().getServices().getTagsManager().getTagNamesInUse(tagNamesInUse); + } + catch (TskCoreException ex) { + Logger.getLogger(ReportVisualPanel2.class.getName()).log(Level.SEVERE, "Failed to get tag names", ex); + } + for(TagName tagName : tagNamesInUse) { tagStates.put(tagName.getDisplayName(), Boolean.FALSE); } From 15c6ed57cf9c088b1a7f871b3d8f824435331c7d Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Fri, 18 Oct 2013 14:11:22 -0400 Subject: [PATCH 058/179] Changed createKeys methods to add all keys at once instead of one at a time. 
--- .../KeywordSearchResultFactory.java | 20 ++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchResultFactory.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchResultFactory.java index e595307b65..5d8e23e9c9 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchResultFactory.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchResultFactory.java @@ -242,6 +242,8 @@ public class KeywordSearchResultFactory extends ChildFactory { return false; } + List tempList = new ArrayList<>(); + //execute the query and get fscontents matching Map> tcqRes; try { @@ -311,8 +313,13 @@ public class KeywordSearchResultFactory extends ChildFactory { } final String highlightQueryEscaped = getHighlightQuery(tcq, literal_query, tcqRes, f); - toPopulate.add(new KeyValueQueryContent(f.getName(), resMap, ++resID, f, highlightQueryEscaped, tcq, previewChunk, tcqRes)); + tempList.add(new KeyValueQueryContent(f.getName(), resMap, ++resID, f, highlightQueryEscaped, tcq, previewChunk, tcqRes)); } + + // Add all the nodes to toPopulate at once. Minimizes node creation + // EDT threads, which can slow and/or hang the UI on large queries. 
+ toPopulate.addAll(tempList); + //write to bb //cannot reuse snippet in ResultWriter //because for regex searches in UI we compress results by showing a file per regex once (even if multiple term hits) @@ -449,15 +456,22 @@ public class KeywordSearchResultFactory extends ChildFactory { int resID = 0; final KeywordSearchQuery origQuery = thing.getQuery(); - + + List tempList = new ArrayList<>(); + for (final AbstractFile f : uniqueMatches.keySet()) { final int previewChunkId = uniqueMatches.get(f); Map resMap = new LinkedHashMap<>(); if (f.getType() == TSK_DB_FILES_TYPE_ENUM.FS) { AbstractFsContentNode.fillPropertyMap(resMap, (FsContent) f); } - toPopulate.add(new KeyValueQueryContent(f.getName(), resMap, ++resID, f, keywordQuery, thing.getQuery(), previewChunkId, matchesRes)); + tempList.add(new KeyValueQueryContent(f.getName(), resMap, ++resID, f, keywordQuery, thing.getQuery(), previewChunkId, matchesRes)); } + + // Add all the nodes to toPopulate at once. Minimizes node creation + // EDT threads, which can slow and/or hang the UI on large queries. 
+ toPopulate.addAll(tempList); + //write to bb new ResultWriter(matchesRes, origQuery, "").execute(); From d4ac9b3bd4f29443d148efb0a27a9f2ca7b5ee10 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 18 Oct 2013 15:12:01 -0400 Subject: [PATCH 059/179] Added Comment properties to tag nodes --- .../autopsy/datamodel/BlackboardArtifactTagNode.java | 3 ++- .../org/sleuthkit/autopsy/datamodel/ContentTagNode.java | 7 ++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactTagNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactTagNode.java index e4e61680b3..8cab0fed87 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactTagNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactTagNode.java @@ -69,7 +69,8 @@ public class BlackboardArtifactTagNode extends DisplayableItemNode { } properties.put(new NodeProperty("Source File Path", "Source File Path", "", contentPath)); properties.put(new NodeProperty("Result Type", "Result Type", "", tag.getArtifact().getDisplayName())); - + properties.put(new NodeProperty("Comment", "Comment", "", tag.getComment())); + return propertySheet; } diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagNode.java index b2e5c1d08e..a7c1b487e1 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagNode.java @@ -58,7 +58,7 @@ public class ContentTagNode extends DisplayableItemNode { propertySheet.put(properties); } - properties.put(new NodeProperty("Source File", "Source File", "", tag.getContent().getName())); + properties.put(new NodeProperty("File", "File", "", tag.getContent().getName())); String contentPath; try { contentPath = tag.getContent().getUniquePath(); @@ -67,8 +67,9 @@ public class ContentTagNode extends DisplayableItemNode { 
Logger.getLogger(ContentTagNode.class.getName()).log(Level.SEVERE, "Failed to get path for content (id = " + tag.getContent().getId() + ")", ex); contentPath = "Unavailable"; } - properties.put(new NodeProperty("Source File Path", "Source File Path", "", contentPath)); - + properties.put(new NodeProperty("File Path", "File Path", "", contentPath)); + properties.put(new NodeProperty("Comment", "Comment", "", tag.getComment())); + return propertySheet; } From fd811a49190ed579c234aa84e3a9c647397d22ec Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 18 Oct 2013 15:52:03 -0400 Subject: [PATCH 060/179] Clean up of artifact selection types dialog for reports --- .../report/ArtifactSelectionDialog.form | 1 + .../report/ArtifactSelectionDialog.java | 32 ++----------------- 2 files changed, 4 insertions(+), 29 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.form b/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.form index 4a2908fff3..cd714f81c8 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.form +++ b/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.form @@ -6,6 +6,7 @@ + diff --git a/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.java b/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.java index 96f417f72b..e4dff5c8d2 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.java +++ b/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.java @@ -41,8 +41,6 @@ import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.TskCoreException; public class ArtifactSelectionDialog extends javax.swing.JDialog { - private static final Logger logger = Logger.getLogger(ArtifactSelectionDialog.class.getName()); - private static ArtifactSelectionDialog instance; private ArtifactModel model; private ArtifactRenderer renderer; @@ -73,13 +71,12 @@ public class ArtifactSelectionDialog extends 
javax.swing.JDialog { artifacts.removeAll(doNotReport); - artifactStates = new EnumMap(BlackboardArtifact.ARTIFACT_TYPE.class); + artifactStates = new EnumMap<>(BlackboardArtifact.ARTIFACT_TYPE.class); for (BlackboardArtifact.ARTIFACT_TYPE type : artifacts) { artifactStates.put(type, Boolean.TRUE); } } catch (TskCoreException ex) { Logger.getLogger(ArtifactSelectionDialog.class.getName()).log(Level.SEVERE, "Error getting list of artifacts in use: " + ex.getLocalizedMessage()); - return; } } @@ -101,28 +98,7 @@ public class ArtifactSelectionDialog extends javax.swing.JDialog { } }); } - - /** - * Returns a list of the artifact types we want to report on. - */ - static List getImportantArtifactTypes() { - List types = new ArrayList(); - types.add(ARTIFACT_TYPE.TSK_WEB_BOOKMARK); - types.add(ARTIFACT_TYPE.TSK_WEB_COOKIE); - types.add(ARTIFACT_TYPE.TSK_WEB_HISTORY); - types.add(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD); - types.add(ARTIFACT_TYPE.TSK_RECENT_OBJECT); - types.add(ARTIFACT_TYPE.TSK_INSTALLED_PROG); - types.add(ARTIFACT_TYPE.TSK_KEYWORD_HIT); - types.add(ARTIFACT_TYPE.TSK_HASHSET_HIT); - types.add(ARTIFACT_TYPE.TSK_DEVICE_ATTACHED); - types.add(ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY); - types.add(ARTIFACT_TYPE.TSK_METADATA_EXIF); - types.add(ARTIFACT_TYPE.TSK_TAG_FILE); - types.add(ARTIFACT_TYPE.TSK_TAG_ARTIFACT); - return types; - } - + /** * Display this dialog, and return the selected artifacts. 
*/ @@ -289,8 +265,6 @@ public class ArtifactSelectionDialog extends javax.swing.JDialog { return this; } return new JLabel(); - } - + } } - } From 3ef3ce3da2da35c5fb6c26f5b72e3a2e665fde87 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 18 Oct 2013 15:52:50 -0400 Subject: [PATCH 061/179] Restored use of star icon for bookmark tag name nodes --- Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java index a0fc06f539..3d7783ae98 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java @@ -35,6 +35,7 @@ public class TagNameNode extends DisplayableItemNode { private static final String CONTENT_TAG_TYPE_NODE_KEY = "Content Tags"; private static final String BLACKBOARD_ARTIFACT_TAG_TYPE_NODE_KEY = "Result Tags"; private static final String ICON_PATH = "org/sleuthkit/autopsy/images/tag-folder-blue-icon-16.png"; + private static final String BOOKMARK_TAG_ICON_PATH = "org/sleuthkit/autopsy/images/star-bookmark-icon-16.png"; private final TagName tagName; public TagNameNode(TagName tagName) { @@ -42,7 +43,12 @@ public class TagNameNode extends DisplayableItemNode { this.tagName = tagName; super.setName(tagName.getDisplayName()); super.setDisplayName(tagName.getDisplayName()); - this.setIconBaseWithExtension(ICON_PATH); + if (tagName.getDisplayName().equals("Bookmark")) { + setIconBaseWithExtension(BOOKMARK_TAG_ICON_PATH); + } + else { + setIconBaseWithExtension(ICON_PATH); + } } @Override From 4cd68f99a14395a409d03e7f5ae12ea6d56f9a50 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 18 Oct 2013 16:43:45 -0400 Subject: [PATCH 062/179] Added counts to tag sub-tree nodes --- .../casemodule/services/TagsManager.java | 32 ++++++++++++++++++- .../autopsy/datamodel/ContentTagTypeNode.java | 13 
++++++-- .../autopsy/datamodel/TagNameNode.java | 22 ++++++++++--- .../BlackboardArtifactTagTypeNode.java | 15 +++++++-- 4 files changed, 72 insertions(+), 10 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java index 411d6ec33b..22b2c1b98b 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java @@ -226,7 +226,22 @@ public class TagsManager implements Closeable { tskCase.deleteContentTag(tag); } - + + /** + * Gets content tags count by tag name. + * @param [in] tagName The tag name of interest. + * @return A count of the content tags with the specified tag name. + * @throws TskCoreException + */ + public synchronized long getContentTagsCountByTagName(TagName tagName) throws TskCoreException { + // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. + if (!tagNamesInitialized) { + getExistingTagNames(); + } + + return tskCase.getContentTagsCountByTagName(tagName); + } + /** * Gets content tags by tag name. * @param [in] tagName The tag name of interest. @@ -282,6 +297,21 @@ public class TagsManager implements Closeable { tskCase.deleteBlackboardArtifactTag(tag); } + /** + * Gets blackboard artifact tags count by tag name. + * @param [in] tagName The tag name of interest. + * @return A count of the blackboard artifact tags with the specified tag name. + * @throws TskCoreException + */ + public synchronized long getBlackboardArtifactTagsCountByTagName(TagName tagName) throws TskCoreException { + // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. + if (!tagNamesInitialized) { + getExistingTagNames(); + } + + return tskCase.getBlackboardArtifactTagsCountByTagName(tagName); + } + /** * Gets blackboard artifact tags by tag name. 
* @param [in] tagName The tag name of interest. diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java index 92dfa50243..6c1a454357 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java @@ -41,8 +41,17 @@ public class ContentTagTypeNode extends DisplayableItemNode { public ContentTagTypeNode(TagName tagName) { super(Children.create(new ContentTagNodeFactory(tagName), true)); - super.setName(DISPLAY_NAME); - super.setDisplayName(DISPLAY_NAME); + + long tagsCount = 0; + try { + tagsCount = Case.getCurrentCase().getServices().getTagsManager().getContentTagsCountByTagName(tagName); + } + catch (TskCoreException ex) { + Logger.getLogger(ContentTagTypeNode.class.getName()).log(Level.SEVERE, "Failed to get content tags count for " + tagName.getDisplayName() + " tag name", ex); + } + + super.setName(DISPLAY_NAME + " (" + tagsCount + ")"); + super.setDisplayName(DISPLAY_NAME + " (" + tagsCount + ")"); this.setIconBaseWithExtension(ICON_PATH); } diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java index 3d7783ae98..05f4a00627 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java @@ -19,12 +19,16 @@ package org.sleuthkit.autopsy.datamodel; import java.util.List; +import java.util.logging.Level; import org.openide.nodes.ChildFactory; import org.openide.nodes.Children; import org.openide.nodes.Node; import org.openide.nodes.Sheet; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.directorytree.BlackboardArtifactTagTypeNode; import org.sleuthkit.datamodel.TagName; +import org.sleuthkit.datamodel.TskCoreException; /** * Instances of this class are elements of Node 
hierarchies consisting of @@ -32,8 +36,6 @@ import org.sleuthkit.datamodel.TagName; * tag name. */ public class TagNameNode extends DisplayableItemNode { - private static final String CONTENT_TAG_TYPE_NODE_KEY = "Content Tags"; - private static final String BLACKBOARD_ARTIFACT_TAG_TYPE_NODE_KEY = "Result Tags"; private static final String ICON_PATH = "org/sleuthkit/autopsy/images/tag-folder-blue-icon-16.png"; private static final String BOOKMARK_TAG_ICON_PATH = "org/sleuthkit/autopsy/images/star-bookmark-icon-16.png"; private final TagName tagName; @@ -41,8 +43,18 @@ public class TagNameNode extends DisplayableItemNode { public TagNameNode(TagName tagName) { super(Children.create(new TagTypeNodeFactory(tagName), true)); this.tagName = tagName; - super.setName(tagName.getDisplayName()); - super.setDisplayName(tagName.getDisplayName()); + + long tagsCount = 0; + try { + tagsCount = Case.getCurrentCase().getServices().getTagsManager().getContentTagsCountByTagName(tagName); + tagsCount += Case.getCurrentCase().getServices().getTagsManager().getBlackboardArtifactTagsCountByTagName(tagName); + } + catch (TskCoreException ex) { + Logger.getLogger(TagNameNode.class.getName()).log(Level.SEVERE, "Failed to get tags count for " + tagName.getDisplayName() + " tag name", ex); + } + + super.setName(tagName.getDisplayName() + " (" + tagsCount + ")"); + super.setDisplayName(tagName.getDisplayName() + " (" + tagsCount + ")"); if (tagName.getDisplayName().equals("Bookmark")) { setIconBaseWithExtension(BOOKMARK_TAG_ICON_PATH); } @@ -78,6 +90,8 @@ public class TagNameNode extends DisplayableItemNode { } private static class TagTypeNodeFactory extends ChildFactory { + private static final String CONTENT_TAG_TYPE_NODE_KEY = "Content Tags"; + private static final String BLACKBOARD_ARTIFACT_TAG_TYPE_NODE_KEY = "Result Tags"; private final TagName tagName; TagTypeNodeFactory(TagName tagName) { diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java 
b/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java index 8c2e904aed..543db96eb0 100755 --- a/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java @@ -42,12 +42,21 @@ import org.sleuthkit.datamodel.TskCoreException; */ public class BlackboardArtifactTagTypeNode extends DisplayableItemNode { private static final String DISPLAY_NAME = "Result Tags"; - private static final String ICON_PATH = "org/sleuthkit/autopsy/images/tag-folder-blue-icon-16.png"; // RJCTODO: Different icon? + private static final String ICON_PATH = "org/sleuthkit/autopsy/images/tag-folder-blue-icon-16.png"; public BlackboardArtifactTagTypeNode(TagName tagName) { super(Children.create(new BlackboardArtifactTagNodeFactory(tagName), true)); - super.setName(DISPLAY_NAME); - super.setDisplayName(DISPLAY_NAME); + + long tagsCount = 0; + try { + tagsCount = Case.getCurrentCase().getServices().getTagsManager().getBlackboardArtifactTagsCountByTagName(tagName); + } + catch (TskCoreException ex) { + Logger.getLogger(BlackboardArtifactTagTypeNode.class.getName()).log(Level.SEVERE, "Failed to get blackboard artifact tags count for " + tagName.getDisplayName() + " tag name", ex); + } + + super.setName(DISPLAY_NAME + " (" + tagsCount + ")"); + super.setDisplayName(DISPLAY_NAME + " (" + tagsCount + ")"); this.setIconBaseWithExtension(ICON_PATH); } From b5095ad802324ccd0e4f9316741e4c64e436989a Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 18 Oct 2013 18:06:53 -0400 Subject: [PATCH 063/179] Additional steps towards removal of old tags code from report package --- .../autopsy/report/ReportGenerator.java | 478 +++++++----------- .../sleuthkit/autopsy/report/ReportHTML.java | 132 ++--- 2 files changed, 252 insertions(+), 358 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java 
b/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java index 9064532933..45a551759d 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java @@ -437,9 +437,9 @@ public class ReportGenerator { boolean msgSent = false; for(ArtifactData artifactData : unsortedArtifacts) { - HashSet tags = artifactData.getTags(); - - String tagsList = makeCommaSeparatedList(tags); +// HashSet tags = artifactData.getTags(); +// +// String tagsList = makeCommaSeparatedList(tags); // Add the row data to all of the reports. for (TableReportModule module : tableModules) { @@ -447,28 +447,22 @@ public class ReportGenerator { // Get the row data for this type of artifact. List rowData; rowData = getArtifactRow(artifactData, module); - if (rowData == null) { + if (rowData.isEmpty()) { if (msgSent == false) { MessageNotifyUtil.Notify.show("Skipping artifact rows for type " + type + " in reports", "Unknown columns to report on", MessageNotifyUtil.MessageType.ERROR); msgSent = true; } continue; } - // Add the list of tag names if the artifact is not itself as tag. - if (artifactData.getArtifact().getArtifactTypeID() != ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getTypeID() && - artifactData.getArtifact().getArtifactTypeID() != ARTIFACT_TYPE.TSK_TAG_FILE.getTypeID()) - { - rowData.add(tagsList); - } - // This is a temporary workaround to avoid modifying the TableReportModule interface. - if (module instanceof ReportHTML) { - ReportHTML htmlReportModule = (ReportHTML)module; - htmlReportModule.addRow(rowData, artifactData.getArtifact()); - } - else { +// // This is a temporary workaround to avoid modifying the TableReportModule interface. 
+// if (module instanceof ReportHTML) { +// ReportHTML htmlReportModule = (ReportHTML)module; +// htmlReportModule.addRow(rowData, artifactData.getArtifact()); +// } +// else { module.addRow(rowData); - } +// } } } @@ -618,11 +612,15 @@ public class ReportGenerator { } // Get any tags that associated with this artifact and apply the tag filter. - HashSet tags = Tags.getUniqueTagNamesForArtifact(rs.getLong("artifact_id"), ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID()); - if (failsTagFilter(tags, tagNamesFilter)) { - continue; - } - String tagsList = makeCommaSeparatedList(tags); + HashSet uniqueTagNames = new HashSet<>(); + ResultSet tagNameRows = skCase.runQuery("SELECT display_name FROM tag_names WHERE artifact_id = " + rs.getLong("artifact_id")); + while (tagNameRows.next()) { + uniqueTagNames.add(tagNameRows.getString("display_name")); + } + if(failsTagFilter(uniqueTagNames, tagNamesFilter)) { + continue; + } + String tagsList = makeCommaSeparatedList(uniqueTagNames); Long objId = rs.getLong("obj_id"); String keyword = rs.getString("keyword"); @@ -761,12 +759,16 @@ public class ReportGenerator { } } - // Get any tags that associated with this artifact and apply the tag filter. - HashSet tags = Tags.getUniqueTagNamesForArtifact(rs.getLong("artifact_id"), ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID()); - if (failsTagFilter(tags, tagNamesFilter)) { + // Get any tags that associated with this artifact and apply the tag filter. 
+ HashSet uniqueTagNames = new HashSet<>(); + ResultSet tagNameRows = skCase.runQuery("SELECT display_name FROM tag_names WHERE artifact_id = " + rs.getLong("artifact_id")); + while (tagNameRows.next()) { + uniqueTagNames.add(tagNameRows.getString("display_name")); + } + if(failsTagFilter(uniqueTagNames, tagNamesFilter)) { continue; } - String tagsList = makeCommaSeparatedList(tags); + String tagsList = makeCommaSeparatedList(uniqueTagNames); Long objId = rs.getLong("obj_id"); String set = rs.getString("setname"); @@ -819,7 +821,7 @@ public class ReportGenerator { } } } - + /** * For a given artifact type ID, return the list of the row titles we're reporting on. * @@ -828,9 +830,8 @@ public class ReportGenerator { */ private List getArtifactTableColumnHeaders(int artifactTypeId) { ArrayList columnHeaders; - - BlackboardArtifact.ARTIFACT_TYPE type = BlackboardArtifact.ARTIFACT_TYPE.fromID(artifactTypeId); - + + BlackboardArtifact.ARTIFACT_TYPE type = BlackboardArtifact.ARTIFACT_TYPE.fromID(artifactTypeId); switch (type) { case TSK_WEB_BOOKMARK: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"URL", "Title", "Date Accessed", "Program", "Source File"})); @@ -865,12 +866,6 @@ public class ReportGenerator { case TSK_METADATA_EXIF: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Date Taken", "Device Manufacturer", "Device Model", "Latitude", "Longitude", "Source File"})); break; - case TSK_TAG_FILE: - columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"File", "Tag", "Comment"})); - break; - case TSK_TAG_ARTIFACT: - columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Result Type", "Tag", "Comment", "Source File"})); - break; case TSK_CONTACT: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Person Name", "Phone Number", "Phone Number (Home)", "Phone Number (Office)", "Phone Number (Mobile)", "Email", "Source File" })); break; @@ -884,25 +879,25 @@ public class ReportGenerator { columnHeaders = new 
ArrayList<>(Arrays.asList(new String[] {"Calendar Entry Type", "Description", "Start Date/Time", "End Date/Time", "Location", "Source File" })); break; case TSK_SPEED_DIAL_ENTRY: - columnHeaders = new ArrayList(Arrays.asList(new String[] {"Short Cut", "Person Name", "Phone Number", "Source File" })); + columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Short Cut", "Person Name", "Phone Number", "Source File" })); break; case TSK_BLUETOOTH_PAIRING: - columnHeaders = new ArrayList(Arrays.asList(new String[] {"Device Name", "Device Address", "Date/Time", "Source File" })); + columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Device Name", "Device Address", "Date/Time", "Source File" })); break; case TSK_GPS_TRACKPOINT: - columnHeaders = new ArrayList(Arrays.asList(new String[] {"Latitude", "Longitude", "Altitude", "Name", "Location Address", "Date/Time", "Source File" })); + columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Latitude", "Longitude", "Altitude", "Name", "Location Address", "Date/Time", "Source File" })); break; case TSK_GPS_BOOKMARK: - columnHeaders = new ArrayList(Arrays.asList(new String[] {"Latitude", "Longitude", "Altitude", "Name", "Location Address", "Date/Time", "Source File" })); + columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Latitude", "Longitude", "Altitude", "Name", "Location Address", "Date/Time", "Source File" })); break; case TSK_GPS_LAST_KNOWN_LOCATION: - columnHeaders = new ArrayList(Arrays.asList(new String[] {"Latitude", "Longitude", "Altitude", "Name", "Location Address", "Date/Time", "Source File" })); + columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Latitude", "Longitude", "Altitude", "Name", "Location Address", "Date/Time", "Source File" })); break; case TSK_GPS_SEARCH: - columnHeaders = new ArrayList(Arrays.asList(new String[] {"Latitude", "Longitude", "Altitude", "Name", "Location Address", "Date/Time", "Source File" })); + columnHeaders = new 
ArrayList<>(Arrays.asList(new String[] {"Latitude", "Longitude", "Altitude", "Name", "Location Address", "Date/Time", "Source File" })); break; case TSK_SERVICE_ACCOUNT: - columnHeaders = new ArrayList(Arrays.asList(new String[] {"Category", "User ID", "Password", "Person Name", "App Name", "URL", "App Path", "Description", "ReplyTo Address", "Mail Server", "Source File" })); + columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Category", "User ID", "Password", "Person Name", "App Name", "URL", "App Path", "Description", "ReplyTo Address", "Mail Server", "Source File" })); break; case TSK_TOOL_OUTPUT: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Program Name", "Text", "Source File"})); @@ -910,11 +905,7 @@ public class ReportGenerator { default: return null; } - - if (artifactTypeId != ARTIFACT_TYPE.TSK_TAG_FILE.getTypeID() && - artifactTypeId != ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getTypeID()) { - columnHeaders.add("Tags"); - } + columnHeaders.add("Tags"); return columnHeaders; } @@ -993,227 +984,175 @@ public class ReportGenerator { private List getArtifactRow(ArtifactData artifactData, TableReportModule module) throws TskCoreException { Map attributes = getMappedAttributes(artifactData.getAttributes(), module); - BlackboardArtifact.ARTIFACT_TYPE type = BlackboardArtifact.ARTIFACT_TYPE.fromID(artifactData.getArtifact().getArtifactTypeID()); - + List rowData = new ArrayList<>(); + BlackboardArtifact.ARTIFACT_TYPE type = BlackboardArtifact.ARTIFACT_TYPE.fromID(artifactData.getArtifact().getArtifactTypeID()); switch (type) { case TSK_WEB_BOOKMARK: - List bookmark = new ArrayList<>(); - bookmark.add(attributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); - bookmark.add(attributes.get(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID())); - bookmark.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID())); - bookmark.add(attributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); - bookmark.add(getFileUniquePath(artifactData.getObjectID())); - return 
bookmark; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_WEB_COOKIE: - List cookie = new ArrayList<>(); - cookie.add(attributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); - cookie.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); - cookie.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); - cookie.add(attributes.get(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID())); - cookie.add(attributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); - cookie.add(getFileUniquePath(artifactData.getObjectID())); - return cookie; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_WEB_HISTORY: - List history = new ArrayList<>(); - history.add(attributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); - history.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID())); - history.add(attributes.get(ATTRIBUTE_TYPE.TSK_REFERRER.getTypeID())); - history.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); - history.add(attributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); - history.add(getFileUniquePath(artifactData.getObjectID())); - return history; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_REFERRER.getTypeID())); + 
rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_WEB_DOWNLOAD: - List download = new ArrayList<>(); - download.add(attributes.get(ATTRIBUTE_TYPE.TSK_PATH.getTypeID())); - download.add(attributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); - download.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID())); - download.add(attributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); - download.add(getFileUniquePath(artifactData.getObjectID())); - return download; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PATH.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_RECENT_OBJECT: - List recent = new ArrayList<>(); - recent.add(attributes.get(ATTRIBUTE_TYPE.TSK_PATH.getTypeID())); - recent.add(getFileUniquePath(artifactData.getObjectID())); - return recent; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PATH.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_INSTALLED_PROG: - List installed = new ArrayList<>(); - installed.add(attributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); - installed.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); - installed.add(getFileUniquePath(artifactData.getObjectID())); - return installed; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_DEVICE_ATTACHED: - List devices = new ArrayList<>(); - devices.add(attributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID())); - 
devices.add(attributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_ID.getTypeID())); - devices.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); - devices.add(getFileUniquePath(artifactData.getObjectID())); - return devices; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_ID.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_WEB_SEARCH_QUERY: - List search = new ArrayList<>(); - search.add(attributes.get(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID())); - search.add(attributes.get(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID())); - search.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID())); - search.add(attributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); - search.add(getFileUniquePath(artifactData.getObjectID())); - return search; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_METADATA_EXIF: - List exif = new ArrayList<>(); - exif.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); - exif.add(attributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE.getTypeID())); - exif.add(attributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID())); - exif.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); - exif.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); - exif.add(getFileUniquePath(artifactData.getObjectID())); - return exif; - case TSK_TAG_FILE: - List taggedFileRow = new ArrayList<>(); - AbstractFile taggedFile = getAbstractFile(artifactData.getObjectID()); - if (taggedFile != null) { - 
taggedFileRow.add(taggedFile.getUniquePath()); - } else { - taggedFileRow.add(""); - } - taggedFileRow.add(attributes.get(ATTRIBUTE_TYPE.TSK_TAG_NAME.getTypeID())); - taggedFileRow.add(attributes.get(ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID())); - return taggedFileRow; - case TSK_TAG_ARTIFACT: - List taggedArtifactRow = new ArrayList<>(); - String taggedArtifactType = ""; - for (BlackboardAttribute attr : artifactData.getAttributes()) { - if (attr.getAttributeTypeID() == ATTRIBUTE_TYPE.TSK_TAGGED_ARTIFACT.getTypeID()) { - BlackboardArtifact taggedArtifact = getArtifact(attr.getValueLong()); - if (taggedArtifact != null) { - taggedArtifactType = taggedArtifact.getDisplayName(); - } - break; - } - } - taggedArtifactRow.add(taggedArtifactType); - taggedArtifactRow.add(attributes.get(ATTRIBUTE_TYPE.TSK_TAG_NAME.getTypeID())); - taggedArtifactRow.add(attributes.get(ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID())); - AbstractFile sourceFile = getAbstractFile(artifactData.getObjectID()); - if (sourceFile != null) { - taggedArtifactRow.add(sourceFile.getUniquePath()); - } else { - taggedArtifactRow.add(""); - } - return taggedArtifactRow; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_CONTACT: - List contact = new ArrayList<>(); - contact.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID())); - contact.add(attributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID())); - contact.add(attributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_HOME.getTypeID())); - contact.add(attributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_OFFICE.getTypeID())); - 
contact.add(attributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_MOBILE.getTypeID())); - contact.add(attributes.get(ATTRIBUTE_TYPE.TSK_EMAIL.getTypeID())); - contact.add(getFileUniquePath(artifactData.getObjectID())); - return contact; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_HOME.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_OFFICE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_MOBILE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_EMAIL.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_MESSAGE: - List message = new ArrayList<>(); - message.add(attributes.get(ATTRIBUTE_TYPE.TSK_MESSAGE_TYPE.getTypeID())); - message.add(attributes.get(ATTRIBUTE_TYPE.TSK_DIRECTION.getTypeID())); - message.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); - message.add(attributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM.getTypeID())); - message.add(attributes.get(ATTRIBUTE_TYPE.TSK_EMAIL_FROM.getTypeID())); - message.add(attributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO.getTypeID())); - message.add(attributes.get(ATTRIBUTE_TYPE.TSK_EMAIL_TO.getTypeID())); - message.add(attributes.get(ATTRIBUTE_TYPE.TSK_SUBJECT.getTypeID())); - message.add(attributes.get(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID())); - message.add(getFileUniquePath(artifactData.getObjectID())); - return message; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_MESSAGE_TYPE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DIRECTION.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_EMAIL_FROM.getTypeID())); + 
rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_EMAIL_TO.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_SUBJECT.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_CALLLOG: - List call_log = new ArrayList<>(); - call_log.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID())); - call_log.add(attributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID())); - call_log.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); - call_log.add(attributes.get(ATTRIBUTE_TYPE.TSK_DIRECTION.getTypeID())); - call_log.add(getFileUniquePath(artifactData.getObjectID())); - return call_log; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DIRECTION.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_CALENDAR_ENTRY: - List calEntry = new ArrayList<>(); - calEntry.add(attributes.get(ATTRIBUTE_TYPE.TSK_CALENDAR_ENTRY_TYPE.getTypeID())); - calEntry.add(attributes.get(ATTRIBUTE_TYPE.TSK_DESCRIPTION.getTypeID())); - calEntry.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_START.getTypeID())); - calEntry.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_END.getTypeID())); - calEntry.add(attributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); - calEntry.add(getFileUniquePath(artifactData.getObjectID())); - return calEntry; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_CALENDAR_ENTRY_TYPE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DESCRIPTION.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_START.getTypeID())); + 
rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_END.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_SPEED_DIAL_ENTRY: - List speedDialEntry = new ArrayList(); - speedDialEntry.add(attributes.get(ATTRIBUTE_TYPE.TSK_SHORTCUT.getTypeID())); - speedDialEntry.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID())); - speedDialEntry.add(attributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID())); - speedDialEntry.add(getFileUniquePath(artifactData.getObjectID())); - return speedDialEntry; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_SHORTCUT.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_BLUETOOTH_PAIRING: - List bluetoothEntry = new ArrayList(); - bluetoothEntry.add(attributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_NAME.getTypeID())); - bluetoothEntry.add(attributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_ID.getTypeID())); - bluetoothEntry.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); - bluetoothEntry.add(getFileUniquePath(artifactData.getObjectID())); - return bluetoothEntry; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_NAME.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_ID.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_GPS_TRACKPOINT: - List gpsTrackpoint = new ArrayList(); - gpsTrackpoint.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); - gpsTrackpoint.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); - gpsTrackpoint.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID())); - gpsTrackpoint.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); - 
gpsTrackpoint.add(attributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); - gpsTrackpoint.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); - gpsTrackpoint.add(getFileUniquePath(artifactData.getObjectID())); - return gpsTrackpoint; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_GPS_BOOKMARK: - List gpsBookmarkEntry = new ArrayList(); - gpsBookmarkEntry.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); - gpsBookmarkEntry.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); - gpsBookmarkEntry.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID())); - gpsBookmarkEntry.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); - gpsBookmarkEntry.add(attributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); - gpsBookmarkEntry.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); - gpsBookmarkEntry.add(getFileUniquePath(artifactData.getObjectID())); - return gpsBookmarkEntry; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_GPS_LAST_KNOWN_LOCATION: - List gpsLastLocation = new 
ArrayList(); - gpsLastLocation.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); - gpsLastLocation.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); - gpsLastLocation.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID())); - gpsLastLocation.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); - gpsLastLocation.add(attributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); - gpsLastLocation.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); - gpsLastLocation.add(getFileUniquePath(artifactData.getObjectID())); - return gpsLastLocation; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_GPS_SEARCH: - List gpsSearch = new ArrayList(); - gpsSearch.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); - gpsSearch.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); - gpsSearch.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID())); - gpsSearch.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); - gpsSearch.add(attributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); - gpsSearch.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); - gpsSearch.add(getFileUniquePath(artifactData.getObjectID())); - return gpsSearch; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID())); + 
rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_SERVICE_ACCOUNT: - List appAccount = new ArrayList(); - appAccount.add(attributes.get(ATTRIBUTE_TYPE.TSK_CATEGORY.getTypeID())); - appAccount.add(attributes.get(ATTRIBUTE_TYPE.TSK_USER_ID.getTypeID())); - appAccount.add(attributes.get(ATTRIBUTE_TYPE.TSK_PASSWORD.getTypeID())); - appAccount.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); - appAccount.add(attributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); - appAccount.add(attributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); - appAccount.add(attributes.get(ATTRIBUTE_TYPE.TSK_PATH.getTypeID())); - appAccount.add(attributes.get(ATTRIBUTE_TYPE.TSK_DESCRIPTION.getTypeID())); - appAccount.add(attributes.get(ATTRIBUTE_TYPE.TSK_EMAIL_REPLYTO.getTypeID())); - appAccount.add(attributes.get(ATTRIBUTE_TYPE.TSK_SERVER_NAME.getTypeID())); - appAccount.add(getFileUniquePath(artifactData.getObjectID())); - return appAccount; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_CATEGORY.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_USER_ID.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PASSWORD.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PATH.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_DESCRIPTION.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_EMAIL_REPLYTO.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_SERVER_NAME.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; case TSK_TOOL_OUTPUT: - List row 
= new ArrayList<>(); - row.add(attributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); - row.add(attributes.get(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID())); - row.add(getFileUniquePath(artifactData.getObjectID())); - return row; + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); + rowData.add(attributes.get(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID())); + rowData.add(getFileUniquePath(artifactData.getObjectID())); + break; } - return null; + rowData.add(makeCommaSeparatedList(artifactData.getTags())); + + return rowData; // RJCTODO: Is anyone checking for null here? } /** @@ -1230,52 +1169,7 @@ public class ReportGenerator { } return ""; } - - /** - * Given a tsk_file's obj_id, return the name of that file. - * - * @param objId tsk_file obj_id - * @return String name - */ - private String getFileName(long objId) { - try { - return skCase.getAbstractFileById(objId).getName(); - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Failed to get Abstract File by ID.", ex); - } - return ""; - } - - /** - * Return the file associated with a tsk_file obj_id. - * - * @param objId tsk_file obj_id - * @return AbstractFile associated with objId - */ - private AbstractFile getAbstractFile(long objId) { - try { - return skCase.getAbstractFileById(objId); - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Failed to get Abstract File by ID.", ex); - } - return null; - } - - /** - * Get a BlackboardArtifact. - * - * @param long artifactId An artifact id - * @return The BlackboardArtifact associated with the artifact id - */ - private BlackboardArtifact getArtifact(long artifactId) { - try { - return skCase.getBlackboardArtifact(artifactId); - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Failed to get blackboard artifact by ID.", ex); - } - return null; - } - + /** * Container class that holds data about an Artifact to eliminate duplicate * calls to the Sleuthkit database. 
diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java b/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java index 4b5492ebec..c3d0466119 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java @@ -568,15 +568,15 @@ public class ReportHTML implements TableReportModule { * @param sourceArtifact The artifact associated with the row. */ private void addRowDataForSourceArtifact(List row, BlackboardArtifact sourceArtifact) { - int artifactTypeID = sourceArtifact.getArtifactTypeID(); - BlackboardArtifact.ARTIFACT_TYPE type = BlackboardArtifact.ARTIFACT_TYPE.fromID(artifactTypeID); - switch (type) { - case TSK_TAG_FILE: - addRowDataForFileTagArtifact(row, sourceArtifact); - break; - default: - break; - } +// int artifactTypeID = sourceArtifact.getArtifactTypeID(); +// BlackboardArtifact.ARTIFACT_TYPE type = BlackboardArtifact.ARTIFACT_TYPE.fromID(artifactTypeID); +// switch (type) { +// case TSK_TAG_FILE: +// addRowDataForFileTagArtifact(row, sourceArtifact); +// break; +// default: +// break; +// } } /** @@ -586,63 +586,63 @@ public class ReportHTML implements TableReportModule { * @param sourceArtifact The artifact associated with the row. */ private void addRowDataForFileTagArtifact(List row, BlackboardArtifact sourceArtifact) { - try { - AbstractFile file = Case.getCurrentCase().getSleuthkitCase().getAbstractFileById(sourceArtifact.getObjectID()); - - // Don't make a local copy of the file if it is a directory or unallocated space. - if (file.isDir() || - file.getType() == TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS || - file.getType() == TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) { - row.add(""); - return; - } - - // Make a folder for the local file with the same name as the tag. 
- StringBuilder localFilePath = new StringBuilder(); - localFilePath.append(path); - HashSet tagNames = Tags.getUniqueTagNamesForArtifact(sourceArtifact); - if (!tagNames.isEmpty()) { - localFilePath.append(tagNames.iterator().next()); - } - File localFileFolder = new File(localFilePath.toString()); - if (!localFileFolder.exists()) { - localFileFolder.mkdirs(); - } - - // Construct a file name for the local file that incorporates the corresponding object id to ensure uniqueness. - String fileName = file.getName(); - String objectIdSuffix = "_" + sourceArtifact.getObjectID(); - int lastDotIndex = fileName.lastIndexOf("."); - if (lastDotIndex != -1 && lastDotIndex != 0) { - // The file name has a conventional extension. Insert the object id before the '.' of the extension. - fileName = fileName.substring(0, lastDotIndex) + objectIdSuffix + fileName.substring(lastDotIndex, fileName.length()); - } - else { - // The file has no extension or the only '.' in the file is an initial '.', as in a hidden file. - // Add the object id to the end of the file name. - fileName += objectIdSuffix; - } - localFilePath.append(File.separator); - localFilePath.append(fileName); - - // If the local file doesn't already exist, create it now. - // The existence check is necessary because it is possible to apply multiple tags with the same name to a file. - File localFile = new File(localFilePath.toString()); - if (!localFile.exists()) { - ExtractFscContentVisitor.extract(file, localFile, null, null); - } - - // Add the hyperlink to the row. A column header for it was created in startTable(). 
- StringBuilder localFileLink = new StringBuilder(); - localFileLink.append("View File"); - row.add(localFileLink.toString()); - } - catch (TskCoreException ex) { - logger.log(Level.WARNING, "Failed to get AbstractFile by ID.", ex); - row.add(""); - } +// try { +// AbstractFile file = Case.getCurrentCase().getSleuthkitCase().getAbstractFileById(sourceArtifact.getObjectID()); +// +// // Don't make a local copy of the file if it is a directory or unallocated space. +// if (file.isDir() || +// file.getType() == TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS || +// file.getType() == TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) { +// row.add(""); +// return; +// } +// +// // Make a folder for the local file with the same name as the tag. +// StringBuilder localFilePath = new StringBuilder(); +// localFilePath.append(path); +// HashSet tagNames = Tags.getUniqueTagNamesForArtifact(sourceArtifact); +// if (!tagNames.isEmpty()) { +// localFilePath.append(tagNames.iterator().next()); +// } +// File localFileFolder = new File(localFilePath.toString()); +// if (!localFileFolder.exists()) { +// localFileFolder.mkdirs(); +// } +// +// // Construct a file name for the local file that incorporates the corresponding object id to ensure uniqueness. +// String fileName = file.getName(); +// String objectIdSuffix = "_" + sourceArtifact.getObjectID(); +// int lastDotIndex = fileName.lastIndexOf("."); +// if (lastDotIndex != -1 && lastDotIndex != 0) { +// // The file name has a conventional extension. Insert the object id before the '.' of the extension. +// fileName = fileName.substring(0, lastDotIndex) + objectIdSuffix + fileName.substring(lastDotIndex, fileName.length()); +// } +// else { +// // The file has no extension or the only '.' in the file is an initial '.', as in a hidden file. +// // Add the object id to the end of the file name. 
+// fileName += objectIdSuffix; +// } +// localFilePath.append(File.separator); +// localFilePath.append(fileName); +// +// // If the local file doesn't already exist, create it now. +// // The existence check is necessary because it is possible to apply multiple tags with the same name to a file. +// File localFile = new File(localFilePath.toString()); +// if (!localFile.exists()) { +// ExtractFscContentVisitor.extract(file, localFile, null, null); +// } +// +// // Add the hyperlink to the row. A column header for it was created in startTable(). +// StringBuilder localFileLink = new StringBuilder(); +// localFileLink.append("View File"); +// row.add(localFileLink.toString()); +// } +// catch (TskCoreException ex) { +// logger.log(Level.WARNING, "Failed to get AbstractFile by ID.", ex); +// row.add(""); +// } } /** From a70390a4b8405fe994067bb4adf629f3aea115b9 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Sat, 19 Oct 2013 00:22:12 -0400 Subject: [PATCH 064/179] Use CREATED attribute, fire events more often, basic cleanup --- .../ExifParserFileIngestModule.java | 36 +++++++++---------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java b/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java index d8dae4f2cc..198bc108a5 100644 --- a/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java +++ b/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011 Basis Technology Corp. + * Copyright 2011-2013 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -61,6 +61,7 @@ public final class ExifParserFileIngestModule extends IngestModuleAbstractFile { private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName()); private static ExifParserFileIngestModule defaultInstance = null; private int filesProcessed = 0; + private boolean filesToFire = false; //file ingest modules require a private constructor //to ensure singleton instances @@ -88,6 +89,13 @@ public final class ExifParserFileIngestModule extends IngestModuleAbstractFile { return IngestModuleAbstractFile.ProcessResult.OK; } + // update the tree every 1000 files if we have EXIF data that is not being being displayed + filesProcessed++; + if ((filesToFire) && (filesProcessed % 1000 == 0)) { + services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF)); + filesToFire = false; + } + //skip unsupported if (!parsableFormat(content)) { return IngestModuleAbstractFile.ProcessResult.OK; @@ -112,15 +120,13 @@ public final class ExifParserFileIngestModule extends IngestModuleAbstractFile { if (exifDir != null) { Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL); if (date != null) { - attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), MODULE_NAME, date.getTime() / 1000)); + attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), MODULE_NAME, date.getTime() / 1000)); } } // GPS Stuff GpsDirectory gpsDir = metadata.getDirectory(GpsDirectory.class); - if (gpsDir != null) { - Rational altitude = gpsDir.getRational(GpsDirectory.TAG_GPS_ALTITUDE); GeoLocation loc = gpsDir.getGeoLocation(); if (loc != null) { double latitude = loc.getLatitude(); @@ -128,21 +134,22 @@ public final class ExifParserFileIngestModule extends IngestModuleAbstractFile { attributes.add(new 
BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID(), MODULE_NAME, latitude)); attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID(), MODULE_NAME, longitude)); } + + Rational altitude = gpsDir.getRational(GpsDirectory.TAG_GPS_ALTITUDE); if (altitude != null) { attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID(), MODULE_NAME, altitude.doubleValue())); } } - // Device info ExifIFD0Directory devDir = metadata.getDirectory(ExifIFD0Directory.class); if (devDir != null) { String model = devDir.getString(ExifIFD0Directory.TAG_MODEL); - String make = devDir.getString(ExifIFD0Directory.TAG_MAKE); - if (model != null && !model.isEmpty()) { attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), MODULE_NAME, model)); } + + String make = devDir.getString(ExifIFD0Directory.TAG_MAKE); if (make != null && !make.isEmpty()) { attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE.getTypeID(), MODULE_NAME, make)); } @@ -152,10 +159,7 @@ public final class ExifParserFileIngestModule extends IngestModuleAbstractFile { if (!attributes.isEmpty()) { BlackboardArtifact bba = f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF); bba.addAttributes(attributes); - ++filesProcessed; - if (filesProcessed % 100 == 0) { - services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF)); - } + filesToFire = true; } return IngestModuleAbstractFile.ProcessResult.OK; @@ -232,12 +236,10 @@ public final class ExifParserFileIngestModule extends IngestModuleAbstractFile { @Override public void complete() { logger.log(Level.INFO, "completed exif parsing " + this.toString()); - - if (filesProcessed > 0) { + if (filesToFire) { //send the final new data event services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF)); } - //module specific cleanup due to completion here } 
@Override @@ -252,7 +254,7 @@ public final class ExifParserFileIngestModule extends IngestModuleAbstractFile { @Override public String getDescription() { - return "Ingests .jpg and .jpeg files and retrieves their metadata."; + return "Ingests JPEG files and retrieves their EXIF metadata."; } @Override @@ -261,13 +263,11 @@ public final class ExifParserFileIngestModule extends IngestModuleAbstractFile { logger.log(Level.INFO, "init() " + this.toString()); filesProcessed = 0; + filesToFire = false; } @Override public void stop() { - logger.log(Level.INFO, "stop()"); - - //module specific cleanup due to interruption here } @Override From e461eb0a4c04ba27b26c2d23494048b16bc0d5ba Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Sat, 19 Oct 2013 00:24:21 -0400 Subject: [PATCH 065/179] minor comment addition --- Core/src/org/sleuthkit/autopsy/ingest/IngestScheduler.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestScheduler.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestScheduler.java index 2d913a3ea2..f0fa01b306 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestScheduler.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestScheduler.java @@ -702,6 +702,11 @@ class IngestScheduler { HIGH_PRI_PATHS.add(Pattern.compile("^ProgramData", Pattern.CASE_INSENSITIVE)); } + /** + * Get the scheduling priority for a given file. 
+ * @param abstractFile + * @return + */ static AbstractFilePriotity.Priority getPriority(final AbstractFile abstractFile) { if (!abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.FS)) { //quickly filter out unstructured content From ef5314b5d94df4ffa6923313ad0abe6c81b44a30 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Sat, 19 Oct 2013 00:25:18 -0400 Subject: [PATCH 066/179] minor cleanup of code --- .../autopsy/hashdatabase/HashDbIngestModule.java | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java index 5d83630b62..3a3ff6dd0e 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java @@ -24,7 +24,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.logging.Level; -import org.openide.util.Exceptions; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.PipelineContext; @@ -38,10 +37,6 @@ import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; -import org.sleuthkit.datamodel.Content; -import org.sleuthkit.datamodel.ContentVisitor; -import org.sleuthkit.datamodel.DerivedFile; -import org.sleuthkit.datamodel.File; import org.sleuthkit.datamodel.Hash; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; @@ -63,8 +58,6 @@ public class HashDbIngestModule extends IngestModuleAbstractFile { private boolean nsrlIsSet; private boolean knownBadIsSet; private boolean calcHashesIsSet; - private HashDb nsrlSet; - private int 
nsrlPointer; static long calctime = 0; static long lookuptime = 0; private Map knownBadSets = new HashMap<>(); @@ -88,7 +81,6 @@ public class HashDbIngestModule extends IngestModuleAbstractFile { this.skCase = Case.getCurrentCase().getSleuthkitCase(); try { HashDbXML hdbxml = HashDbXML.getCurrent(); - nsrlSet = null; knownBadSets.clear(); skCase.clearLookupDatabases(); nsrlIsSet = false; @@ -98,8 +90,8 @@ public class HashDbIngestModule extends IngestModuleAbstractFile { HashDb nsrl = hdbxml.getNSRLSet(); if (nsrl != null && nsrl.getUseForIngest() && IndexStatus.isIngestible(nsrl.status())) { nsrlIsSet = true; - this.nsrlSet = nsrl; - nsrlPointer = skCase.setNSRLDatabase(nsrl.getDatabasePaths().get(0)); + // @@@ Unchecked return value + skCase.setNSRLDatabase(nsrl.getDatabasePaths().get(0)); } for (HashDb db : hdbxml.getKnownBadSets()) { @@ -311,7 +303,7 @@ public class HashDbIngestModule extends IngestModuleAbstractFile { return ProcessResult.ERROR; } } - + // look up in known bad first TskData.FileKnown status = TskData.FileKnown.UKNOWN; @@ -320,7 +312,6 @@ public class HashDbIngestModule extends IngestModuleAbstractFile { if (knownBadIsSet) { for (Map.Entry entry : knownBadSets.entrySet()) { - try { long lookupstart = System.currentTimeMillis(); status = skCase.knownBadLookupMd5(md5Hash, entry.getKey()); From d7153b0336c6b19f6bd92801b89ae1388963cb50 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Sat, 19 Oct 2013 12:16:14 -0400 Subject: [PATCH 067/179] Minor cleanup --- .../sevenzip/SevenZipIngestModule.java | 50 ++++++------------- 1 file changed, 15 insertions(+), 35 deletions(-) diff --git a/SevenZip/src/org/sleuthkit/autopsy/sevenzip/SevenZipIngestModule.java b/SevenZip/src/org/sleuthkit/autopsy/sevenzip/SevenZipIngestModule.java index 991662764d..bbec25d70a 100644 --- a/SevenZip/src/org/sleuthkit/autopsy/sevenzip/SevenZipIngestModule.java +++ b/SevenZip/src/org/sleuthkit/autopsy/sevenzip/SevenZipIngestModule.java @@ -30,7 +30,6 @@ import 
java.util.Collections; import java.util.Date; import java.util.List; import java.util.logging.Level; -import javax.swing.JPanel; import net.sf.sevenzipjbinding.ISequentialOutStream; import net.sf.sevenzipjbinding.ISevenZipInArchive; import org.sleuthkit.autopsy.coreutils.Logger; @@ -51,6 +50,7 @@ import org.sleuthkit.autopsy.ingest.PipelineContext; import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMonitor; import org.sleuthkit.autopsy.ingest.ModuleContentEvent; +import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute; @@ -74,7 +74,6 @@ public final class SevenZipIngestModule extends IngestModuleAbstractFile { final public static String MODULE_VERSION = "1.0"; private IngestServices services; private volatile int messageID = 0; - private int processedFiles = 0; private boolean initialized = false; private static SevenZipIngestModule instance = null; //TODO use content type detection instead of extensions @@ -115,7 +114,6 @@ public final class SevenZipIngestModule extends IngestModuleAbstractFile { @Override public void init(IngestModuleInit initContext) { - logger.log(Level.INFO, "init()"); services = IngestServices.getDefault(); initialized = false; @@ -185,8 +183,8 @@ public final class SevenZipIngestModule extends IngestModuleAbstractFile { try { if (abstractFile.hasChildren()) { //check if local unpacked dir exists - final String localRootPath = getLocalRootRelPath(abstractFile); - final String localRootAbsPath = getLocalRootAbsPath(localRootPath); + final String uniqueFileName = getUniqueName(abstractFile); + final String localRootAbsPath = getLocalRootAbsPath(uniqueFileName); if (new File(localRootAbsPath).exists()) { logger.log(Level.INFO, "File already has been processed as it has children and local unpacked file, skipping: " + abstractFile.getName()); 
return ProcessResult.OK; @@ -197,10 +195,7 @@ public final class SevenZipIngestModule extends IngestModuleAbstractFile { return ProcessResult.OK; } - logger.log(Level.INFO, "Processing with " + MODULE_NAME + ": " + abstractFile.getName()); - ++processedFiles; - List unpackedFiles = unpack(abstractFile); if (!unpackedFiles.isEmpty()) { @@ -208,8 +203,6 @@ public final class SevenZipIngestModule extends IngestModuleAbstractFile { rescheduleNewFiles(pipelineContext, unpackedFiles); } - //process, return error if occurred - return ProcessResult.OK; } @@ -230,7 +223,7 @@ public final class SevenZipIngestModule extends IngestModuleAbstractFile { * @param archiveFile * @return */ - private String getLocalRootRelPath(AbstractFile archiveFile) { + private String getUniqueName(AbstractFile archiveFile) { return archiveFile.getName() + "_" + archiveFile.getId(); } @@ -238,7 +231,7 @@ public final class SevenZipIngestModule extends IngestModuleAbstractFile { * Get local abs path to the unpacked archive root * * @param localRootRelPath relative path to archive, from - * getLocalRootRelPath() + * getUniqueName() * @return */ private String getLocalRootAbsPath(String localRootRelPath) { @@ -297,10 +290,6 @@ public final class SevenZipIngestModule extends IngestModuleAbstractFile { logger.log(Level.SEVERE, "Error getting archive item size and cannot detect if zipbomb. 
", ex); return false; } - - - - } /** @@ -350,8 +339,8 @@ public final class SevenZipIngestModule extends IngestModuleAbstractFile { final ISimpleInArchive simpleInArchive = inArchive.getSimpleInterface(); //setup the archive local root folder - final String localRootPath = getLocalRootRelPath(archiveFile); - final String localRootAbsPath = getLocalRootAbsPath(localRootPath); + final String uniqueFileName = getUniqueName(archiveFile); + final String localRootAbsPath = getLocalRootAbsPath(uniqueFileName); final File localRoot = new File(localRootAbsPath); if (!localRoot.exists()) { try { @@ -364,7 +353,7 @@ public final class SevenZipIngestModule extends IngestModuleAbstractFile { } //initialize tree hierarchy to keep track of unpacked file structure - UnpackedTree uTree = new UnpackedTree(unpackDir + "/" + localRootPath, archiveFile, fileManager); + UnpackedTree uTree = new UnpackedTree(unpackDir + "/" + uniqueFileName, archiveFile, fileManager); long freeDiskSpace = services.getFreeDiskSpace(); @@ -453,7 +442,7 @@ public final class SevenZipIngestModule extends IngestModuleAbstractFile { } } - final String localFileRelPath = localRootPath + File.separator + extractedPath; + final String localFileRelPath = uniqueFileName + File.separator + extractedPath; //final String localRelPath = unpackDir + File.separator + localFileRelPath; final String localAbsPath = unpackDirPath + File.separator + localFileRelPath; @@ -565,9 +554,11 @@ public final class SevenZipIngestModule extends IngestModuleAbstractFile { if (hasEncrypted) { String encryptionType = fullEncryption ? ENCRYPTION_FULL : ENCRYPTION_FILE_LEVEL; try { - BlackboardArtifact generalInfo = archiveFile.newArtifact(ARTIFACT_TYPE.TSK_GEN_INFO); + BlackboardArtifact generalInfo = archiveFile.getGenInfoArtifact(); generalInfo.addAttribute(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_ENCRYPTION_DETECTED.getTypeID(), MODULE_NAME, encryptionType)); + //@@@ We don't fire here because GEN_INFO isn't displayed in the tree.... 
Need to address how these should be displayed + //services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF)); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error creating blackboard artifact for encryption detected for file: " + archiveFile, ex); } @@ -580,29 +571,20 @@ public final class SevenZipIngestModule extends IngestModuleAbstractFile { services.postMessage(IngestMessage.createWarningMessage(++messageID, instance, msg, details)); } - return unpackedFiles; } @Override public void complete() { - logger.log(Level.INFO, "complete()"); if (initialized == false) { return; } - - //cleanup if any - archiveDepthCountTree = null; - + archiveDepthCountTree = null; } @Override public void stop() { - logger.log(Level.INFO, "stop()"); - - //cleanup if any archiveDepthCountTree = null; - } @Override @@ -626,13 +608,13 @@ public final class SevenZipIngestModule extends IngestModuleAbstractFile { return false; } - - public boolean isSupported(AbstractFile file) { + private boolean isSupported(AbstractFile file) { String fileNameLower = file.getName().toLowerCase(); int dotI = fileNameLower.lastIndexOf("."); if (dotI == -1 || dotI == fileNameLower.length() - 1) { return false; //no extension } + final String extension = fileNameLower.substring(dotI + 1); for (int i = 0; i < SUPPORTED_EXTENSIONS.length; ++i) { if (extension.equals(SUPPORTED_EXTENSIONS[i])) { @@ -643,7 +625,6 @@ public final class SevenZipIngestModule extends IngestModuleAbstractFile { //if no extension match, check for zip signature //(note, in near future, we will use pre-detected content type) return isZipFileHeader(file); - } /** @@ -672,7 +653,6 @@ public final class SevenZipIngestModule extends IngestModuleAbstractFile { int signature = bytes.getInt(); return signature == ZIP_SIGNATURE_BE; - } /** From 77fdbcf6a45d81bb0c4e5697f3483279de44e4bd Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Sat, 19 Oct 2013 12:26:55 -0400 Subject: 
[PATCH 068/179] made line endings more consistent --- .gitattributes | 13 + .../autopsy/report/FileReportDataTypes.java | 276 +- .../autopsy/report/FileReportModule.java | 120 +- .../autopsy/report/FileReportText.java | 276 +- .../report/ReportWizardFileOptionsPanel.java | 192 +- ExifParser/nbproject/project.xml | 60 +- HashDatabase/nbproject/project.xml | 180 +- KeywordSearch/nbproject/project.xml | 830 +++--- .../keywordsearch/TikaLanguageIdentifier.java | 120 +- RecentActivity/nbproject/project.xml | 104 +- ScalpelCarver/nbproject/project.xml | 44 +- SevenZip/nbproject/project.xml | 96 +- Testing/nbproject/project.xml | 138 +- Timeline/nbproject/project.xml | 226 +- .../sleuthkit/autopsy/timeline/Timeline.java | 2354 ++++++++--------- .../netbeans/core/startup/Bundle.properties | 4 +- .../core/windows/view/ui/Bundle.properties | 10 +- thunderbirdparser/nbproject/project.xml | 62 +- 18 files changed, 2559 insertions(+), 2546 deletions(-) create mode 100644 .gitattributes diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000..ea6aa14ebc --- /dev/null +++ b/.gitattributes @@ -0,0 +1,13 @@ +*.java text diff=java + +*.txt text +*.sh text +*.mf text +*.xml text +*.form text +*.properties text +*.html text diff=html +*.dox text +Doxyfile text + +*.py text diff=python diff --git a/Core/src/org/sleuthkit/autopsy/report/FileReportDataTypes.java b/Core/src/org/sleuthkit/autopsy/report/FileReportDataTypes.java index 8cc0b63049..c46c59ca7d 100755 --- a/Core/src/org/sleuthkit/autopsy/report/FileReportDataTypes.java +++ b/Core/src/org/sleuthkit/autopsy/report/FileReportDataTypes.java @@ -1,138 +1,138 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2013 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.report; - -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.TskCoreException; -import org.sleuthkit.datamodel.TskData; - -/** - * Represents Column Headers for FileList Reports. - * - * Encapsulates functionality for getting column values from Files. - * - * @author jwallace - */ -public enum FileReportDataTypes { - - NAME("Name") { - @Override - public String getValue(AbstractFile file) { - return file.getName(); - } - }, - FILE_EXT("File Extension") { - @Override - public String getValue(AbstractFile file) { - String name = file.getName(); - int extIndex = name.lastIndexOf("."); - return (extIndex == -1 ? 
"" : name.substring(extIndex)); - } - }, - FILE_TYPE("File Type") { - @Override - public String getValue(AbstractFile file) { - return file.getMetaTypeAsString(); - } - }, - DELETED("Is Deleted") { - @Override - public String getValue(AbstractFile file) { - if (file.getMetaFlagsAsString().equals(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC.toString())) { - return "yes"; - } - return ""; - } - }, - A_TIME("Last Accessed") { - @Override - public String getValue(AbstractFile file) { - return file.getAtimeAsDate(); - } - }, - CR_TIME("File Created") { - @Override - public String getValue(AbstractFile file) { - return file.getCrtimeAsDate(); - } - }, - M_TIME("Last Modified") { - @Override - public String getValue(AbstractFile file) { - return file.getMtimeAsDate(); - } - }, - SIZE("Size") { - @Override - public String getValue(AbstractFile file) { - return String.valueOf(file.getSize()); - } - }, - ADDRESS("Address") { - @Override - public String getValue(AbstractFile file) { - return String.valueOf(file.getMetaAddr()); - } - }, - HASH_VALUE("Hash Value") { - @Override - public String getValue(AbstractFile file) { - return file.getMd5Hash(); - } - }, - KNOWN_STATUS("Known Status") { - @Override - public String getValue(AbstractFile file) { - return file.getKnown().getName(); - } - }, - PERMISSIONS("Permissions") { - @Override - public String getValue(AbstractFile file) { - return file.getModesAsString(); - } - }, - FULL_PATH("Full Path") { - @Override - public String getValue(AbstractFile file) { - try { - return file.getUniquePath(); - } catch (TskCoreException ex) { - return ""; - } - } - }; - - private String name; - - FileReportDataTypes(String name) { - this.name = name; - } - - public String getName() { - return this.name; - } - - /** - * Get the value of the column from the file. - * - * @return - */ - public abstract String getValue(AbstractFile file); -} +/* + * Autopsy Forensic Browser + * + * Copyright 2013 Basis Technology Corp. 
+ * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.report; + +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.TskData; + +/** + * Represents Column Headers for FileList Reports. + * + * Encapsulates functionality for getting column values from Files. + * + * @author jwallace + */ +public enum FileReportDataTypes { + + NAME("Name") { + @Override + public String getValue(AbstractFile file) { + return file.getName(); + } + }, + FILE_EXT("File Extension") { + @Override + public String getValue(AbstractFile file) { + String name = file.getName(); + int extIndex = name.lastIndexOf("."); + return (extIndex == -1 ? 
"" : name.substring(extIndex)); + } + }, + FILE_TYPE("File Type") { + @Override + public String getValue(AbstractFile file) { + return file.getMetaTypeAsString(); + } + }, + DELETED("Is Deleted") { + @Override + public String getValue(AbstractFile file) { + if (file.getMetaFlagsAsString().equals(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC.toString())) { + return "yes"; + } + return ""; + } + }, + A_TIME("Last Accessed") { + @Override + public String getValue(AbstractFile file) { + return file.getAtimeAsDate(); + } + }, + CR_TIME("File Created") { + @Override + public String getValue(AbstractFile file) { + return file.getCrtimeAsDate(); + } + }, + M_TIME("Last Modified") { + @Override + public String getValue(AbstractFile file) { + return file.getMtimeAsDate(); + } + }, + SIZE("Size") { + @Override + public String getValue(AbstractFile file) { + return String.valueOf(file.getSize()); + } + }, + ADDRESS("Address") { + @Override + public String getValue(AbstractFile file) { + return String.valueOf(file.getMetaAddr()); + } + }, + HASH_VALUE("Hash Value") { + @Override + public String getValue(AbstractFile file) { + return file.getMd5Hash(); + } + }, + KNOWN_STATUS("Known Status") { + @Override + public String getValue(AbstractFile file) { + return file.getKnown().getName(); + } + }, + PERMISSIONS("Permissions") { + @Override + public String getValue(AbstractFile file) { + return file.getModesAsString(); + } + }, + FULL_PATH("Full Path") { + @Override + public String getValue(AbstractFile file) { + try { + return file.getUniquePath(); + } catch (TskCoreException ex) { + return ""; + } + } + }; + + private String name; + + FileReportDataTypes(String name) { + this.name = name; + } + + public String getName() { + return this.name; + } + + /** + * Get the value of the column from the file. 
+ * + * @return + */ + public abstract String getValue(AbstractFile file); +} diff --git a/Core/src/org/sleuthkit/autopsy/report/FileReportModule.java b/Core/src/org/sleuthkit/autopsy/report/FileReportModule.java index 344643b2ae..50c74a55c0 100755 --- a/Core/src/org/sleuthkit/autopsy/report/FileReportModule.java +++ b/Core/src/org/sleuthkit/autopsy/report/FileReportModule.java @@ -1,60 +1,60 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2013 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.report; - -import java.util.List; -import org.sleuthkit.datamodel.AbstractFile; - -/** - * A Report Module that reports information on files in a case. - * - * @author jwallace - */ -public interface FileReportModule extends ReportModule { - /** - * Initialize the report which will be stored at the given path. - * @param path - */ - public void startReport(String path); - - /** - * End the report. - * Will be called after the entire report has been written. - */ - public void endReport(); - - /** - * Start the file list table. - * @param headers The columns that should be included in the table. - */ - public void startTable(List headers); - - /** - * Add the given AbstractFile as a row in the table. - * Guaranteed to be called between startTable and endTable. - * @param toAdd the AbstractFile to be added. 
- * @param columns the columns that should be included - */ - public void addRow(AbstractFile toAdd, List columns); - - /** - * Close the table. - */ - public void endTable(); -} +/* + * Autopsy Forensic Browser + * + * Copyright 2013 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.report; + +import java.util.List; +import org.sleuthkit.datamodel.AbstractFile; + +/** + * A Report Module that reports information on files in a case. + * + * @author jwallace + */ +public interface FileReportModule extends ReportModule { + /** + * Initialize the report which will be stored at the given path. + * @param path + */ + public void startReport(String path); + + /** + * End the report. + * Will be called after the entire report has been written. + */ + public void endReport(); + + /** + * Start the file list table. + * @param headers The columns that should be included in the table. + */ + public void startTable(List headers); + + /** + * Add the given AbstractFile as a row in the table. + * Guaranteed to be called between startTable and endTable. + * @param toAdd the AbstractFile to be added. + * @param columns the columns that should be included + */ + public void addRow(AbstractFile toAdd, List columns); + + /** + * Close the table. 
+ */ + public void endTable(); +} diff --git a/Core/src/org/sleuthkit/autopsy/report/FileReportText.java b/Core/src/org/sleuthkit/autopsy/report/FileReportText.java index 05192e6910..45ef9d736c 100755 --- a/Core/src/org/sleuthkit/autopsy/report/FileReportText.java +++ b/Core/src/org/sleuthkit/autopsy/report/FileReportText.java @@ -1,138 +1,138 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2013 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.report; - -import java.io.BufferedWriter; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.OutputStreamWriter; -import java.io.Writer; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.logging.Level; -import java.util.logging.Logger; -import org.sleuthkit.datamodel.AbstractFile; - -/** - * A Tab-delimited text report of the files in the case. 
- * - * @author jwallace - */ -public class FileReportText implements FileReportModule { - private static final Logger logger = Logger.getLogger(FileReportText.class.getName()); - private String reportPath; - private Writer out; - private static final String FILE_NAME = "file-report.txt"; - - private static FileReportText instance; - - // Get the default implementation of this report - public static synchronized FileReportText getDefault() { - if (instance == null) { - instance = new FileReportText(); - } - return instance; - } - - @Override - public void startReport(String path) { - this.reportPath = path + FILE_NAME; - try { - out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(this.reportPath))); - } catch (IOException ex) { - logger.log(Level.WARNING, "Failed to create report text file", ex); - } - } - - @Override - public void endReport() { - if (out != null) { - try { - out.close(); - } catch (IOException ex) { - logger.log(Level.WARNING, "Could not close output writer when ending report.", ex); - } - } - } - - private String getTabDelimitedList(List list) { - StringBuilder output = new StringBuilder(); - Iterator it = list.iterator(); - while(it.hasNext()) { - output.append(it.next()).append((it.hasNext() ? 
"\t" : System.lineSeparator())); - } - return output.toString(); - } - - @Override - public void startTable(List headers) { - List titles = new ArrayList<>(); - for(FileReportDataTypes col : headers) { - titles.add(col.getName()); - } - try { - out.write(getTabDelimitedList(titles)); - } catch (IOException ex) { - logger.log(Level.WARNING, "Error when writing headers to report file: {0}", ex); - } - } - - @Override - public void addRow(AbstractFile toAdd, List columns) { - List cells = new ArrayList<>(); - for(FileReportDataTypes type : columns) { - cells.add(type.getValue(toAdd)); - } - try { - out.write(getTabDelimitedList(cells)); - } catch (IOException ex) { - logger.log(Level.WARNING, "Error when writing row to report file: {0}", ex); - } - } - - @Override - public void endTable() { - try { - out.write(System.lineSeparator()); - } catch (IOException ex) { - logger.log(Level.WARNING, "Error when closing table: {0}", ex); - } - } - - @Override - public String getName() { - return "Files - Text"; - } - - @Override - public String getDescription() { - return "A tab delimited text file containing information about files in the case."; - } - - @Override - public String getExtension() { - return ".txt"; - } - - @Override - public String getFilePath() { - return FILE_NAME; - } -} +/* + * Autopsy Forensic Browser + * + * Copyright 2013 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.report; + +import java.io.BufferedWriter; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.sleuthkit.datamodel.AbstractFile; + +/** + * A Tab-delimited text report of the files in the case. + * + * @author jwallace + */ +public class FileReportText implements FileReportModule { + private static final Logger logger = Logger.getLogger(FileReportText.class.getName()); + private String reportPath; + private Writer out; + private static final String FILE_NAME = "file-report.txt"; + + private static FileReportText instance; + + // Get the default implementation of this report + public static synchronized FileReportText getDefault() { + if (instance == null) { + instance = new FileReportText(); + } + return instance; + } + + @Override + public void startReport(String path) { + this.reportPath = path + FILE_NAME; + try { + out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(this.reportPath))); + } catch (IOException ex) { + logger.log(Level.WARNING, "Failed to create report text file", ex); + } + } + + @Override + public void endReport() { + if (out != null) { + try { + out.close(); + } catch (IOException ex) { + logger.log(Level.WARNING, "Could not close output writer when ending report.", ex); + } + } + } + + private String getTabDelimitedList(List list) { + StringBuilder output = new StringBuilder(); + Iterator it = list.iterator(); + while(it.hasNext()) { + output.append(it.next()).append((it.hasNext() ? 
"\t" : System.lineSeparator())); + } + return output.toString(); + } + + @Override + public void startTable(List headers) { + List titles = new ArrayList<>(); + for(FileReportDataTypes col : headers) { + titles.add(col.getName()); + } + try { + out.write(getTabDelimitedList(titles)); + } catch (IOException ex) { + logger.log(Level.WARNING, "Error when writing headers to report file: {0}", ex); + } + } + + @Override + public void addRow(AbstractFile toAdd, List columns) { + List cells = new ArrayList<>(); + for(FileReportDataTypes type : columns) { + cells.add(type.getValue(toAdd)); + } + try { + out.write(getTabDelimitedList(cells)); + } catch (IOException ex) { + logger.log(Level.WARNING, "Error when writing row to report file: {0}", ex); + } + } + + @Override + public void endTable() { + try { + out.write(System.lineSeparator()); + } catch (IOException ex) { + logger.log(Level.WARNING, "Error when closing table: {0}", ex); + } + } + + @Override + public String getName() { + return "Files - Text"; + } + + @Override + public String getDescription() { + return "A tab delimited text file containing information about files in the case."; + } + + @Override + public String getExtension() { + return ".txt"; + } + + @Override + public String getFilePath() { + return FILE_NAME; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportWizardFileOptionsPanel.java b/Core/src/org/sleuthkit/autopsy/report/ReportWizardFileOptionsPanel.java index 2e09b0d5bd..f6fcb9afe1 100755 --- a/Core/src/org/sleuthkit/autopsy/report/ReportWizardFileOptionsPanel.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportWizardFileOptionsPanel.java @@ -1,96 +1,96 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2013 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.report; - -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import javax.swing.JButton; -import javax.swing.event.ChangeListener; -import org.openide.WizardDescriptor; -import org.openide.util.HelpCtx; - -/** - * Wizard panel that allows configuration of File Report options. - * - * @author jwallace - */ -public class ReportWizardFileOptionsPanel implements WizardDescriptor.FinishablePanel{ - private WizardDescriptor wiz; - private ReportWizardFileOptionsVisualPanel component; - private JButton finishButton; - - ReportWizardFileOptionsPanel() { - finishButton = new JButton("Finish"); - finishButton.setEnabled(false); - - finishButton.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - wiz.doFinishClick(); - }; - }); - } - - public void setFinish(boolean enable) { - finishButton.setEnabled(enable); - } - - @Override - public boolean isFinishPanel() { - return true; - } - - @Override - public ReportWizardFileOptionsVisualPanel getComponent() { - if (component == null) { - component = new ReportWizardFileOptionsVisualPanel(this); - } - return component; - } - - @Override - public HelpCtx getHelp() { - return HelpCtx.DEFAULT_HELP; - } - - @Override - public void readSettings(WizardDescriptor data) { - this.wiz = data; - wiz.setOptions(new Object[] {WizardDescriptor.PREVIOUS_OPTION, WizardDescriptor.NEXT_OPTION, finishButton, WizardDescriptor.CANCEL_OPTION}); - } - - @Override - public void storeSettings(WizardDescriptor data) { - 
data.putProperty("fileReportOptions", getComponent().getFileReportOptions()); - } - - @Override - public boolean isValid() { - return true; - } - - @Override - public void addChangeListener(ChangeListener cl) { - } - - @Override - public void removeChangeListener(ChangeListener cl) { - } - -} +/* + * Autopsy Forensic Browser + * + * Copyright 2013 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.report; + +import java.awt.event.ActionEvent; +import java.awt.event.ActionListener; +import javax.swing.JButton; +import javax.swing.event.ChangeListener; +import org.openide.WizardDescriptor; +import org.openide.util.HelpCtx; + +/** + * Wizard panel that allows configuration of File Report options. 
+ * + * @author jwallace + */ +public class ReportWizardFileOptionsPanel implements WizardDescriptor.FinishablePanel{ + private WizardDescriptor wiz; + private ReportWizardFileOptionsVisualPanel component; + private JButton finishButton; + + ReportWizardFileOptionsPanel() { + finishButton = new JButton("Finish"); + finishButton.setEnabled(false); + + finishButton.addActionListener(new ActionListener() { + @Override + public void actionPerformed(ActionEvent e) { + wiz.doFinishClick(); + }; + }); + } + + public void setFinish(boolean enable) { + finishButton.setEnabled(enable); + } + + @Override + public boolean isFinishPanel() { + return true; + } + + @Override + public ReportWizardFileOptionsVisualPanel getComponent() { + if (component == null) { + component = new ReportWizardFileOptionsVisualPanel(this); + } + return component; + } + + @Override + public HelpCtx getHelp() { + return HelpCtx.DEFAULT_HELP; + } + + @Override + public void readSettings(WizardDescriptor data) { + this.wiz = data; + wiz.setOptions(new Object[] {WizardDescriptor.PREVIOUS_OPTION, WizardDescriptor.NEXT_OPTION, finishButton, WizardDescriptor.CANCEL_OPTION}); + } + + @Override + public void storeSettings(WizardDescriptor data) { + data.putProperty("fileReportOptions", getComponent().getFileReportOptions()); + } + + @Override + public boolean isValid() { + return true; + } + + @Override + public void addChangeListener(ChangeListener cl) { + } + + @Override + public void removeChangeListener(ChangeListener cl) { + } + +} diff --git a/ExifParser/nbproject/project.xml b/ExifParser/nbproject/project.xml index b5d8819e79..da91e0b898 100644 --- a/ExifParser/nbproject/project.xml +++ b/ExifParser/nbproject/project.xml @@ -1,30 +1,30 @@ - - - org.netbeans.modules.apisupport.project - - - org.sleuthkit.autopsy.exifparser - - - - org.sleuthkit.autopsy.core - - - - 9 - 7.0 - - - - - - ext/xmpcore.jar - release/modules/ext/xmpcore.jar - - - ext/metadata-extractor-2.6.2.jar - 
release/modules/ext/metadata-extractor-2.6.2.jar - - - - + + + org.netbeans.modules.apisupport.project + + + org.sleuthkit.autopsy.exifparser + + + + org.sleuthkit.autopsy.core + + + + 9 + 7.0 + + + + + + ext/xmpcore.jar + release/modules/ext/xmpcore.jar + + + ext/metadata-extractor-2.6.2.jar + release/modules/ext/metadata-extractor-2.6.2.jar + + + + diff --git a/HashDatabase/nbproject/project.xml b/HashDatabase/nbproject/project.xml index 3a743f01d0..b650bf0ecc 100644 --- a/HashDatabase/nbproject/project.xml +++ b/HashDatabase/nbproject/project.xml @@ -1,90 +1,90 @@ - - - org.netbeans.modules.apisupport.project - - - org.sleuthkit.autopsy.hashdatabase - - - - org.netbeans.api.progress - - - - 1 - 1.24.1 - - - - org.netbeans.modules.options.api - - - - 1 - 1.26.1 - - - - org.openide.awt - - - - 7.31.1 - - - - org.openide.dialogs - - - - 7.20.1 - - - - org.openide.nodes - - - - 7.28.1 - - - - org.openide.util - - - - 8.15.1 - - - - org.openide.util.lookup - - - - 8.15.1 - - - - org.openide.windows - - - - 6.40.1 - - - - org.sleuthkit.autopsy.core - - - - 9 - 7.0 - - - - - org.sleuthkit.autopsy.hashdatabase - - - - + + + org.netbeans.modules.apisupport.project + + + org.sleuthkit.autopsy.hashdatabase + + + + org.netbeans.api.progress + + + + 1 + 1.24.1 + + + + org.netbeans.modules.options.api + + + + 1 + 1.26.1 + + + + org.openide.awt + + + + 7.31.1 + + + + org.openide.dialogs + + + + 7.20.1 + + + + org.openide.nodes + + + + 7.28.1 + + + + org.openide.util + + + + 8.15.1 + + + + org.openide.util.lookup + + + + 8.15.1 + + + + org.openide.windows + + + + 6.40.1 + + + + org.sleuthkit.autopsy.core + + + + 9 + 7.0 + + + + + org.sleuthkit.autopsy.hashdatabase + + + + diff --git a/KeywordSearch/nbproject/project.xml b/KeywordSearch/nbproject/project.xml index 215c6068ba..455c7745da 100644 --- a/KeywordSearch/nbproject/project.xml +++ b/KeywordSearch/nbproject/project.xml @@ -1,415 +1,415 @@ - - - org.netbeans.modules.apisupport.project - - - 
org.sleuthkit.autopsy.keywordsearch - - - - org.netbeans.api.progress - - - - 1 - 1.24.1 - - - - org.netbeans.modules.javahelp - - - - 1 - 2.22.1 - - - - org.netbeans.modules.options.api - - - - 1 - 1.26.1 - - - - org.netbeans.modules.settings - - - - 1 - 1.31.1 - - - - org.openide.awt - - - - 7.31.1 - - - - org.openide.modules - - - - 7.23.1 - - - - org.openide.nodes - - - - 7.21.1 - - - - org.openide.util - - - - 8.15.1 - - - - org.openide.util.lookup - - - - 8.8.1 - - - - org.openide.windows - - - - 6.40.1 - - - - org.sleuthkit.autopsy.core - - - - 9 - 7.0 - - - - - org.apache.commons.lang - org.apache.commons.lang.builder - org.apache.commons.lang.enums - org.apache.commons.lang.exception - org.apache.commons.lang.math - org.apache.commons.lang.mutable - org.apache.commons.lang.text - org.apache.commons.lang.time - org.apache.commons.logging - org.apache.commons.logging.impl - org.apache.tika - org.apache.tika.config - org.apache.tika.detect - org.apache.tika.exception - org.apache.tika.extractor - org.apache.tika.fork - org.apache.tika.io - org.apache.tika.language - org.apache.tika.metadata - org.apache.tika.mime - org.apache.tika.parser - org.apache.tika.parser.asm - org.apache.tika.parser.audio - org.apache.tika.parser.chm - org.apache.tika.parser.chm.accessor - org.apache.tika.parser.chm.assertion - org.apache.tika.parser.chm.core - org.apache.tika.parser.chm.exception - org.apache.tika.parser.chm.lzx - org.apache.tika.parser.crypto - org.apache.tika.parser.dwg - org.apache.tika.parser.epub - org.apache.tika.parser.executable - org.apache.tika.parser.external - org.apache.tika.parser.feed - org.apache.tika.parser.font - org.apache.tika.parser.hdf - org.apache.tika.parser.html - org.apache.tika.parser.image - org.apache.tika.parser.image.xmp - org.apache.tika.parser.internal - org.apache.tika.parser.iptc - org.apache.tika.parser.iwork - org.apache.tika.parser.jpeg - org.apache.tika.parser.mail - org.apache.tika.parser.mbox - org.apache.tika.parser.microsoft 
- org.apache.tika.parser.microsoft.ooxml - org.apache.tika.parser.mp3 - org.apache.tika.parser.mp4 - org.apache.tika.parser.netcdf - org.apache.tika.parser.odf - org.apache.tika.parser.opendocument - org.apache.tika.parser.pdf - org.apache.tika.parser.pkg - org.apache.tika.parser.prt - org.apache.tika.parser.rtf - org.apache.tika.parser.txt - org.apache.tika.parser.video - org.apache.tika.parser.xml - org.apache.tika.sax - org.apache.tika.sax.xpath - org.apache.tika.utils - org.sleuthkit.autopsy.keywordsearch - - - ext/metadata-extractor-2.4.0-beta-1.jar - release/modules/ext/metadata-extractor-2.4.0-beta-1.jar - - - ext/commons-io-2.1.jar - release/modules/ext/commons-io-2.1.jar - - - ext/commons-lang-2.4.jar - release/modules/ext/commons-lang-2.4.jar - - - ext/log4j-1.2.17.jar - release/modules/ext/log4j-1.2.17.jar - - - ext/jcl-over-slf4j-1.6.4.jar - release/modules/ext/jcl-over-slf4j-1.6.4.jar - - - ext/asm-all-3.1.jar - release/modules/ext/asm-all-3.1.jar - - - ext/qdox-1.12.jar - release/modules/ext/qdox-1.12.jar - - - ext/org.apache.felix.scr.generator-1.1.2.jar - release/modules/ext/org.apache.felix.scr.generator-1.1.2.jar - - - ext/bcmail-jdk15-1.45.jar - release/modules/ext/bcmail-jdk15-1.45.jar - - - ext/vorbis-java-core-0.1-tests.jar - release/modules/ext/vorbis-java-core-0.1-tests.jar - - - ext/tika-parsers-1.2-javadoc.jar - release/modules/ext/tika-parsers-1.2-javadoc.jar - - - ext/log4j-over-slf4j-1.6.4.jar - release/modules/ext/log4j-over-slf4j-1.6.4.jar - - - ext/vorbis-java-tika-0.1.jar - release/modules/ext/vorbis-java-tika-0.1.jar - - - ext/isoparser-1.0-RC-1.jar - release/modules/ext/isoparser-1.0-RC-1.jar - - - ext/httpcore-4.1.4.jar - release/modules/ext/httpcore-4.1.4.jar - - - ext/tika-parsers-1.2-sources.jar - release/modules/ext/tika-parsers-1.2-sources.jar - - - ext/aspectjrt-1.6.11.jar - release/modules/ext/aspectjrt-1.6.11.jar - - - ext/commons-compress-1.4.1.jar - release/modules/ext/commons-compress-1.4.1.jar - - - ext/poi-3.8.jar - 
release/modules/ext/poi-3.8.jar - - - ext/tika-parsers-1.2.jar - release/modules/ext/tika-parsers-1.2.jar - - - ext/apache-mime4j-core-0.7.2.jar - release/modules/ext/apache-mime4j-core-0.7.2.jar - - - ext/rome-0.9.jar - release/modules/ext/rome-0.9.jar - - - ext/httpclient-4.1.3.jar - release/modules/ext/httpclient-4.1.3.jar - - - ext/icu4j-3.8.jar - release/modules/ext/icu4j-3.8.jar - - - ext/juniversalchardet-1.0.3.jar - release/modules/ext/juniversalchardet-1.0.3.jar - - - ext/pdfbox-1.7.0.jar - release/modules/ext/pdfbox-1.7.0.jar - - - ext/jericho-html-3.3-sources.jar - release/modules/ext/jericho-html-3.3-sources.jar - - - ext/jdom-1.0.jar - release/modules/ext/jdom-1.0.jar - - - ext/commons-logging-1.1.1.jar - release/modules/ext/commons-logging-1.1.1.jar - - - ext/tagsoup-1.2.1.jar - release/modules/ext/tagsoup-1.2.1.jar - - - ext/fontbox-1.7.0.jar - release/modules/ext/fontbox-1.7.0.jar - - - ext/poi-ooxml-3.8.jar - release/modules/ext/poi-ooxml-3.8.jar - - - ext/boilerpipe-1.1.0.jar - release/modules/ext/boilerpipe-1.1.0.jar - - - ext/org.osgi.compendium-4.0.0.jar - release/modules/ext/org.osgi.compendium-4.0.0.jar - - - ext/slf4j-api-1.7.2.jar - release/modules/ext/slf4j-api-1.7.2.jar - - - ext/commons-lang-2.4-javadoc.jar - release/modules/ext/commons-lang-2.4-javadoc.jar - - - ext/jempbox-1.7.0.jar - release/modules/ext/jempbox-1.7.0.jar - - - ext/jericho-html-3.3-javadoc.jar - release/modules/ext/jericho-html-3.3-javadoc.jar - - - ext/wstx-asl-3.2.7.jar - release/modules/ext/wstx-asl-3.2.7.jar - - - ext/netcdf-4.2-min.jar - release/modules/ext/netcdf-4.2-min.jar - - - ext/solr-solrj-4.0.0-javadoc.jar - release/modules/ext/solr-solrj-4.0.0-javadoc.jar - - - ext/xmlbeans-2.3.0.jar - release/modules/ext/xmlbeans-2.3.0.jar - - - ext/httpmime-4.1.3.jar - release/modules/ext/httpmime-4.1.3.jar - - - ext/org.osgi.core-4.0.0.jar - release/modules/ext/org.osgi.core-4.0.0.jar - - - ext/org.apache.felix.scr.annotations-1.6.0.jar - 
release/modules/ext/org.apache.felix.scr.annotations-1.6.0.jar - - - ext/commons-logging-api-1.1.jar - release/modules/ext/commons-logging-api-1.1.jar - - - ext/xz-1.0.jar - release/modules/ext/xz-1.0.jar - - - ext/commons-codec-1.7.jar - release/modules/ext/commons-codec-1.7.jar - - - ext/tika-core-1.2.jar - release/modules/ext/tika-core-1.2.jar - - - ext/zookeeper-3.3.6.jar - release/modules/ext/zookeeper-3.3.6.jar - - - ext/dom4j-1.6.1.jar - release/modules/ext/dom4j-1.6.1.jar - - - ext/poi-scratchpad-3.8.jar - release/modules/ext/poi-scratchpad-3.8.jar - - - ext/poi-ooxml-schemas-3.8.jar - release/modules/ext/poi-ooxml-schemas-3.8.jar - - - ext/bcprov-jdk15-1.45.jar - release/modules/ext/bcprov-jdk15-1.45.jar - - - ext/jericho-html-3.3.jar - release/modules/ext/jericho-html-3.3.jar - - - ext/solr-solrj-4.0.0.jar - release/modules/ext/solr-solrj-4.0.0.jar - - - ext/commons-lang-2.4-sources.jar - release/modules/ext/commons-lang-2.4-sources.jar - - - ext/solr-solrj-4.0.0-sources.jar - release/modules/ext/solr-solrj-4.0.0-sources.jar - - - ext/apache-mime4j-dom-0.7.2.jar - release/modules/ext/apache-mime4j-dom-0.7.2.jar - - - ext/geronimo-stax-api_1.0_spec-1.0.1.jar - release/modules/ext/geronimo-stax-api_1.0_spec-1.0.1.jar - - - ext/asm-3.1.jar - release/modules/ext/asm-3.1.jar - - - - + + + org.netbeans.modules.apisupport.project + + + org.sleuthkit.autopsy.keywordsearch + + + + org.netbeans.api.progress + + + + 1 + 1.24.1 + + + + org.netbeans.modules.javahelp + + + + 1 + 2.22.1 + + + + org.netbeans.modules.options.api + + + + 1 + 1.26.1 + + + + org.netbeans.modules.settings + + + + 1 + 1.31.1 + + + + org.openide.awt + + + + 7.31.1 + + + + org.openide.modules + + + + 7.23.1 + + + + org.openide.nodes + + + + 7.21.1 + + + + org.openide.util + + + + 8.15.1 + + + + org.openide.util.lookup + + + + 8.8.1 + + + + org.openide.windows + + + + 6.40.1 + + + + org.sleuthkit.autopsy.core + + + + 9 + 7.0 + + + + + org.apache.commons.lang + org.apache.commons.lang.builder + 
org.apache.commons.lang.enums + org.apache.commons.lang.exception + org.apache.commons.lang.math + org.apache.commons.lang.mutable + org.apache.commons.lang.text + org.apache.commons.lang.time + org.apache.commons.logging + org.apache.commons.logging.impl + org.apache.tika + org.apache.tika.config + org.apache.tika.detect + org.apache.tika.exception + org.apache.tika.extractor + org.apache.tika.fork + org.apache.tika.io + org.apache.tika.language + org.apache.tika.metadata + org.apache.tika.mime + org.apache.tika.parser + org.apache.tika.parser.asm + org.apache.tika.parser.audio + org.apache.tika.parser.chm + org.apache.tika.parser.chm.accessor + org.apache.tika.parser.chm.assertion + org.apache.tika.parser.chm.core + org.apache.tika.parser.chm.exception + org.apache.tika.parser.chm.lzx + org.apache.tika.parser.crypto + org.apache.tika.parser.dwg + org.apache.tika.parser.epub + org.apache.tika.parser.executable + org.apache.tika.parser.external + org.apache.tika.parser.feed + org.apache.tika.parser.font + org.apache.tika.parser.hdf + org.apache.tika.parser.html + org.apache.tika.parser.image + org.apache.tika.parser.image.xmp + org.apache.tika.parser.internal + org.apache.tika.parser.iptc + org.apache.tika.parser.iwork + org.apache.tika.parser.jpeg + org.apache.tika.parser.mail + org.apache.tika.parser.mbox + org.apache.tika.parser.microsoft + org.apache.tika.parser.microsoft.ooxml + org.apache.tika.parser.mp3 + org.apache.tika.parser.mp4 + org.apache.tika.parser.netcdf + org.apache.tika.parser.odf + org.apache.tika.parser.opendocument + org.apache.tika.parser.pdf + org.apache.tika.parser.pkg + org.apache.tika.parser.prt + org.apache.tika.parser.rtf + org.apache.tika.parser.txt + org.apache.tika.parser.video + org.apache.tika.parser.xml + org.apache.tika.sax + org.apache.tika.sax.xpath + org.apache.tika.utils + org.sleuthkit.autopsy.keywordsearch + + + ext/metadata-extractor-2.4.0-beta-1.jar + release/modules/ext/metadata-extractor-2.4.0-beta-1.jar + + + 
ext/commons-io-2.1.jar + release/modules/ext/commons-io-2.1.jar + + + ext/commons-lang-2.4.jar + release/modules/ext/commons-lang-2.4.jar + + + ext/log4j-1.2.17.jar + release/modules/ext/log4j-1.2.17.jar + + + ext/jcl-over-slf4j-1.6.4.jar + release/modules/ext/jcl-over-slf4j-1.6.4.jar + + + ext/asm-all-3.1.jar + release/modules/ext/asm-all-3.1.jar + + + ext/qdox-1.12.jar + release/modules/ext/qdox-1.12.jar + + + ext/org.apache.felix.scr.generator-1.1.2.jar + release/modules/ext/org.apache.felix.scr.generator-1.1.2.jar + + + ext/bcmail-jdk15-1.45.jar + release/modules/ext/bcmail-jdk15-1.45.jar + + + ext/vorbis-java-core-0.1-tests.jar + release/modules/ext/vorbis-java-core-0.1-tests.jar + + + ext/tika-parsers-1.2-javadoc.jar + release/modules/ext/tika-parsers-1.2-javadoc.jar + + + ext/log4j-over-slf4j-1.6.4.jar + release/modules/ext/log4j-over-slf4j-1.6.4.jar + + + ext/vorbis-java-tika-0.1.jar + release/modules/ext/vorbis-java-tika-0.1.jar + + + ext/isoparser-1.0-RC-1.jar + release/modules/ext/isoparser-1.0-RC-1.jar + + + ext/httpcore-4.1.4.jar + release/modules/ext/httpcore-4.1.4.jar + + + ext/tika-parsers-1.2-sources.jar + release/modules/ext/tika-parsers-1.2-sources.jar + + + ext/aspectjrt-1.6.11.jar + release/modules/ext/aspectjrt-1.6.11.jar + + + ext/commons-compress-1.4.1.jar + release/modules/ext/commons-compress-1.4.1.jar + + + ext/poi-3.8.jar + release/modules/ext/poi-3.8.jar + + + ext/tika-parsers-1.2.jar + release/modules/ext/tika-parsers-1.2.jar + + + ext/apache-mime4j-core-0.7.2.jar + release/modules/ext/apache-mime4j-core-0.7.2.jar + + + ext/rome-0.9.jar + release/modules/ext/rome-0.9.jar + + + ext/httpclient-4.1.3.jar + release/modules/ext/httpclient-4.1.3.jar + + + ext/icu4j-3.8.jar + release/modules/ext/icu4j-3.8.jar + + + ext/juniversalchardet-1.0.3.jar + release/modules/ext/juniversalchardet-1.0.3.jar + + + ext/pdfbox-1.7.0.jar + release/modules/ext/pdfbox-1.7.0.jar + + + ext/jericho-html-3.3-sources.jar + 
release/modules/ext/jericho-html-3.3-sources.jar + + + ext/jdom-1.0.jar + release/modules/ext/jdom-1.0.jar + + + ext/commons-logging-1.1.1.jar + release/modules/ext/commons-logging-1.1.1.jar + + + ext/tagsoup-1.2.1.jar + release/modules/ext/tagsoup-1.2.1.jar + + + ext/fontbox-1.7.0.jar + release/modules/ext/fontbox-1.7.0.jar + + + ext/poi-ooxml-3.8.jar + release/modules/ext/poi-ooxml-3.8.jar + + + ext/boilerpipe-1.1.0.jar + release/modules/ext/boilerpipe-1.1.0.jar + + + ext/org.osgi.compendium-4.0.0.jar + release/modules/ext/org.osgi.compendium-4.0.0.jar + + + ext/slf4j-api-1.7.2.jar + release/modules/ext/slf4j-api-1.7.2.jar + + + ext/commons-lang-2.4-javadoc.jar + release/modules/ext/commons-lang-2.4-javadoc.jar + + + ext/jempbox-1.7.0.jar + release/modules/ext/jempbox-1.7.0.jar + + + ext/jericho-html-3.3-javadoc.jar + release/modules/ext/jericho-html-3.3-javadoc.jar + + + ext/wstx-asl-3.2.7.jar + release/modules/ext/wstx-asl-3.2.7.jar + + + ext/netcdf-4.2-min.jar + release/modules/ext/netcdf-4.2-min.jar + + + ext/solr-solrj-4.0.0-javadoc.jar + release/modules/ext/solr-solrj-4.0.0-javadoc.jar + + + ext/xmlbeans-2.3.0.jar + release/modules/ext/xmlbeans-2.3.0.jar + + + ext/httpmime-4.1.3.jar + release/modules/ext/httpmime-4.1.3.jar + + + ext/org.osgi.core-4.0.0.jar + release/modules/ext/org.osgi.core-4.0.0.jar + + + ext/org.apache.felix.scr.annotations-1.6.0.jar + release/modules/ext/org.apache.felix.scr.annotations-1.6.0.jar + + + ext/commons-logging-api-1.1.jar + release/modules/ext/commons-logging-api-1.1.jar + + + ext/xz-1.0.jar + release/modules/ext/xz-1.0.jar + + + ext/commons-codec-1.7.jar + release/modules/ext/commons-codec-1.7.jar + + + ext/tika-core-1.2.jar + release/modules/ext/tika-core-1.2.jar + + + ext/zookeeper-3.3.6.jar + release/modules/ext/zookeeper-3.3.6.jar + + + ext/dom4j-1.6.1.jar + release/modules/ext/dom4j-1.6.1.jar + + + ext/poi-scratchpad-3.8.jar + release/modules/ext/poi-scratchpad-3.8.jar + + + ext/poi-ooxml-schemas-3.8.jar + 
release/modules/ext/poi-ooxml-schemas-3.8.jar + + + ext/bcprov-jdk15-1.45.jar + release/modules/ext/bcprov-jdk15-1.45.jar + + + ext/jericho-html-3.3.jar + release/modules/ext/jericho-html-3.3.jar + + + ext/solr-solrj-4.0.0.jar + release/modules/ext/solr-solrj-4.0.0.jar + + + ext/commons-lang-2.4-sources.jar + release/modules/ext/commons-lang-2.4-sources.jar + + + ext/solr-solrj-4.0.0-sources.jar + release/modules/ext/solr-solrj-4.0.0-sources.jar + + + ext/apache-mime4j-dom-0.7.2.jar + release/modules/ext/apache-mime4j-dom-0.7.2.jar + + + ext/geronimo-stax-api_1.0_spec-1.0.1.jar + release/modules/ext/geronimo-stax-api_1.0_spec-1.0.1.jar + + + ext/asm-3.1.jar + release/modules/ext/asm-3.1.jar + + + + diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaLanguageIdentifier.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaLanguageIdentifier.java index 607daeb9a8..7d42399f89 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaLanguageIdentifier.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaLanguageIdentifier.java @@ -1,61 +1,61 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2013 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.sleuthkit.autopsy.keywordsearch; - -import java.util.logging.Level; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardAttribute; -import org.sleuthkit.datamodel.TskCoreException; - -/** - * TextLanguageIdentifier implementation based on a wrapped Tike - * LanguageIdentifier - */ -public class TikaLanguageIdentifier implements TextLanguageIdentifier { - - private static final Logger logger = Logger.getLogger(TikaLanguageIdentifier.class.getName()); - private static final int MIN_STRING_LENGTH = 1000; - - @Override - public void addLanguageToBlackBoard(String extracted, AbstractFile sourceFile) { - if (extracted.length() > MIN_STRING_LENGTH) { - org.apache.tika.language.LanguageIdentifier li = new org.apache.tika.language.LanguageIdentifier(extracted); - - //logger.log(Level.INFO, sourceFile.getName() + " detected language: " + li.getLanguage() - // + " with " + ((li.isReasonablyCertain()) ? "HIGH" : "LOW") + " confidence"); - - BlackboardArtifact genInfo; - try { - genInfo = sourceFile.getGenInfoArtifact(); - - BlackboardAttribute textLang = new BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TEXT_LANGUAGE.getTypeID(), - KeywordSearchIngestModule.MODULE_NAME, li.getLanguage()); - - genInfo.addAttribute(textLang); - - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "failed to add TSK_TEXT_LANGUAGE attribute to TSK_GEN_INFO artifact for file: " + sourceFile.getName(), ex); - } - - } - } +/* + * Autopsy Forensic Browser + * + * Copyright 2013 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.keywordsearch; + +import java.util.logging.Level; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardAttribute; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * TextLanguageIdentifier implementation based on a wrapped Tike + * LanguageIdentifier + */ +public class TikaLanguageIdentifier implements TextLanguageIdentifier { + + private static final Logger logger = Logger.getLogger(TikaLanguageIdentifier.class.getName()); + private static final int MIN_STRING_LENGTH = 1000; + + @Override + public void addLanguageToBlackBoard(String extracted, AbstractFile sourceFile) { + if (extracted.length() > MIN_STRING_LENGTH) { + org.apache.tika.language.LanguageIdentifier li = new org.apache.tika.language.LanguageIdentifier(extracted); + + //logger.log(Level.INFO, sourceFile.getName() + " detected language: " + li.getLanguage() + // + " with " + ((li.isReasonablyCertain()) ? 
"HIGH" : "LOW") + " confidence"); + + BlackboardArtifact genInfo; + try { + genInfo = sourceFile.getGenInfoArtifact(); + + BlackboardAttribute textLang = new BlackboardAttribute( + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TEXT_LANGUAGE.getTypeID(), + KeywordSearchIngestModule.MODULE_NAME, li.getLanguage()); + + genInfo.addAttribute(textLang); + + } catch (TskCoreException ex) { + logger.log(Level.WARNING, "failed to add TSK_TEXT_LANGUAGE attribute to TSK_GEN_INFO artifact for file: " + sourceFile.getName(), ex); + } + + } + } } \ No newline at end of file diff --git a/RecentActivity/nbproject/project.xml b/RecentActivity/nbproject/project.xml index 8da413bbc9..78526a084c 100644 --- a/RecentActivity/nbproject/project.xml +++ b/RecentActivity/nbproject/project.xml @@ -1,52 +1,52 @@ - - - org.netbeans.modules.apisupport.project - - - org.sleuthkit.autopsy.recentactivity - - - - org.openide.awt - - - - 7.46.1 - - - - org.openide.modules - - - - 7.23.1 - - - - org.openide.nodes - - - - 7.21.1 - - - - org.sleuthkit.autopsy.core - - - - 9 - 7.0 - - - - - org.sleuthkit.autopsy.recentactivity - - - ext/gson-2.1.jar - release/modules/ext/gson-2.1.jar - - - - + + + org.netbeans.modules.apisupport.project + + + org.sleuthkit.autopsy.recentactivity + + + + org.openide.awt + + + + 7.46.1 + + + + org.openide.modules + + + + 7.23.1 + + + + org.openide.nodes + + + + 7.21.1 + + + + org.sleuthkit.autopsy.core + + + + 9 + 7.0 + + + + + org.sleuthkit.autopsy.recentactivity + + + ext/gson-2.1.jar + release/modules/ext/gson-2.1.jar + + + + diff --git a/ScalpelCarver/nbproject/project.xml b/ScalpelCarver/nbproject/project.xml index d0f610f870..09a3bb2f87 100644 --- a/ScalpelCarver/nbproject/project.xml +++ b/ScalpelCarver/nbproject/project.xml @@ -1,22 +1,22 @@ - - - org.netbeans.modules.apisupport.project - - - org.sleuthkit.autopsy.scalpel - - - - org.sleuthkit.autopsy.core - - - - 9 - 7.0 - - - - - - - + + + org.netbeans.modules.apisupport.project + + + org.sleuthkit.autopsy.scalpel + + 
+ + org.sleuthkit.autopsy.core + + + + 9 + 7.0 + + + + + + + diff --git a/SevenZip/nbproject/project.xml b/SevenZip/nbproject/project.xml index 2a2f30e675..ef6d94d674 100644 --- a/SevenZip/nbproject/project.xml +++ b/SevenZip/nbproject/project.xml @@ -1,48 +1,48 @@ - - - org.netbeans.modules.apisupport.project - - - org.sleuthkit.autopsy.sevenzip - - - - org.netbeans.api.progress - - - - 1 - 1.32.1 - - - - org.sleuthkit.autopsy.core - - - - 9 - 7.0 - - - - org.sleuthkit.autopsy.corelibs - - - - 3 - 1.1 - - - - - - ext/sevenzipjbinding.jar - release/modules/ext/sevenzipjbinding.jar - - - ext/sevenzipjbinding-AllPlatforms.jar - release/modules/ext/sevenzipjbinding-AllPlatforms.jar - - - - + + + org.netbeans.modules.apisupport.project + + + org.sleuthkit.autopsy.sevenzip + + + + org.netbeans.api.progress + + + + 1 + 1.32.1 + + + + org.sleuthkit.autopsy.core + + + + 9 + 7.0 + + + + org.sleuthkit.autopsy.corelibs + + + + 3 + 1.1 + + + + + + ext/sevenzipjbinding.jar + release/modules/ext/sevenzipjbinding.jar + + + ext/sevenzipjbinding-AllPlatforms.jar + release/modules/ext/sevenzipjbinding-AllPlatforms.jar + + + + diff --git a/Testing/nbproject/project.xml b/Testing/nbproject/project.xml index e9621dd78b..f685034d4a 100644 --- a/Testing/nbproject/project.xml +++ b/Testing/nbproject/project.xml @@ -1,69 +1,69 @@ - - - org.netbeans.modules.apisupport.project - - - org.sleuthkit.autopsy.testing - - - - org.sleuthkit.autopsy.core - - - - 9 - 7.0 - - - - org.sleuthkit.autopsy.keywordsearch - - - - 5 - 3.2 - - - - - - qa-functional - - org.netbeans.libs.junit4 - - - - org.netbeans.modules.jellytools.java - - - - org.netbeans.modules.jellytools.platform - - - - org.netbeans.modules.jemmy - - - - org.netbeans.modules.nbjunit - - - - - - unit - - org.netbeans.libs.junit4 - - - - org.netbeans.modules.nbjunit - - - - - - - - - + + + org.netbeans.modules.apisupport.project + + + org.sleuthkit.autopsy.testing + + + + org.sleuthkit.autopsy.core + + + + 9 + 7.0 + + + + 
org.sleuthkit.autopsy.keywordsearch + + + + 5 + 3.2 + + + + + + qa-functional + + org.netbeans.libs.junit4 + + + + org.netbeans.modules.jellytools.java + + + + org.netbeans.modules.jellytools.platform + + + + org.netbeans.modules.jemmy + + + + org.netbeans.modules.nbjunit + + + + + + unit + + org.netbeans.libs.junit4 + + + + org.netbeans.modules.nbjunit + + + + + + + + + diff --git a/Timeline/nbproject/project.xml b/Timeline/nbproject/project.xml index f21e7f63ba..352e0fd55f 100644 --- a/Timeline/nbproject/project.xml +++ b/Timeline/nbproject/project.xml @@ -1,113 +1,113 @@ - - - org.netbeans.modules.apisupport.project - - - org.sleuthkit.autopsy.timeline - - - - org.netbeans.api.progress - - - - 1 - 1.32.1 - - - - org.netbeans.modules.settings - - - - 1 - 1.35.1 - - - - org.openide.actions - - - - 6.26.1 - - - - org.openide.awt - - - - 7.46.1 - - - - org.openide.dialogs - - - - 7.25.1 - - - - org.openide.modules - - - - 7.32.1 - - - - org.openide.nodes - - - - 7.28.1 - - - - org.openide.util - - - - 8.25.2 - - - - org.openide.util.lookup - - - - 8.15.2 - - - - org.openide.windows - - - - 6.55.2 - - - - org.sleuthkit.autopsy.core - - - - 9 - 7.0 - - - - org.sleuthkit.autopsy.corelibs - - - - 3 - 1.1 - - - - - - - + + + org.netbeans.modules.apisupport.project + + + org.sleuthkit.autopsy.timeline + + + + org.netbeans.api.progress + + + + 1 + 1.32.1 + + + + org.netbeans.modules.settings + + + + 1 + 1.35.1 + + + + org.openide.actions + + + + 6.26.1 + + + + org.openide.awt + + + + 7.46.1 + + + + org.openide.dialogs + + + + 7.25.1 + + + + org.openide.modules + + + + 7.32.1 + + + + org.openide.nodes + + + + 7.28.1 + + + + org.openide.util + + + + 8.25.2 + + + + org.openide.util.lookup + + + + 8.15.2 + + + + org.openide.windows + + + + 6.55.2 + + + + org.sleuthkit.autopsy.core + + + + 9 + 7.0 + + + + org.sleuthkit.autopsy.corelibs + + + + 3 + 1.1 + + + + + + + diff --git a/Timeline/src/org/sleuthkit/autopsy/timeline/Timeline.java 
b/Timeline/src/org/sleuthkit/autopsy/timeline/Timeline.java index 9f027c53ce..3468ab6fef 100644 --- a/Timeline/src/org/sleuthkit/autopsy/timeline/Timeline.java +++ b/Timeline/src/org/sleuthkit/autopsy/timeline/Timeline.java @@ -1,1177 +1,1177 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2013 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.timeline; - -import java.awt.Component; -import java.awt.Cursor; -import java.awt.Dimension; -import java.awt.EventQueue; -import java.beans.PropertyChangeEvent; -import java.beans.PropertyChangeListener; -import java.io.BufferedWriter; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.FileWriter; -import java.io.IOException; -import java.io.Writer; -import java.text.DateFormat; -import java.text.DateFormatSymbols; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Calendar; -import java.util.Date; -import java.util.List; -import java.util.Locale; -import java.util.Scanner; -import java.util.Stack; -import java.util.logging.Level; -import javafx.application.Platform; -import javafx.beans.value.ChangeListener; -import javafx.beans.value.ObservableValue; -import javafx.collections.FXCollections; -import javafx.collections.ObservableList; -import javafx.embed.swing.JFXPanel; -import javafx.event.ActionEvent; -import 
javafx.event.EventHandler; -import javafx.geometry.Pos; -import javafx.scene.Group; -import javafx.scene.Scene; -import javafx.scene.chart.BarChart; -import javafx.scene.chart.CategoryAxis; -import javafx.scene.chart.NumberAxis; -import javafx.scene.control.Button; -import javafx.scene.control.ComboBox; -import javafx.scene.control.Label; -import javafx.scene.control.ScrollPane; -import javafx.scene.input.MouseButton; -import javafx.scene.input.MouseEvent; -import javafx.scene.layout.HBox; -import javafx.scene.layout.VBox; -import javafx.scene.paint.Color; -import javax.swing.JFrame; -import javax.swing.JOptionPane; -import javax.swing.SwingUtilities; -import org.netbeans.api.progress.ProgressHandle; -import org.netbeans.api.progress.ProgressHandleFactory; -import org.openide.awt.ActionID; -import org.openide.awt.ActionReference; -import org.openide.awt.ActionReferences; -import org.openide.awt.ActionRegistration; -import org.openide.modules.InstalledFileLocator; -import org.openide.modules.ModuleInstall; -import org.openide.nodes.Children; -import org.openide.nodes.Node; -import org.openide.util.HelpCtx; -import org.openide.util.NbBundle; -import org.openide.util.actions.CallableSystemAction; -import org.openide.util.actions.Presenter; -import org.openide.util.lookup.Lookups; -import org.openide.windows.WindowManager; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.core.Installer; -import org.sleuthkit.autopsy.corecomponents.DataContentPanel; -import org.sleuthkit.autopsy.corecomponents.DataResultPanel; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.coreutils.PlatformUtil; -import org.sleuthkit.autopsy.datamodel.FilterNodeLeaf; -import org.sleuthkit.autopsy.datamodel.DirectoryNode; -import org.sleuthkit.autopsy.datamodel.DisplayableItemNode; -import org.sleuthkit.autopsy.datamodel.DisplayableItemNodeVisitor; -import org.sleuthkit.autopsy.datamodel.FileNode; -import 
org.sleuthkit.autopsy.ingest.IngestManager; -import org.sleuthkit.autopsy.coreutils.ExecUtil; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.SleuthkitCase; -import org.sleuthkit.datamodel.TskCoreException; - -@ActionID(category = "Tools", id = "org.sleuthkit.autopsy.timeline.Timeline") -@ActionRegistration(displayName = "#CTL_MakeTimeline", lazy = false) -@ActionReferences(value = { - @ActionReference(path = "Menu/Tools", position = 100)}) -@NbBundle.Messages(value = "CTL_TimelineView=Generate Timeline") -/** - * The Timeline Action entry point. Collects data and pushes data to javafx - * widgets - * - */ -public class Timeline extends CallableSystemAction implements Presenter.Toolbar, PropertyChangeListener { - - private static final Logger logger = Logger.getLogger(Timeline.class.getName()); - private final java.io.File macRoot = InstalledFileLocator.getDefault().locate("mactime", Timeline.class.getPackage().getName(), false); - private TimelineFrame mainFrame; //frame for holding all the elements - private Group fxGroupCharts; //Orders the charts - private Scene fxSceneCharts; //Displays the charts - private HBox fxHBoxCharts; //Holds the navigation buttons in horiztonal fashion. - private VBox fxVBox; //Holds the JavaFX Elements in vertical fashion. - private JFXPanel fxPanelCharts; //FX panel to hold the group - private BarChart fxChartEvents; //Yearly/Monthly events - Bar chart - private ScrollPane fxScrollEvents; //Scroll Panes for dealing with oversized an oversized chart - private static final int FRAME_HEIGHT = 700; //Sizing constants - private static final int FRAME_WIDTH = 1200; - private Button fxZoomOutButton; //Navigation buttons - private ComboBox fxDropdownSelectYears; //Dropdown box for selecting years. Useful when the charts' scale means some years are unclickable, despite having events. - private final Stack> fxStackPrevCharts = new Stack>(); //Stack for storing drill-up information. 
- private BarChart fxChartTopLevel; //the topmost chart, used for resetting to default view. - private DataResultPanel dataResultPanel; - private DataContentPanel dataContentPanel; - private ProgressHandle progress; - private java.io.File moduleDir; - private String mactimeFileName; - private List data; - private boolean listeningToAddImage = false; - private long lastObjectId = -1; - private TimelineProgressDialog progressDialog; - private EventHandler fxMouseEnteredListener; - private EventHandler fxMouseExitedListener; - private SleuthkitCase skCase; - private boolean fxInited = false; - - public Timeline() { - super(); - - fxInited = Installer.isJavaFxInited(); - - } - - //Swing components and JavafX components don't play super well together - //Swing components need to be initialized first, in the swing specific thread - //Next, the javafx components may be initialized. - private void customize() { - - //listeners - fxMouseEnteredListener = new EventHandler() { - @Override - public void handle(MouseEvent e) { - fxPanelCharts.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR)); - } - }; - fxMouseExitedListener = new EventHandler() { - @Override - public void handle(MouseEvent e) { - fxPanelCharts.setCursor(null); - } - }; - - SwingUtilities.invokeLater(new Runnable() { - @Override - public void run() { - //Making the main frame * - - mainFrame = new TimelineFrame(); - mainFrame.setFrameName(Case.getCurrentCase().getName() + " - Autopsy Timeline (Beta)"); - - //use the same icon on jframe as main application - mainFrame.setIconImage(WindowManager.getDefault().getMainWindow().getIconImage()); - mainFrame.setFrameSize(new Dimension(FRAME_WIDTH, FRAME_HEIGHT)); //(Width, Height) - - - dataContentPanel = DataContentPanel.createInstance(); - //dataContentPanel.setAlignmentX(Component.RIGHT_ALIGNMENT); - //dataContentPanel.setPreferredSize(new Dimension(FRAME_WIDTH, (int) (FRAME_HEIGHT * 0.4))); - - dataResultPanel = DataResultPanel.createInstance("Timeline 
Results", "", Node.EMPTY, 0, dataContentPanel); - dataResultPanel.setContentViewer(dataContentPanel); - //dataResultPanel.setAlignmentX(Component.LEFT_ALIGNMENT); - //dataResultPanel.setPreferredSize(new Dimension((int)(FRAME_WIDTH * 0.5), (int) (FRAME_HEIGHT * 0.5))); - logger.log(Level.INFO, "Successfully created viewers"); - - mainFrame.setBottomLeftPanel(dataResultPanel); - mainFrame.setBottomRightPanel(dataContentPanel); - - runJavaFxThread(); - } - }); - - - } - - private void runJavaFxThread() { - //JavaFX thread - //JavaFX components MUST be run in the JavaFX thread, otherwise massive amounts of exceptions will be thrown and caught. Liable to freeze up and crash. - //Components can be declared whenever, but initialization and manipulation must take place here. - Platform.runLater(new Runnable() { - @Override - public void run() { - try { - // start the progress bar - progress = ProgressHandleFactory.createHandle("Creating timeline . . ."); - progress.start(); - - fxChartEvents = null; //important to reset old data - fxPanelCharts = new JFXPanel(); - fxGroupCharts = new Group(); - fxSceneCharts = new Scene(fxGroupCharts, FRAME_WIDTH, FRAME_HEIGHT * 0.6); //Width, Height - fxVBox = new VBox(5); - fxVBox.setAlignment(Pos.BOTTOM_CENTER); - fxHBoxCharts = new HBox(10); - fxHBoxCharts.setAlignment(Pos.BOTTOM_CENTER); - - //Initializing default values for the scroll pane - fxScrollEvents = new ScrollPane(); - fxScrollEvents.setPrefSize(FRAME_WIDTH, FRAME_HEIGHT * 0.6); //Width, Height - fxScrollEvents.setContent(null); //Needs some content, otherwise it crashes - - // set up moduleDir - moduleDir = new java.io.File(Case.getCurrentCase().getModulesOutputDirAbsPath() + java.io.File.separator + "timeline"); - if (!moduleDir.exists()) { - moduleDir.mkdir(); - } - - int currentProgress = 0; - java.io.File mactimeFile = new java.io.File(moduleDir, mactimeFileName); - if (!mactimeFile.exists()) { - progressDialog.setProgressTotal(3); //total 3 units - 
logger.log(Level.INFO, "Creating body file"); - progressDialog.updateProgressBar("Generating Bodyfile"); - String bodyFilePath = makeBodyFile(); - progressDialog.updateProgressBar(++currentProgress); - logger.log(Level.INFO, "Creating mactime file: " + mactimeFile.getAbsolutePath()); - progressDialog.updateProgressBar("Generating Mactime"); - makeMacTime(bodyFilePath); - progressDialog.updateProgressBar(++currentProgress); - data = null; - } else { - progressDialog.setProgressTotal(1); //total 1 units - logger.log(Level.INFO, "Mactime file already exists; parsing that: " + mactimeFile.getAbsolutePath()); - } - - - progressDialog.updateProgressBar("Parsing Mactime"); - if (data == null) { - logger.log(Level.INFO, "Parsing mactime file: " + mactimeFile.getAbsolutePath()); - data = parseMacTime(mactimeFile); //The sum total of the mactime parsing. YearEpochs contain everything you need to make a timeline. - } - progressDialog.updateProgressBar(++currentProgress); - - //Making a dropdown box to select years. - List lsi = new ArrayList(); //List is in the format of {Year : Number of Events}, used for selecting from the dropdown. 
- for (YearEpoch ye : data) { - lsi.add(ye.year + " : " + ye.getNumFiles()); - } - ObservableList listSelect = FXCollections.observableArrayList(lsi); - fxDropdownSelectYears = new ComboBox(listSelect); - - //Buttons for navigating up and down the timeline - fxZoomOutButton = new Button("Zoom Out"); - fxZoomOutButton.setOnAction(new EventHandler() { - @Override - public void handle(ActionEvent e) { - BarChart bc; - if (fxStackPrevCharts.size() == 0) { - bc = fxChartTopLevel; - } else { - bc = fxStackPrevCharts.pop(); - } - fxChartEvents = bc; - fxScrollEvents.setContent(fxChartEvents); - } - }); - - fxDropdownSelectYears.getSelectionModel().selectedItemProperty().addListener(new ChangeListener() { - @Override - public void changed(ObservableValue ov, String t, String t1) { - if (fxDropdownSelectYears.getValue() != null) { - mainFrame.setTopComponentCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); - try { - fxChartEvents = createMonthsWithDrill(findYear(data, Integer.valueOf(fxDropdownSelectYears.getValue().split(" ")[0]))); - fxScrollEvents.setContent(fxChartEvents); - } finally { - mainFrame.setTopComponentCursor(null); - } - } - } - }); - - //Adding things to the V and H boxes. - //hBox_Charts stores the pseudo menu bar at the top of the timeline. |Zoom Out|View Year: [Select Year]|â–º| - fxHBoxCharts.getChildren().addAll(fxZoomOutButton, new Label("Go To:"), fxDropdownSelectYears); - fxVBox.getChildren().addAll(fxHBoxCharts, fxScrollEvents); //FxBox_V holds things in a visual stack. - fxGroupCharts.getChildren().add(fxVBox); //Adding the FxBox to the group. Groups make things easier to manipulate without having to update a hundred things every change. 
- fxPanelCharts.setScene(fxSceneCharts); - - - fxPanelCharts.setAlignmentX(Component.LEFT_ALIGNMENT); - - fxChartTopLevel = createYearChartWithDrill(data); - fxChartEvents = fxChartTopLevel; - fxScrollEvents.setContent(fxChartEvents); - - EventQueue.invokeLater(new Runnable() { - @Override - public void run() { - mainFrame.setTopPanel(fxPanelCharts); - dataResultPanel.open(); - //mainFrame.pack(); - mainFrame.setVisible(true); - } - }); - } finally { - // stop the progress bar - progress.finish(); - - // close the progressDialog - progressDialog.doClose(0); - } - } - }); - } - - /** - * Creates a BarChart with datapoints for all the years from the parsed - * mactime file. - * - * @param allYears The list of years that have barData from the mactime file - * @return BarChart scaled to the year level - */ - private BarChart createYearChartWithDrill(final List allYears) { - final CategoryAxis xAxis = new CategoryAxis(); //Axes are very specific types. Categorys are strings. - final NumberAxis yAxis = new NumberAxis(); - final Label l = new Label(""); - l.setStyle("-fx-font: 24 arial;"); - l.setTextFill(Color.AZURE); - xAxis.setLabel("Years"); - yAxis.setLabel("Number of Events"); - //Charts are made up of individual pieces of Chart.Data. In this case, a piece of barData is a single bar on the graph. - //Data is packaged into a series, which can be assigned custom colors or styling - //After the series are created, 1 or more series are packaged into a single chart. - ObservableList> bcData = FXCollections.observableArrayList(); - BarChart.Series se = new BarChart.Series(); - if (allYears != null) { - for (final YearEpoch ye : allYears) { - se.getData().add(new BarChart.Data(String.valueOf(ye.year), ye.getNumFiles())); - } - } - bcData.add(se); - - - //Note: - // BarChart.Data wraps the Java Nodes class. BUT, until a BarChart.Data gets added to an actual series, it's node is null, and you can perform no operations on it. 
- // When the Data is added to a series(or a chart? I am unclear on where), a node is automaticaly generated for it, after which you can perform any of the operations it offers. - // In addtion, you are free to set the node to whatever you want. It wraps the most generic Node class. - // But it is for this reason that the chart generating functions have two forloops. I do not believe they can be condensed into a single loop due to the nodes being null until - // an undetermined point in time. - BarChart bc = new BarChart(xAxis, yAxis, bcData); - for (final BarChart.Data barData : bc.getData().get(0).getData()) { //.get(0) refers to the BarChart.Series class to work on. There is only one series in this graph, so get(0) is safe. - barData.getNode().setScaleX(.5); - - final javafx.scene.Node barNode = barData.getNode(); - //hover listener - barNode.addEventHandler(MouseEvent.MOUSE_ENTERED_TARGET, fxMouseEnteredListener); - barNode.addEventHandler(MouseEvent.MOUSE_EXITED_TARGET, fxMouseExitedListener); - - //click listener - barNode.addEventHandler(MouseEvent.MOUSE_CLICKED, - new EventHandler() { - @Override - public void handle(MouseEvent e) { - if (e.getButton().equals(MouseButton.PRIMARY)) { - if (e.getClickCount() == 1) { - Platform.runLater(new Runnable() { - @Override - public void run() { - BarChart b = - createMonthsWithDrill(findYear(allYears, Integer.valueOf(barData.getXValue()))); - fxChartEvents = b; - fxScrollEvents.setContent(fxChartEvents); - } - }); - - } - } - } - }); - } - - bc.autosize(); //Get an auto height - bc.setPrefWidth(FRAME_WIDTH); //but override the width - bc.setLegendVisible(false); //The legend adds too much extra chart space, it's not necessary. - return bc; - } - - /* - * Displays a chart with events from one year only, separated into 1-month chunks. - * Always 12 per year, empty months are represented by no bar. 
- */ - private BarChart createMonthsWithDrill(final YearEpoch ye) { - - final CategoryAxis xAxis = new CategoryAxis(); - final NumberAxis yAxis = new NumberAxis(); - xAxis.setLabel("Month (" + ye.year + ")"); - yAxis.setLabel("Number of Events"); - ObservableList> bcData = FXCollections.observableArrayList(); - - BarChart.Series se = new BarChart.Series(); - for (int monthNum = 0; monthNum < 12; ++monthNum) { - String monthName = new DateFormatSymbols().getMonths()[monthNum]; - MonthEpoch month = ye.getMonth(monthNum); - int numEvents = month == null ? 0 : month.getNumFiles(); - se.getData().add(new BarChart.Data(monthName, numEvents)); //Adding new barData at {X-pos, Y-Pos} - } - bcData.add(se); - final BarChart bc = new BarChart(xAxis, yAxis, bcData); - - for (int i = 0; i < 12; i++) { - for (final BarChart.Data barData : bc.getData().get(0).getData()) { - //Note: - // All the charts of this package have a problem where when the chart gets below a certain pixel ratio, the barData stops drawing. The axes and the labels remain, - // But the actual chart barData is invisible, unclickable, and unrendered. To partially compensate for that, barData.getNode() can be manually scaled up to increase visibility. - // Sometimes I've had it jacked up to as much as x2400 just to see a sliver of information. - // But that doesn't work all the time. Adding it to a scrollpane and letting the user scroll up and down to view the chart is the other workaround. Both of these fixes suck. 
- final javafx.scene.Node barNode = barData.getNode(); - barNode.setScaleX(.5); - - //hover listener - barNode.addEventHandler(MouseEvent.MOUSE_ENTERED_TARGET, fxMouseEnteredListener); - barNode.addEventHandler(MouseEvent.MOUSE_EXITED_TARGET, fxMouseExitedListener); - - //clicks - barNode.addEventHandler(MouseEvent.MOUSE_PRESSED, - new EventHandler() { - @Override - public void handle(MouseEvent e) { - if (e.getButton().equals(MouseButton.PRIMARY)) { - if (e.getClickCount() == 1) { - Platform.runLater(new Runnable() { - @Override - public void run() { - fxChartEvents = createEventsByMonth(findMonth(ye.months, monthStringToInt(barData.getXValue())), ye); - fxScrollEvents.setContent(fxChartEvents); - } - }); - } - } - } - }); - } - } - - bc.autosize(); - bc.setPrefWidth(FRAME_WIDTH); - bc.setLegendVisible(false); - fxStackPrevCharts.push(bc); - return bc; - } - - - /* - * Displays a chart with events from one month only. - * Up to 31 days per month, as low as 28 as determined by the specific MonthEpoch - */ - private BarChart createEventsByMonth(final MonthEpoch me, final YearEpoch ye) { - final CategoryAxis xAxis = new CategoryAxis(); - final NumberAxis yAxis = new NumberAxis(); - xAxis.setLabel("Day of Month"); - yAxis.setLabel("Number of Events"); - ObservableList> bcData = makeObservableListByMonthAllDays(me, ye.getYear()); - BarChart.Series series = new BarChart.Series(bcData); - series.setName(me.getMonthName() + " " + ye.getYear()); - - - ObservableList> ol = - FXCollections.>observableArrayList(series); - - final BarChart bc = new BarChart(xAxis, yAxis, ol); - for (final BarChart.Data barData : bc.getData().get(0).getData()) { - //data.getNode().setScaleX(2); - - final javafx.scene.Node barNode = barData.getNode(); - - //hover listener - barNode.addEventHandler(MouseEvent.MOUSE_ENTERED_TARGET, fxMouseEnteredListener); - barNode.addEventHandler(MouseEvent.MOUSE_EXITED_TARGET, fxMouseExitedListener); - - barNode.addEventHandler(MouseEvent.MOUSE_PRESSED, - new 
EventHandler() { - MonthEpoch myme = me; - - @Override - public void handle(MouseEvent e) { - SwingUtilities.invokeLater(new Runnable() { - @Override - public void run() { - //reset the view and free the current nodes before loading new ones - final FileRootNode d = new FileRootNode("Empty Root", new ArrayList()); - dataResultPanel.setNode(d); - dataResultPanel.setPath("Loading..."); - } - }); - final int day = (Integer.valueOf((barData.getXValue()).split("-")[1])); - final DayEpoch de = myme.getDay(day); - final List afs; - if (de != null) { - afs = de.getEvents(); - } else { - logger.log(Level.SEVERE, "There were no events for the clicked-on day: " + day); - return; - } - - SwingUtilities.invokeLater(new Runnable() { - @Override - public void run() { - final FileRootNode d = new FileRootNode("Root", afs); - dataResultPanel.setNode(d); - //set result viewer title path with the current date - String dateString = ye.getYear() + "-" + (1 + me.getMonthInt()) + "-" + +de.dayNum; - dataResultPanel.setPath(dateString); - } - }); - - - } - }); - } - bc.autosize(); - bc.setPrefWidth(FRAME_WIDTH); - return bc; - } - - private static ObservableList> makeObservableListByMonthAllDays(final MonthEpoch me, int year) { - ObservableList> bcData = FXCollections.observableArrayList(); - int totalDays = me.getTotalNumDays(year); - for (int i = 1; i <= totalDays; ++i) { - DayEpoch day = me.getDay(i); - int numFiles = day == null ? 0 : day.getNumFiles(); - BarChart.Data d = new BarChart.Data(me.month + 1 + "-" + i, numFiles); - d.setExtraValue(me); - bcData.add(d); - } - return bcData; - } - - /* - * Section for Utility functions - */ - /** - * - * @param mon The month to convert. Must be minimum 4 characters long - * "February" and "Febr" are acceptable. - * @return The integer value of the month. 
February = 1, July = 6 - */ - private static int monthStringToInt(String mon) { - try { - Date date = new SimpleDateFormat("MMMM", Locale.ENGLISH).parse(mon); - Calendar cal = Calendar.getInstance(); - cal.setTime(date); - return cal.get(Calendar.MONTH); - } catch (ParseException ex) { - logger.log(Level.WARNING, "Unable to convert string " + mon + " to integer", ex); - return -1; - } - } - - /** - * Used for finding the proper month in a list of available months - * - * @param lst The list of months to search through. It is assumed that the - * desired match is in this list. - * @param match The month, in integer format, to retrieve. - * @return The month epoch as specified by match. - */ - private static MonthEpoch findMonth(List lst, int match) { - for (MonthEpoch e : lst) { - if (e.month == match) { - return e; - } - } - return null; - } - - /** - * Used for finding the proper year in a list of available years - * - * @param lst The list of years to search through. It is assumed that the - * desired match is in this list. - * @param match The year to retrieve. - * @return The year epoch as specified by match. - */ - private static YearEpoch findYear(List lst, int match) { - for (YearEpoch e : lst) { - if (e.year == match) { - return e; - } - } - return null; - } - - @Override - public void propertyChange(PropertyChangeEvent evt) { - String prop = evt.getPropertyName(); - if (prop.equals(Case.CASE_ADD_DATA_SOURCE)) { - if (mainFrame != null && !mainFrame.isVisible()) { - // change the lastObjectId to trigger a reparse of mactime barData - ++lastObjectId; - return; - } - - int answer = JOptionPane.showConfirmDialog(mainFrame, "Timeline is out of date. 
Would you like to regenerate it?", "Select an option", JOptionPane.YES_NO_OPTION); - if (answer != JOptionPane.YES_OPTION) { - return; - } - - clearMactimeData(); - - // call performAction as if the user selected 'Make Timeline' from the menu - performAction(); - } else if (prop.equals(Case.CASE_CURRENT_CASE)) { - if (mainFrame != null && mainFrame.isVisible()) { - mainFrame.dispose(); - mainFrame = null; - } - - data = null; - } - } - - private void clearMactimeData() { - // get rid of the old barData - data = null; - - // get rid of the mactime file - java.io.File mactimeFile = new java.io.File(moduleDir, mactimeFileName); - mactimeFile.delete(); - - // close the jframe - if (mainFrame != null) { - mainFrame.setVisible(false); - mainFrame.dispose(); - mainFrame = null; - } - - // remove ourself as change listener on Case - Case.removePropertyChangeListener(this); - listeningToAddImage = false; - - } - - /* - * The backbone of the timeline functionality, years are split into months, months into days, and days contain the events of that given day. - * All of those are Epochs. 
- */ - abstract class Epoch { - - abstract public int getNumFiles(); - } - - private class YearEpoch extends Epoch { - - private int year; - private List months = new ArrayList<>(); - - YearEpoch(int year) { - this.year = year; - } - - public int getYear() { - return year; - } - - @Override - public int getNumFiles() { - int size = 0; - for (MonthEpoch me : months) { - size += me.getNumFiles(); - } - return size; - } - - public MonthEpoch getMonth(int monthNum) { - MonthEpoch month = null; - for (MonthEpoch me : months) { - if (me.getMonthInt() == monthNum) { - month = me; - break; - } - } - return month; - } - - public void add(long fileId, int month, int day) { - // see if this month is in the list - MonthEpoch monthEpoch = null; - for (MonthEpoch me : months) { - if (me.getMonthInt() == month) { - monthEpoch = me; - break; - } - } - - if (monthEpoch == null) { - monthEpoch = new MonthEpoch(month); - months.add(monthEpoch); - } - - // add the file the the MonthEpoch object - monthEpoch.add(fileId, day); - } - } - - private class MonthEpoch extends Epoch { - - private int month; //Zero-indexed: June = 5, August = 7, etc - private List days = new ArrayList<>(); //List of DayEpochs in this month, max 31 - - MonthEpoch(int month) { - this.month = month; - } - - public int getMonthInt() { - return month; - } - - public int getTotalNumDays(int year) { - Calendar cal = Calendar.getInstance(); - cal.set(year, month, 1); - return cal.getActualMaximum(Calendar.DAY_OF_MONTH); - } - - @Override - public int getNumFiles() { - int numFiles = 0; - for (DayEpoch de : days) { - numFiles += de.getNumFiles(); - } - return numFiles; - } - - public DayEpoch getDay(int dayNum) { - DayEpoch de = null; - for (DayEpoch d : days) { - if (d.dayNum == dayNum) { - de = d; - break; - } - } - return de; - } - - public void add(long fileId, int day) { - DayEpoch dayEpoch = null; - for (DayEpoch de : days) { - if (de.getDayInt() == day) { - dayEpoch = de; - break; - } - } - - if (dayEpoch == 
null) { - dayEpoch = new DayEpoch(day); - days.add(dayEpoch); - } - - dayEpoch.add(fileId); - } - - /** - * Returns the month's name in String format, e.g., September, July, - */ - String getMonthName() { - return new DateFormatSymbols().getMonths()[month]; - } - - /** - * @return the list of days in this month - */ - List getDays() { - return this.days; - } - } - - private class DayEpoch extends Epoch { - - private final List fileIds = new ArrayList<>(); - int dayNum = 0; //Day of the month this Epoch represents, 1 indexed: 28=28. - - DayEpoch(int dayOfMonth) { - this.dayNum = dayOfMonth; - } - - public int getDayInt() { - return dayNum; - } - - @Override - public int getNumFiles() { - return fileIds.size(); - } - - public void add(long fileId) { - fileIds.add(fileId); - } - - List getEvents() { - return this.fileIds; - } - } - - // The node factories used to make lists of files to send to the result viewer - // using the lazy loading (rather than background) loading option to facilitate - // loading a huge number of nodes for the given day - private class FileNodeChildFactory extends Children.Keys { - - private List fileIds; - - FileNodeChildFactory(List fileIds) { - super(true); - this.fileIds = fileIds; - } - - @Override - protected void addNotify() { - super.addNotify(); - setKeys(fileIds); - } - - @Override - protected void removeNotify() { - super.removeNotify(); - setKeys(new ArrayList()); - } - - @Override - protected Node[] createNodes(Long t) { - return new Node[]{createNodeForKey(t)}; - } - - // @Override - // protected boolean createKeys(List list) { - // list.addAll(fileIds); - // return true; - // } - //@Override - protected Node createNodeForKey(Long fileId) { - AbstractFile af = null; - try { - af = skCase.getAbstractFileById(fileId); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error getting file by id and creating a node in Timeline: " + fileId, ex); - //no node will be shown for this object - return null; - } - - Node wrapped; - 
if (af.isDir()) { - wrapped = new DirectoryNode(af, false); - } else { - wrapped = new FileNode(af, false); - } - return new FilterNodeLeaf(wrapped); - } - } - - private class FileRootNode extends DisplayableItemNode { - - FileRootNode(String NAME, List fileIds) { - //super(Children.create(new FileNodeChildFactory(fileIds), true)); - super(new FileNodeChildFactory(fileIds), Lookups.singleton(fileIds)); - super.setName(NAME); - super.setDisplayName(NAME); - } - - @Override - public DisplayableItemNode.TYPE getDisplayableItemNodeType() { - return DisplayableItemNode.TYPE.CONTENT; - } - - @Override - public T accept(DisplayableItemNodeVisitor v) { - return null; - } - } - - private List parseMacTime(java.io.File f) { - List years = new ArrayList<>(); - Scanner scan; - try { - scan = new Scanner(new FileInputStream(f)); - } catch (FileNotFoundException ex) { - logger.log(Level.SEVERE, "Error: could not find mactime file.", ex); - return years; - } - scan.useDelimiter(","); - scan.nextLine(); // skip the header line - - int prevYear = -1; - YearEpoch ye = null; - while (scan.hasNextLine()) { - String[] s = scan.nextLine().split(","); //1999-02-08T11:08:08Z, 78706, m..b, rrwxrwxrwx, 0, 0, 8355, /img... 
- String[] datetime = s[0].split("T"); //{1999-02-08, 11:08:08Z} - String[] date = datetime[0].split("-"); // {1999, 02, 08} - int year = Integer.valueOf(date[0]); - int month = Integer.valueOf(date[1]) - 1; //Months are zero indexed: 1 = February, 6 = July, 11 = December - int day = Integer.valueOf(date[2]); //Days are 1 indexed - long ObjId = Long.valueOf(s[4]); - - // when the year changes, create and add a new YearEpoch object to the list - if (year != prevYear) { - ye = new YearEpoch(year); - years.add(ye); - prevYear = year; - } - - if (ye != null) { - ye.add(ObjId, month, day); - } - } - - scan.close(); - - return years; - } - - /** - * Crate a body file and return its path or null if error - * - * @return absolute path string or null if error - */ - private String makeBodyFile() { - // Setup timestamp - DateFormat dateFormat = new SimpleDateFormat("MM-dd-yyyy-HH-mm-ss"); - Date date = new Date(); - String datenotime = dateFormat.format(date); - - final Case currentCase = Case.getCurrentCase(); - - // Get report path - String bodyFilePath = moduleDir.getAbsolutePath() - + java.io.File.separator + currentCase.getName() + "-" + datenotime + ".txt"; - - // Run query to get all files - final String filesAndDirs = "name != '.' 
" - + "AND name != '..'"; - List fileIds = null; - try { - fileIds = skCase.findAllFileIdsWhere(filesAndDirs); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error querying image files to make a body file: " + bodyFilePath, ex); - return null; - } - - // Loop files and write info to report - FileWriter fileWriter = null; - try { - fileWriter = new FileWriter(bodyFilePath, true); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error creating output stream to write body file to: " + bodyFilePath, ex); - return null; - } - - BufferedWriter out = null; - try { - out = new BufferedWriter(fileWriter); - for (long fileId : fileIds) { - AbstractFile file = skCase.getAbstractFileById(fileId); - // try { - // MD5|name|inode|mode_as_string|ObjId|GID|size|atime|mtime|ctime|crtime - if (file.getMd5Hash() != null) { - out.write(file.getMd5Hash()); - } - out.write("|"); - String path = null; - try { - path = file.getUniquePath(); - } catch (TskCoreException e) { - logger.log(Level.SEVERE, "Failed to get the unique path of: " + file + " and writing body file.", e); - return null; - } - - out.write(path); - - out.write("|"); - out.write(Long.toString(file.getMetaAddr())); - out.write("|"); - String modeString = file.getModesAsString(); - if (modeString != null) { - out.write(modeString); - } - out.write("|"); - out.write(Long.toString(file.getId())); - out.write("|"); - out.write(Long.toString(file.getGid())); - out.write("|"); - out.write(Long.toString(file.getSize())); - out.write("|"); - out.write(Long.toString(file.getAtime())); - out.write("|"); - out.write(Long.toString(file.getMtime())); - out.write("|"); - out.write(Long.toString(file.getCtime())); - out.write("|"); - out.write(Long.toString(file.getCrtime())); - out.write("\n"); - } - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error querying file by id", ex); - return null; - - } catch (IOException ex) { - logger.log(Level.WARNING, "Error while trying to write data to the body 
file.", ex); - return null; - } finally { - if (out != null) { - try { - out.flush(); - out.close(); - } catch (IOException ex1) { - logger.log(Level.WARNING, "Could not flush and/or close body file.", ex1); - } - } - } - - - return bodyFilePath; - } - - private String makeMacTime(String pathToBodyFile) { - String cmdpath = ""; - String macpath = ""; - String[] mactimeArgs; - final String machome = macRoot.getAbsolutePath(); - pathToBodyFile = PlatformUtil.getOSFilePath(pathToBodyFile); - if (PlatformUtil.isWindowsOS()) { - macpath = machome + java.io.File.separator + "mactime.exe"; - cmdpath = PlatformUtil.getOSFilePath(macpath); - mactimeArgs = new String[]{"-b", pathToBodyFile, "-d", "-y"}; - } else { - cmdpath = "perl"; - macpath = machome + java.io.File.separator + "mactime.pl"; - mactimeArgs = new String[]{macpath, "-b", pathToBodyFile, "-d", "-y"}; - } - - String macfile = moduleDir.getAbsolutePath() + java.io.File.separator + mactimeFileName; - - - String output = ""; - ExecUtil execUtil = new ExecUtil(); - Writer writer = null; - try { - //JavaSystemCaller.Exec.execute("\"" + command + "\""); - writer = new FileWriter(macfile); - execUtil.execute(writer, cmdpath, mactimeArgs); - } catch (InterruptedException ie) { - logger.log(Level.WARNING, "Mactime process was interrupted by user", ie); - return null; - } catch (IOException ioe) { - logger.log(Level.SEVERE, "Could not create mactime file, encountered error ", ioe); - return null; - } finally { - if (writer != null) { - try { - writer.close(); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Could not clsoe writer after creating mactime file, encountered error ", ex); - } - } - } - - return macfile; - } - - @Override - public boolean isEnabled() { - return Case.isCaseOpen() && this.fxInited; - } - - @Override - public void performAction() { - initTimeline(); - } - - private void initTimeline() { - if (!Case.existsCurrentCase()) { - return; - } - - final Case currentCase = Case.getCurrentCase(); - 
skCase = currentCase.getSleuthkitCase(); - - try { - if (currentCase.getRootObjectsCount() == 0) { - logger.log(Level.INFO, "Error creating timeline, there are no data sources. "); - } else { - - if (IngestManager.getDefault().isIngestRunning()) { - int answer = JOptionPane.showConfirmDialog(new JFrame(), - "You are trying to generate a timeline before " - + "ingest has been completed. The timeline may be " - + "incomplete. Do you want to continue?", "Timeline", - JOptionPane.YES_NO_OPTION); - if (answer != JOptionPane.YES_OPTION) { - return; - } - } - - logger.log(Level.INFO, "Beginning generation of timeline"); - - // if the timeline window is already open, bring to front and do nothing - if (mainFrame != null && mainFrame.isVisible()) { - mainFrame.toFront(); - return; - } - - // listen for case changes (specifically images being added). - if (Case.isCaseOpen() && !listeningToAddImage) { - Case.addPropertyChangeListener(this); - listeningToAddImage = true; - } - - // create the modal progressDialog - SwingUtilities.invokeLater(new Runnable() { - @Override - public void run() { - progressDialog = new TimelineProgressDialog(WindowManager.getDefault().getMainWindow(), true); - progressDialog.setVisible(true); - } - }); - - // initialize mactimeFileName - mactimeFileName = currentCase.getName() + "-MACTIME.txt"; - - // see if barData has been added to the database since the last - // time timeline ran - long objId = skCase.getLastObjectId(); - if (objId != lastObjectId && lastObjectId != -1) { - clearMactimeData(); - } - lastObjectId = objId; - - customize(); - } - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error when generating timeline, ", ex); - } catch (Exception ex) { - logger.log(Level.SEVERE, "Unexpected error when generating timeline, ", ex); - } - } - - @Override - public String getName() { - return "Make Timeline (Beta)"; - } - - @Override - public HelpCtx getHelpCtx() { - return HelpCtx.DEFAULT_HELP; - } - - @Override - public boolean 
asynchronous() { - return false; - } -} +/* + * Autopsy Forensic Browser + * + * Copyright 2013 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.timeline; + +import java.awt.Component; +import java.awt.Cursor; +import java.awt.Dimension; +import java.awt.EventQueue; +import java.beans.PropertyChangeEvent; +import java.beans.PropertyChangeListener; +import java.io.BufferedWriter; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileWriter; +import java.io.IOException; +import java.io.Writer; +import java.text.DateFormat; +import java.text.DateFormatSymbols; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.Date; +import java.util.List; +import java.util.Locale; +import java.util.Scanner; +import java.util.Stack; +import java.util.logging.Level; +import javafx.application.Platform; +import javafx.beans.value.ChangeListener; +import javafx.beans.value.ObservableValue; +import javafx.collections.FXCollections; +import javafx.collections.ObservableList; +import javafx.embed.swing.JFXPanel; +import javafx.event.ActionEvent; +import javafx.event.EventHandler; +import javafx.geometry.Pos; +import javafx.scene.Group; +import javafx.scene.Scene; +import javafx.scene.chart.BarChart; +import javafx.scene.chart.CategoryAxis; +import 
javafx.scene.chart.NumberAxis; +import javafx.scene.control.Button; +import javafx.scene.control.ComboBox; +import javafx.scene.control.Label; +import javafx.scene.control.ScrollPane; +import javafx.scene.input.MouseButton; +import javafx.scene.input.MouseEvent; +import javafx.scene.layout.HBox; +import javafx.scene.layout.VBox; +import javafx.scene.paint.Color; +import javax.swing.JFrame; +import javax.swing.JOptionPane; +import javax.swing.SwingUtilities; +import org.netbeans.api.progress.ProgressHandle; +import org.netbeans.api.progress.ProgressHandleFactory; +import org.openide.awt.ActionID; +import org.openide.awt.ActionReference; +import org.openide.awt.ActionReferences; +import org.openide.awt.ActionRegistration; +import org.openide.modules.InstalledFileLocator; +import org.openide.modules.ModuleInstall; +import org.openide.nodes.Children; +import org.openide.nodes.Node; +import org.openide.util.HelpCtx; +import org.openide.util.NbBundle; +import org.openide.util.actions.CallableSystemAction; +import org.openide.util.actions.Presenter; +import org.openide.util.lookup.Lookups; +import org.openide.windows.WindowManager; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.core.Installer; +import org.sleuthkit.autopsy.corecomponents.DataContentPanel; +import org.sleuthkit.autopsy.corecomponents.DataResultPanel; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.coreutils.PlatformUtil; +import org.sleuthkit.autopsy.datamodel.FilterNodeLeaf; +import org.sleuthkit.autopsy.datamodel.DirectoryNode; +import org.sleuthkit.autopsy.datamodel.DisplayableItemNode; +import org.sleuthkit.autopsy.datamodel.DisplayableItemNodeVisitor; +import org.sleuthkit.autopsy.datamodel.FileNode; +import org.sleuthkit.autopsy.ingest.IngestManager; +import org.sleuthkit.autopsy.coreutils.ExecUtil; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TskCoreException; + 
+@ActionID(category = "Tools", id = "org.sleuthkit.autopsy.timeline.Timeline") +@ActionRegistration(displayName = "#CTL_MakeTimeline", lazy = false) +@ActionReferences(value = { + @ActionReference(path = "Menu/Tools", position = 100)}) +@NbBundle.Messages(value = "CTL_TimelineView=Generate Timeline") +/** + * The Timeline Action entry point. Collects data and pushes data to javafx + * widgets + * + */ +public class Timeline extends CallableSystemAction implements Presenter.Toolbar, PropertyChangeListener { + + private static final Logger logger = Logger.getLogger(Timeline.class.getName()); + private final java.io.File macRoot = InstalledFileLocator.getDefault().locate("mactime", Timeline.class.getPackage().getName(), false); + private TimelineFrame mainFrame; //frame for holding all the elements + private Group fxGroupCharts; //Orders the charts + private Scene fxSceneCharts; //Displays the charts + private HBox fxHBoxCharts; //Holds the navigation buttons in horiztonal fashion. + private VBox fxVBox; //Holds the JavaFX Elements in vertical fashion. + private JFXPanel fxPanelCharts; //FX panel to hold the group + private BarChart fxChartEvents; //Yearly/Monthly events - Bar chart + private ScrollPane fxScrollEvents; //Scroll Panes for dealing with oversized an oversized chart + private static final int FRAME_HEIGHT = 700; //Sizing constants + private static final int FRAME_WIDTH = 1200; + private Button fxZoomOutButton; //Navigation buttons + private ComboBox fxDropdownSelectYears; //Dropdown box for selecting years. Useful when the charts' scale means some years are unclickable, despite having events. + private final Stack> fxStackPrevCharts = new Stack>(); //Stack for storing drill-up information. + private BarChart fxChartTopLevel; //the topmost chart, used for resetting to default view. 
+ private DataResultPanel dataResultPanel; + private DataContentPanel dataContentPanel; + private ProgressHandle progress; + private java.io.File moduleDir; + private String mactimeFileName; + private List data; + private boolean listeningToAddImage = false; + private long lastObjectId = -1; + private TimelineProgressDialog progressDialog; + private EventHandler fxMouseEnteredListener; + private EventHandler fxMouseExitedListener; + private SleuthkitCase skCase; + private boolean fxInited = false; + + public Timeline() { + super(); + + fxInited = Installer.isJavaFxInited(); + + } + + //Swing components and JavafX components don't play super well together + //Swing components need to be initialized first, in the swing specific thread + //Next, the javafx components may be initialized. + private void customize() { + + //listeners + fxMouseEnteredListener = new EventHandler() { + @Override + public void handle(MouseEvent e) { + fxPanelCharts.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR)); + } + }; + fxMouseExitedListener = new EventHandler() { + @Override + public void handle(MouseEvent e) { + fxPanelCharts.setCursor(null); + } + }; + + SwingUtilities.invokeLater(new Runnable() { + @Override + public void run() { + //Making the main frame * + + mainFrame = new TimelineFrame(); + mainFrame.setFrameName(Case.getCurrentCase().getName() + " - Autopsy Timeline (Beta)"); + + //use the same icon on jframe as main application + mainFrame.setIconImage(WindowManager.getDefault().getMainWindow().getIconImage()); + mainFrame.setFrameSize(new Dimension(FRAME_WIDTH, FRAME_HEIGHT)); //(Width, Height) + + + dataContentPanel = DataContentPanel.createInstance(); + //dataContentPanel.setAlignmentX(Component.RIGHT_ALIGNMENT); + //dataContentPanel.setPreferredSize(new Dimension(FRAME_WIDTH, (int) (FRAME_HEIGHT * 0.4))); + + dataResultPanel = DataResultPanel.createInstance("Timeline Results", "", Node.EMPTY, 0, dataContentPanel); + 
dataResultPanel.setContentViewer(dataContentPanel); + //dataResultPanel.setAlignmentX(Component.LEFT_ALIGNMENT); + //dataResultPanel.setPreferredSize(new Dimension((int)(FRAME_WIDTH * 0.5), (int) (FRAME_HEIGHT * 0.5))); + logger.log(Level.INFO, "Successfully created viewers"); + + mainFrame.setBottomLeftPanel(dataResultPanel); + mainFrame.setBottomRightPanel(dataContentPanel); + + runJavaFxThread(); + } + }); + + + } + + private void runJavaFxThread() { + //JavaFX thread + //JavaFX components MUST be run in the JavaFX thread, otherwise massive amounts of exceptions will be thrown and caught. Liable to freeze up and crash. + //Components can be declared whenever, but initialization and manipulation must take place here. + Platform.runLater(new Runnable() { + @Override + public void run() { + try { + // start the progress bar + progress = ProgressHandleFactory.createHandle("Creating timeline . . ."); + progress.start(); + + fxChartEvents = null; //important to reset old data + fxPanelCharts = new JFXPanel(); + fxGroupCharts = new Group(); + fxSceneCharts = new Scene(fxGroupCharts, FRAME_WIDTH, FRAME_HEIGHT * 0.6); //Width, Height + fxVBox = new VBox(5); + fxVBox.setAlignment(Pos.BOTTOM_CENTER); + fxHBoxCharts = new HBox(10); + fxHBoxCharts.setAlignment(Pos.BOTTOM_CENTER); + + //Initializing default values for the scroll pane + fxScrollEvents = new ScrollPane(); + fxScrollEvents.setPrefSize(FRAME_WIDTH, FRAME_HEIGHT * 0.6); //Width, Height + fxScrollEvents.setContent(null); //Needs some content, otherwise it crashes + + // set up moduleDir + moduleDir = new java.io.File(Case.getCurrentCase().getModulesOutputDirAbsPath() + java.io.File.separator + "timeline"); + if (!moduleDir.exists()) { + moduleDir.mkdir(); + } + + int currentProgress = 0; + java.io.File mactimeFile = new java.io.File(moduleDir, mactimeFileName); + if (!mactimeFile.exists()) { + progressDialog.setProgressTotal(3); //total 3 units + logger.log(Level.INFO, "Creating body file"); + 
progressDialog.updateProgressBar("Generating Bodyfile"); + String bodyFilePath = makeBodyFile(); + progressDialog.updateProgressBar(++currentProgress); + logger.log(Level.INFO, "Creating mactime file: " + mactimeFile.getAbsolutePath()); + progressDialog.updateProgressBar("Generating Mactime"); + makeMacTime(bodyFilePath); + progressDialog.updateProgressBar(++currentProgress); + data = null; + } else { + progressDialog.setProgressTotal(1); //total 1 units + logger.log(Level.INFO, "Mactime file already exists; parsing that: " + mactimeFile.getAbsolutePath()); + } + + + progressDialog.updateProgressBar("Parsing Mactime"); + if (data == null) { + logger.log(Level.INFO, "Parsing mactime file: " + mactimeFile.getAbsolutePath()); + data = parseMacTime(mactimeFile); //The sum total of the mactime parsing. YearEpochs contain everything you need to make a timeline. + } + progressDialog.updateProgressBar(++currentProgress); + + //Making a dropdown box to select years. + List lsi = new ArrayList(); //List is in the format of {Year : Number of Events}, used for selecting from the dropdown. 
+ for (YearEpoch ye : data) { + lsi.add(ye.year + " : " + ye.getNumFiles()); + } + ObservableList listSelect = FXCollections.observableArrayList(lsi); + fxDropdownSelectYears = new ComboBox(listSelect); + + //Buttons for navigating up and down the timeline + fxZoomOutButton = new Button("Zoom Out"); + fxZoomOutButton.setOnAction(new EventHandler() { + @Override + public void handle(ActionEvent e) { + BarChart bc; + if (fxStackPrevCharts.size() == 0) { + bc = fxChartTopLevel; + } else { + bc = fxStackPrevCharts.pop(); + } + fxChartEvents = bc; + fxScrollEvents.setContent(fxChartEvents); + } + }); + + fxDropdownSelectYears.getSelectionModel().selectedItemProperty().addListener(new ChangeListener() { + @Override + public void changed(ObservableValue ov, String t, String t1) { + if (fxDropdownSelectYears.getValue() != null) { + mainFrame.setTopComponentCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); + try { + fxChartEvents = createMonthsWithDrill(findYear(data, Integer.valueOf(fxDropdownSelectYears.getValue().split(" ")[0]))); + fxScrollEvents.setContent(fxChartEvents); + } finally { + mainFrame.setTopComponentCursor(null); + } + } + } + }); + + //Adding things to the V and H boxes. + //hBox_Charts stores the pseudo menu bar at the top of the timeline. |Zoom Out|View Year: [Select Year]|â–º| + fxHBoxCharts.getChildren().addAll(fxZoomOutButton, new Label("Go To:"), fxDropdownSelectYears); + fxVBox.getChildren().addAll(fxHBoxCharts, fxScrollEvents); //FxBox_V holds things in a visual stack. + fxGroupCharts.getChildren().add(fxVBox); //Adding the FxBox to the group. Groups make things easier to manipulate without having to update a hundred things every change. 
+ fxPanelCharts.setScene(fxSceneCharts); + + + fxPanelCharts.setAlignmentX(Component.LEFT_ALIGNMENT); + + fxChartTopLevel = createYearChartWithDrill(data); + fxChartEvents = fxChartTopLevel; + fxScrollEvents.setContent(fxChartEvents); + + EventQueue.invokeLater(new Runnable() { + @Override + public void run() { + mainFrame.setTopPanel(fxPanelCharts); + dataResultPanel.open(); + //mainFrame.pack(); + mainFrame.setVisible(true); + } + }); + } finally { + // stop the progress bar + progress.finish(); + + // close the progressDialog + progressDialog.doClose(0); + } + } + }); + } + + /** + * Creates a BarChart with datapoints for all the years from the parsed + * mactime file. + * + * @param allYears The list of years that have barData from the mactime file + * @return BarChart scaled to the year level + */ + private BarChart createYearChartWithDrill(final List allYears) { + final CategoryAxis xAxis = new CategoryAxis(); //Axes are very specific types. Categorys are strings. + final NumberAxis yAxis = new NumberAxis(); + final Label l = new Label(""); + l.setStyle("-fx-font: 24 arial;"); + l.setTextFill(Color.AZURE); + xAxis.setLabel("Years"); + yAxis.setLabel("Number of Events"); + //Charts are made up of individual pieces of Chart.Data. In this case, a piece of barData is a single bar on the graph. + //Data is packaged into a series, which can be assigned custom colors or styling + //After the series are created, 1 or more series are packaged into a single chart. + ObservableList> bcData = FXCollections.observableArrayList(); + BarChart.Series se = new BarChart.Series(); + if (allYears != null) { + for (final YearEpoch ye : allYears) { + se.getData().add(new BarChart.Data(String.valueOf(ye.year), ye.getNumFiles())); + } + } + bcData.add(se); + + + //Note: + // BarChart.Data wraps the Java Nodes class. BUT, until a BarChart.Data gets added to an actual series, it's node is null, and you can perform no operations on it. 
+ // When the Data is added to a series(or a chart? I am unclear on where), a node is automaticaly generated for it, after which you can perform any of the operations it offers. + // In addtion, you are free to set the node to whatever you want. It wraps the most generic Node class. + // But it is for this reason that the chart generating functions have two forloops. I do not believe they can be condensed into a single loop due to the nodes being null until + // an undetermined point in time. + BarChart bc = new BarChart(xAxis, yAxis, bcData); + for (final BarChart.Data barData : bc.getData().get(0).getData()) { //.get(0) refers to the BarChart.Series class to work on. There is only one series in this graph, so get(0) is safe. + barData.getNode().setScaleX(.5); + + final javafx.scene.Node barNode = barData.getNode(); + //hover listener + barNode.addEventHandler(MouseEvent.MOUSE_ENTERED_TARGET, fxMouseEnteredListener); + barNode.addEventHandler(MouseEvent.MOUSE_EXITED_TARGET, fxMouseExitedListener); + + //click listener + barNode.addEventHandler(MouseEvent.MOUSE_CLICKED, + new EventHandler() { + @Override + public void handle(MouseEvent e) { + if (e.getButton().equals(MouseButton.PRIMARY)) { + if (e.getClickCount() == 1) { + Platform.runLater(new Runnable() { + @Override + public void run() { + BarChart b = + createMonthsWithDrill(findYear(allYears, Integer.valueOf(barData.getXValue()))); + fxChartEvents = b; + fxScrollEvents.setContent(fxChartEvents); + } + }); + + } + } + } + }); + } + + bc.autosize(); //Get an auto height + bc.setPrefWidth(FRAME_WIDTH); //but override the width + bc.setLegendVisible(false); //The legend adds too much extra chart space, it's not necessary. + return bc; + } + + /* + * Displays a chart with events from one year only, separated into 1-month chunks. + * Always 12 per year, empty months are represented by no bar. 
+ */ + private BarChart createMonthsWithDrill(final YearEpoch ye) { + + final CategoryAxis xAxis = new CategoryAxis(); + final NumberAxis yAxis = new NumberAxis(); + xAxis.setLabel("Month (" + ye.year + ")"); + yAxis.setLabel("Number of Events"); + ObservableList> bcData = FXCollections.observableArrayList(); + + BarChart.Series se = new BarChart.Series(); + for (int monthNum = 0; monthNum < 12; ++monthNum) { + String monthName = new DateFormatSymbols().getMonths()[monthNum]; + MonthEpoch month = ye.getMonth(monthNum); + int numEvents = month == null ? 0 : month.getNumFiles(); + se.getData().add(new BarChart.Data(monthName, numEvents)); //Adding new barData at {X-pos, Y-Pos} + } + bcData.add(se); + final BarChart bc = new BarChart(xAxis, yAxis, bcData); + + for (int i = 0; i < 12; i++) { + for (final BarChart.Data barData : bc.getData().get(0).getData()) { + //Note: + // All the charts of this package have a problem where when the chart gets below a certain pixel ratio, the barData stops drawing. The axes and the labels remain, + // But the actual chart barData is invisible, unclickable, and unrendered. To partially compensate for that, barData.getNode() can be manually scaled up to increase visibility. + // Sometimes I've had it jacked up to as much as x2400 just to see a sliver of information. + // But that doesn't work all the time. Adding it to a scrollpane and letting the user scroll up and down to view the chart is the other workaround. Both of these fixes suck. 
+ final javafx.scene.Node barNode = barData.getNode(); + barNode.setScaleX(.5); + + //hover listener + barNode.addEventHandler(MouseEvent.MOUSE_ENTERED_TARGET, fxMouseEnteredListener); + barNode.addEventHandler(MouseEvent.MOUSE_EXITED_TARGET, fxMouseExitedListener); + + //clicks + barNode.addEventHandler(MouseEvent.MOUSE_PRESSED, + new EventHandler() { + @Override + public void handle(MouseEvent e) { + if (e.getButton().equals(MouseButton.PRIMARY)) { + if (e.getClickCount() == 1) { + Platform.runLater(new Runnable() { + @Override + public void run() { + fxChartEvents = createEventsByMonth(findMonth(ye.months, monthStringToInt(barData.getXValue())), ye); + fxScrollEvents.setContent(fxChartEvents); + } + }); + } + } + } + }); + } + } + + bc.autosize(); + bc.setPrefWidth(FRAME_WIDTH); + bc.setLegendVisible(false); + fxStackPrevCharts.push(bc); + return bc; + } + + + /* + * Displays a chart with events from one month only. + * Up to 31 days per month, as low as 28 as determined by the specific MonthEpoch + */ + private BarChart createEventsByMonth(final MonthEpoch me, final YearEpoch ye) { + final CategoryAxis xAxis = new CategoryAxis(); + final NumberAxis yAxis = new NumberAxis(); + xAxis.setLabel("Day of Month"); + yAxis.setLabel("Number of Events"); + ObservableList> bcData = makeObservableListByMonthAllDays(me, ye.getYear()); + BarChart.Series series = new BarChart.Series(bcData); + series.setName(me.getMonthName() + " " + ye.getYear()); + + + ObservableList> ol = + FXCollections.>observableArrayList(series); + + final BarChart bc = new BarChart(xAxis, yAxis, ol); + for (final BarChart.Data barData : bc.getData().get(0).getData()) { + //data.getNode().setScaleX(2); + + final javafx.scene.Node barNode = barData.getNode(); + + //hover listener + barNode.addEventHandler(MouseEvent.MOUSE_ENTERED_TARGET, fxMouseEnteredListener); + barNode.addEventHandler(MouseEvent.MOUSE_EXITED_TARGET, fxMouseExitedListener); + + barNode.addEventHandler(MouseEvent.MOUSE_PRESSED, + new 
EventHandler() { + MonthEpoch myme = me; + + @Override + public void handle(MouseEvent e) { + SwingUtilities.invokeLater(new Runnable() { + @Override + public void run() { + //reset the view and free the current nodes before loading new ones + final FileRootNode d = new FileRootNode("Empty Root", new ArrayList()); + dataResultPanel.setNode(d); + dataResultPanel.setPath("Loading..."); + } + }); + final int day = (Integer.valueOf((barData.getXValue()).split("-")[1])); + final DayEpoch de = myme.getDay(day); + final List afs; + if (de != null) { + afs = de.getEvents(); + } else { + logger.log(Level.SEVERE, "There were no events for the clicked-on day: " + day); + return; + } + + SwingUtilities.invokeLater(new Runnable() { + @Override + public void run() { + final FileRootNode d = new FileRootNode("Root", afs); + dataResultPanel.setNode(d); + //set result viewer title path with the current date + String dateString = ye.getYear() + "-" + (1 + me.getMonthInt()) + "-" + +de.dayNum; + dataResultPanel.setPath(dateString); + } + }); + + + } + }); + } + bc.autosize(); + bc.setPrefWidth(FRAME_WIDTH); + return bc; + } + + private static ObservableList> makeObservableListByMonthAllDays(final MonthEpoch me, int year) { + ObservableList> bcData = FXCollections.observableArrayList(); + int totalDays = me.getTotalNumDays(year); + for (int i = 1; i <= totalDays; ++i) { + DayEpoch day = me.getDay(i); + int numFiles = day == null ? 0 : day.getNumFiles(); + BarChart.Data d = new BarChart.Data(me.month + 1 + "-" + i, numFiles); + d.setExtraValue(me); + bcData.add(d); + } + return bcData; + } + + /* + * Section for Utility functions + */ + /** + * + * @param mon The month to convert. Must be minimum 4 characters long + * "February" and "Febr" are acceptable. + * @return The integer value of the month. 
February = 1, July = 6 + */ + private static int monthStringToInt(String mon) { + try { + Date date = new SimpleDateFormat("MMMM", Locale.ENGLISH).parse(mon); + Calendar cal = Calendar.getInstance(); + cal.setTime(date); + return cal.get(Calendar.MONTH); + } catch (ParseException ex) { + logger.log(Level.WARNING, "Unable to convert string " + mon + " to integer", ex); + return -1; + } + } + + /** + * Used for finding the proper month in a list of available months + * + * @param lst The list of months to search through. It is assumed that the + * desired match is in this list. + * @param match The month, in integer format, to retrieve. + * @return The month epoch as specified by match. + */ + private static MonthEpoch findMonth(List lst, int match) { + for (MonthEpoch e : lst) { + if (e.month == match) { + return e; + } + } + return null; + } + + /** + * Used for finding the proper year in a list of available years + * + * @param lst The list of years to search through. It is assumed that the + * desired match is in this list. + * @param match The year to retrieve. + * @return The year epoch as specified by match. + */ + private static YearEpoch findYear(List lst, int match) { + for (YearEpoch e : lst) { + if (e.year == match) { + return e; + } + } + return null; + } + + @Override + public void propertyChange(PropertyChangeEvent evt) { + String prop = evt.getPropertyName(); + if (prop.equals(Case.CASE_ADD_DATA_SOURCE)) { + if (mainFrame != null && !mainFrame.isVisible()) { + // change the lastObjectId to trigger a reparse of mactime barData + ++lastObjectId; + return; + } + + int answer = JOptionPane.showConfirmDialog(mainFrame, "Timeline is out of date. 
Would you like to regenerate it?", "Select an option", JOptionPane.YES_NO_OPTION); + if (answer != JOptionPane.YES_OPTION) { + return; + } + + clearMactimeData(); + + // call performAction as if the user selected 'Make Timeline' from the menu + performAction(); + } else if (prop.equals(Case.CASE_CURRENT_CASE)) { + if (mainFrame != null && mainFrame.isVisible()) { + mainFrame.dispose(); + mainFrame = null; + } + + data = null; + } + } + + private void clearMactimeData() { + // get rid of the old barData + data = null; + + // get rid of the mactime file + java.io.File mactimeFile = new java.io.File(moduleDir, mactimeFileName); + mactimeFile.delete(); + + // close the jframe + if (mainFrame != null) { + mainFrame.setVisible(false); + mainFrame.dispose(); + mainFrame = null; + } + + // remove ourself as change listener on Case + Case.removePropertyChangeListener(this); + listeningToAddImage = false; + + } + + /* + * The backbone of the timeline functionality, years are split into months, months into days, and days contain the events of that given day. + * All of those are Epochs. 
+ */ + abstract class Epoch { + + abstract public int getNumFiles(); + } + + private class YearEpoch extends Epoch { + + private int year; + private List months = new ArrayList<>(); + + YearEpoch(int year) { + this.year = year; + } + + public int getYear() { + return year; + } + + @Override + public int getNumFiles() { + int size = 0; + for (MonthEpoch me : months) { + size += me.getNumFiles(); + } + return size; + } + + public MonthEpoch getMonth(int monthNum) { + MonthEpoch month = null; + for (MonthEpoch me : months) { + if (me.getMonthInt() == monthNum) { + month = me; + break; + } + } + return month; + } + + public void add(long fileId, int month, int day) { + // see if this month is in the list + MonthEpoch monthEpoch = null; + for (MonthEpoch me : months) { + if (me.getMonthInt() == month) { + monthEpoch = me; + break; + } + } + + if (monthEpoch == null) { + monthEpoch = new MonthEpoch(month); + months.add(monthEpoch); + } + + // add the file the the MonthEpoch object + monthEpoch.add(fileId, day); + } + } + + private class MonthEpoch extends Epoch { + + private int month; //Zero-indexed: June = 5, August = 7, etc + private List days = new ArrayList<>(); //List of DayEpochs in this month, max 31 + + MonthEpoch(int month) { + this.month = month; + } + + public int getMonthInt() { + return month; + } + + public int getTotalNumDays(int year) { + Calendar cal = Calendar.getInstance(); + cal.set(year, month, 1); + return cal.getActualMaximum(Calendar.DAY_OF_MONTH); + } + + @Override + public int getNumFiles() { + int numFiles = 0; + for (DayEpoch de : days) { + numFiles += de.getNumFiles(); + } + return numFiles; + } + + public DayEpoch getDay(int dayNum) { + DayEpoch de = null; + for (DayEpoch d : days) { + if (d.dayNum == dayNum) { + de = d; + break; + } + } + return de; + } + + public void add(long fileId, int day) { + DayEpoch dayEpoch = null; + for (DayEpoch de : days) { + if (de.getDayInt() == day) { + dayEpoch = de; + break; + } + } + + if (dayEpoch == 
null) { + dayEpoch = new DayEpoch(day); + days.add(dayEpoch); + } + + dayEpoch.add(fileId); + } + + /** + * Returns the month's name in String format, e.g., September, July, + */ + String getMonthName() { + return new DateFormatSymbols().getMonths()[month]; + } + + /** + * @return the list of days in this month + */ + List getDays() { + return this.days; + } + } + + private class DayEpoch extends Epoch { + + private final List fileIds = new ArrayList<>(); + int dayNum = 0; //Day of the month this Epoch represents, 1 indexed: 28=28. + + DayEpoch(int dayOfMonth) { + this.dayNum = dayOfMonth; + } + + public int getDayInt() { + return dayNum; + } + + @Override + public int getNumFiles() { + return fileIds.size(); + } + + public void add(long fileId) { + fileIds.add(fileId); + } + + List getEvents() { + return this.fileIds; + } + } + + // The node factories used to make lists of files to send to the result viewer + // using the lazy loading (rather than background) loading option to facilitate + // loading a huge number of nodes for the given day + private class FileNodeChildFactory extends Children.Keys { + + private List fileIds; + + FileNodeChildFactory(List fileIds) { + super(true); + this.fileIds = fileIds; + } + + @Override + protected void addNotify() { + super.addNotify(); + setKeys(fileIds); + } + + @Override + protected void removeNotify() { + super.removeNotify(); + setKeys(new ArrayList()); + } + + @Override + protected Node[] createNodes(Long t) { + return new Node[]{createNodeForKey(t)}; + } + + // @Override + // protected boolean createKeys(List list) { + // list.addAll(fileIds); + // return true; + // } + //@Override + protected Node createNodeForKey(Long fileId) { + AbstractFile af = null; + try { + af = skCase.getAbstractFileById(fileId); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Error getting file by id and creating a node in Timeline: " + fileId, ex); + //no node will be shown for this object + return null; + } + + Node wrapped; + 
if (af.isDir()) { + wrapped = new DirectoryNode(af, false); + } else { + wrapped = new FileNode(af, false); + } + return new FilterNodeLeaf(wrapped); + } + } + + private class FileRootNode extends DisplayableItemNode { + + FileRootNode(String NAME, List fileIds) { + //super(Children.create(new FileNodeChildFactory(fileIds), true)); + super(new FileNodeChildFactory(fileIds), Lookups.singleton(fileIds)); + super.setName(NAME); + super.setDisplayName(NAME); + } + + @Override + public DisplayableItemNode.TYPE getDisplayableItemNodeType() { + return DisplayableItemNode.TYPE.CONTENT; + } + + @Override + public T accept(DisplayableItemNodeVisitor v) { + return null; + } + } + + private List parseMacTime(java.io.File f) { + List years = new ArrayList<>(); + Scanner scan; + try { + scan = new Scanner(new FileInputStream(f)); + } catch (FileNotFoundException ex) { + logger.log(Level.SEVERE, "Error: could not find mactime file.", ex); + return years; + } + scan.useDelimiter(","); + scan.nextLine(); // skip the header line + + int prevYear = -1; + YearEpoch ye = null; + while (scan.hasNextLine()) { + String[] s = scan.nextLine().split(","); //1999-02-08T11:08:08Z, 78706, m..b, rrwxrwxrwx, 0, 0, 8355, /img... 
+ String[] datetime = s[0].split("T"); //{1999-02-08, 11:08:08Z} + String[] date = datetime[0].split("-"); // {1999, 02, 08} + int year = Integer.valueOf(date[0]); + int month = Integer.valueOf(date[1]) - 1; //Months are zero indexed: 1 = February, 6 = July, 11 = December + int day = Integer.valueOf(date[2]); //Days are 1 indexed + long ObjId = Long.valueOf(s[4]); + + // when the year changes, create and add a new YearEpoch object to the list + if (year != prevYear) { + ye = new YearEpoch(year); + years.add(ye); + prevYear = year; + } + + if (ye != null) { + ye.add(ObjId, month, day); + } + } + + scan.close(); + + return years; + } + + /** + * Crate a body file and return its path or null if error + * + * @return absolute path string or null if error + */ + private String makeBodyFile() { + // Setup timestamp + DateFormat dateFormat = new SimpleDateFormat("MM-dd-yyyy-HH-mm-ss"); + Date date = new Date(); + String datenotime = dateFormat.format(date); + + final Case currentCase = Case.getCurrentCase(); + + // Get report path + String bodyFilePath = moduleDir.getAbsolutePath() + + java.io.File.separator + currentCase.getName() + "-" + datenotime + ".txt"; + + // Run query to get all files + final String filesAndDirs = "name != '.' 
" + + "AND name != '..'"; + List fileIds = null; + try { + fileIds = skCase.findAllFileIdsWhere(filesAndDirs); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Error querying image files to make a body file: " + bodyFilePath, ex); + return null; + } + + // Loop files and write info to report + FileWriter fileWriter = null; + try { + fileWriter = new FileWriter(bodyFilePath, true); + } catch (IOException ex) { + logger.log(Level.SEVERE, "Error creating output stream to write body file to: " + bodyFilePath, ex); + return null; + } + + BufferedWriter out = null; + try { + out = new BufferedWriter(fileWriter); + for (long fileId : fileIds) { + AbstractFile file = skCase.getAbstractFileById(fileId); + // try { + // MD5|name|inode|mode_as_string|ObjId|GID|size|atime|mtime|ctime|crtime + if (file.getMd5Hash() != null) { + out.write(file.getMd5Hash()); + } + out.write("|"); + String path = null; + try { + path = file.getUniquePath(); + } catch (TskCoreException e) { + logger.log(Level.SEVERE, "Failed to get the unique path of: " + file + " and writing body file.", e); + return null; + } + + out.write(path); + + out.write("|"); + out.write(Long.toString(file.getMetaAddr())); + out.write("|"); + String modeString = file.getModesAsString(); + if (modeString != null) { + out.write(modeString); + } + out.write("|"); + out.write(Long.toString(file.getId())); + out.write("|"); + out.write(Long.toString(file.getGid())); + out.write("|"); + out.write(Long.toString(file.getSize())); + out.write("|"); + out.write(Long.toString(file.getAtime())); + out.write("|"); + out.write(Long.toString(file.getMtime())); + out.write("|"); + out.write(Long.toString(file.getCtime())); + out.write("|"); + out.write(Long.toString(file.getCrtime())); + out.write("\n"); + } + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Error querying file by id", ex); + return null; + + } catch (IOException ex) { + logger.log(Level.WARNING, "Error while trying to write data to the body 
file.", ex); + return null; + } finally { + if (out != null) { + try { + out.flush(); + out.close(); + } catch (IOException ex1) { + logger.log(Level.WARNING, "Could not flush and/or close body file.", ex1); + } + } + } + + + return bodyFilePath; + } + + private String makeMacTime(String pathToBodyFile) { + String cmdpath = ""; + String macpath = ""; + String[] mactimeArgs; + final String machome = macRoot.getAbsolutePath(); + pathToBodyFile = PlatformUtil.getOSFilePath(pathToBodyFile); + if (PlatformUtil.isWindowsOS()) { + macpath = machome + java.io.File.separator + "mactime.exe"; + cmdpath = PlatformUtil.getOSFilePath(macpath); + mactimeArgs = new String[]{"-b", pathToBodyFile, "-d", "-y"}; + } else { + cmdpath = "perl"; + macpath = machome + java.io.File.separator + "mactime.pl"; + mactimeArgs = new String[]{macpath, "-b", pathToBodyFile, "-d", "-y"}; + } + + String macfile = moduleDir.getAbsolutePath() + java.io.File.separator + mactimeFileName; + + + String output = ""; + ExecUtil execUtil = new ExecUtil(); + Writer writer = null; + try { + //JavaSystemCaller.Exec.execute("\"" + command + "\""); + writer = new FileWriter(macfile); + execUtil.execute(writer, cmdpath, mactimeArgs); + } catch (InterruptedException ie) { + logger.log(Level.WARNING, "Mactime process was interrupted by user", ie); + return null; + } catch (IOException ioe) { + logger.log(Level.SEVERE, "Could not create mactime file, encountered error ", ioe); + return null; + } finally { + if (writer != null) { + try { + writer.close(); + } catch (IOException ex) { + logger.log(Level.SEVERE, "Could not clsoe writer after creating mactime file, encountered error ", ex); + } + } + } + + return macfile; + } + + @Override + public boolean isEnabled() { + return Case.isCaseOpen() && this.fxInited; + } + + @Override + public void performAction() { + initTimeline(); + } + + private void initTimeline() { + if (!Case.existsCurrentCase()) { + return; + } + + final Case currentCase = Case.getCurrentCase(); + 
skCase = currentCase.getSleuthkitCase(); + + try { + if (currentCase.getRootObjectsCount() == 0) { + logger.log(Level.INFO, "Error creating timeline, there are no data sources. "); + } else { + + if (IngestManager.getDefault().isIngestRunning()) { + int answer = JOptionPane.showConfirmDialog(new JFrame(), + "You are trying to generate a timeline before " + + "ingest has been completed. The timeline may be " + + "incomplete. Do you want to continue?", "Timeline", + JOptionPane.YES_NO_OPTION); + if (answer != JOptionPane.YES_OPTION) { + return; + } + } + + logger.log(Level.INFO, "Beginning generation of timeline"); + + // if the timeline window is already open, bring to front and do nothing + if (mainFrame != null && mainFrame.isVisible()) { + mainFrame.toFront(); + return; + } + + // listen for case changes (specifically images being added). + if (Case.isCaseOpen() && !listeningToAddImage) { + Case.addPropertyChangeListener(this); + listeningToAddImage = true; + } + + // create the modal progressDialog + SwingUtilities.invokeLater(new Runnable() { + @Override + public void run() { + progressDialog = new TimelineProgressDialog(WindowManager.getDefault().getMainWindow(), true); + progressDialog.setVisible(true); + } + }); + + // initialize mactimeFileName + mactimeFileName = currentCase.getName() + "-MACTIME.txt"; + + // see if barData has been added to the database since the last + // time timeline ran + long objId = skCase.getLastObjectId(); + if (objId != lastObjectId && lastObjectId != -1) { + clearMactimeData(); + } + lastObjectId = objId; + + customize(); + } + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Error when generating timeline, ", ex); + } catch (Exception ex) { + logger.log(Level.SEVERE, "Unexpected error when generating timeline, ", ex); + } + } + + @Override + public String getName() { + return "Make Timeline (Beta)"; + } + + @Override + public HelpCtx getHelpCtx() { + return HelpCtx.DEFAULT_HELP; + } + + @Override + public boolean 
asynchronous() { + return false; + } +} diff --git a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties index 4744f62146..ea31e2447b 100644 --- a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties +++ b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties @@ -1,5 +1,5 @@ #Updated by build script -#Wed, 25 Sep 2013 13:55:37 -0400 +#Fri, 18 Oct 2013 23:25:14 -0400 LBL_splash_window_title=Starting Autopsy SPLASH_HEIGHT=288 SPLASH_WIDTH=538 @@ -8,4 +8,4 @@ SplashRunningTextBounds=5,266,530,17 SplashRunningTextColor=0x0 SplashRunningTextFontSize=18 -currentVersion=Autopsy 3.0.7 +currentVersion=Autopsy 3.0.8 diff --git a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties index 0dbd5e9a00..5b961ec43f 100644 --- a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties +++ b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties @@ -1,5 +1,5 @@ -#Updated by build script -#Wed, 25 Sep 2013 13:55:37 -0400 - -CTL_MainWindow_Title=Autopsy 3.0.7 -CTL_MainWindow_Title_No_Project=Autopsy 3.0.7 +#Updated by build script +#Fri, 18 Oct 2013 23:25:14 -0400 + +CTL_MainWindow_Title=Autopsy 3.0.8 +CTL_MainWindow_Title_No_Project=Autopsy 3.0.8 diff --git a/thunderbirdparser/nbproject/project.xml b/thunderbirdparser/nbproject/project.xml index aec76cd632..52a74cd1a7 100644 --- a/thunderbirdparser/nbproject/project.xml +++ b/thunderbirdparser/nbproject/project.xml @@ -1,31 +1,31 @@ - - - org.netbeans.modules.apisupport.project - - - org.sleuthkit.autopsy.thunderbirdparser - - - - org.sleuthkit.autopsy.core - - - - 9 - 7.0 - - - - org.sleuthkit.autopsy.keywordsearch - - - - 5 - 3.2 - - - - - - - + + + org.netbeans.modules.apisupport.project + + + 
org.sleuthkit.autopsy.thunderbirdparser + + + + org.sleuthkit.autopsy.core + + + + 9 + 7.0 + + + + org.sleuthkit.autopsy.keywordsearch + + + + 5 + 3.2 + + + + + + + From af1e969b4cbd0171d90c346d281aa74755884686 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Sat, 19 Oct 2013 12:29:42 -0400 Subject: [PATCH 069/179] Updated line endings --- docs/doxygen/main.dox | 48 +-- docs/doxygen/modAdvanced.dox | 78 ++--- docs/doxygen/modDev.dox | 174 +++++----- docs/doxygen/modIngest.dox | 524 +++++++++++++++--------------- docs/doxygen/platformConcepts.dox | 50 +-- docs/doxygen/services.dox | 52 +-- 6 files changed, 463 insertions(+), 463 deletions(-) diff --git a/docs/doxygen/main.dox b/docs/doxygen/main.dox index 30f7b15546..4159f893e6 100644 --- a/docs/doxygen/main.dox +++ b/docs/doxygen/main.dox @@ -1,24 +1,24 @@ -/*! \mainpage Autopsy Forensic Browser Developer's Guide and API Reference - -

Overview

-Autopsy has been designed as a platform for open source tools besides just The Sleuth Kit. This document is for developers who want to add functionality into Autopsy. This could be in the form of enhancing the existing functionality or by making a module that plugs into it and you may distribute from your own site or push it back into the base distribution. - -If you want to write modules, then these pages are for you: -- \subpage platform_page -- \subpage mod_dev_page -- \subpage services_page -- The following are based on specific types of modules: - - \subpage mod_ingest_page - - \subpage mod_report_page - - \subpage mod_content_page - - \subpage mod_result_page -- \subpage adv_dev_page - -These pages are more detailed if you want to modify Autopsy code instead of writing add-on modules. -- \subpage workflow_page -- \subpage regression_test_page - - -*/ - - +/*! \mainpage Autopsy Forensic Browser Developer's Guide and API Reference + +

Overview

+Autopsy has been designed as a platform for open source tools besides just The Sleuth Kit. This document is for developers who want to add functionality into Autopsy. This could be in the form of enhancing the existing functionality or by making a module that plugs into it and you may distribute from your own site or push it back into the base distribution. + +If you want to write modules, then these pages are for you: +- \subpage platform_page +- \subpage mod_dev_page +- \subpage services_page +- The following are based on specific types of modules: + - \subpage mod_ingest_page + - \subpage mod_report_page + - \subpage mod_content_page + - \subpage mod_result_page +- \subpage adv_dev_page + +These pages are more detailed if you want to modify Autopsy code instead of writing add-on modules. +- \subpage workflow_page +- \subpage regression_test_page + + +*/ + + diff --git a/docs/doxygen/modAdvanced.dox b/docs/doxygen/modAdvanced.dox index df2c3e020c..5a2416f1a2 100644 --- a/docs/doxygen/modAdvanced.dox +++ b/docs/doxygen/modAdvanced.dox @@ -1,46 +1,46 @@ /*! \page adv_dev_page Advanced Develpment Concepts -\section mod_dev_adv Advanced Concepts - -These aren't really advanced, but you don't need to know them in detail when you start your first module. You'll want to refer back to them after you get started and wonder, "how do I do X". - - -\subsection mod_dev_adv_options Option Panels - - -Some modules may have configuration settings that uses can change. We recommend that you use the infrastructure provided by Autopsy and NetBeans to do this so that all module condiguration is done in a single place. +\section mod_dev_adv Advanced Concepts + +These aren't really advanced, but you don't need to know them in detail when you start your first module. You'll want to refer back to them after you get started and wonder, "how do I do X". + + +\subsection mod_dev_adv_options Option Panels + + +Some modules may have configuration settings that uses can change. 
We recommend that you use the infrastructure provided by Autopsy and NetBeans to do this so that all module condiguration is done in a single place. Note: This option panel applies to all module types. Ingest modules have a second type of option panel that can be accessed when a data source is added to a case. Refer to \ref ingestmodule_making_configuration for details on how to use those option panels. - -To add a panel to the options menu, right click the module and choose New > Other. Under the Module Development category, select Options Panel and press Next. - -Select Create Primary Panel, name the panel (preferably with the module's name), select an icon, and add keywords, then click Next and Finish. Note that NetBeans will automatically copy the selected icon to the module's directory if not already there. - -NetBeans will generate two Java files for you, the panel and the controller. For now, we only need to focus on the panel. - -First, use NetBeans' GUI builder to design the panel. Be sure to include all options, settings, preferences, etc for the module, as this is what the user will see. The recommended size of an options panel is about 675 x 500. - -Second, in the source code of the panel, there are two important methods: \c load() and \c store(). When the options panel is opened via Tools > Options in Autopsy, the \c load() method will be called. Conversely, when the user presses OK after editing the options, the \c store() method will be called. - -If one wishes to make any additional panels within the original options panel, or panels which the original opens, Autopsy provides the org.sleuthkit.autopsy.corecomponents.OptionsPanel interface to help. This interface requires the \c store() and \c load() functions also be provided in the separate panels, allowing for easier child storing and loading. - -Any storing or loading of settings or properties should be done in the \c store() and \c load() methods. 
The next section, \ref mod_dev_adv_properties, has more details on doing this. - - -\subsection mod_dev_adv_properties Saving Settings and Properties - -It is recommended to have the module settings persistent, so that when a change is made and Autopsy is re-opened -the user made changes remain effective and not reset back to defaults. -Use org.sleuthkit.autopsy.coreutils.ModuleSettings class for saving and reading back settings for your module. - - -\subsection mod_dev_adv_events Registering for Events - -Autopsy will generate events as the application runs and modules may want to listen for those events so that they can change their state. There is not an exhaustive list of events, but here are some common ones to listen for: - -- Case change events occur when a case is opened, closed, or changed. The org.sleuthkit.autopsy.casemodule.Case.addPropertyChangeListener() method can be used for this. -- IngestManager events occur when new results are available. The org.sleuthkit.autopsy.ingest.IngestManager.addPropertyChangeListener() method can be used for this. - + +To add a panel to the options menu, right click the module and choose New > Other. Under the Module Development category, select Options Panel and press Next. + +Select Create Primary Panel, name the panel (preferably with the module's name), select an icon, and add keywords, then click Next and Finish. Note that NetBeans will automatically copy the selected icon to the module's directory if not already there. + +NetBeans will generate two Java files for you, the panel and the controller. For now, we only need to focus on the panel. + +First, use NetBeans' GUI builder to design the panel. Be sure to include all options, settings, preferences, etc for the module, as this is what the user will see. The recommended size of an options panel is about 675 x 500. + +Second, in the source code of the panel, there are two important methods: \c load() and \c store(). 
When the options panel is opened via Tools > Options in Autopsy, the \c load() method will be called. Conversely, when the user presses OK after editing the options, the \c store() method will be called. + +If one wishes to make any additional panels within the original options panel, or panels which the original opens, Autopsy provides the org.sleuthkit.autopsy.corecomponents.OptionsPanel interface to help. This interface requires the \c store() and \c load() functions also be provided in the separate panels, allowing for easier child storing and loading. + +Any storing or loading of settings or properties should be done in the \c store() and \c load() methods. The next section, \ref mod_dev_adv_properties, has more details on doing this. + + +\subsection mod_dev_adv_properties Saving Settings and Properties + +It is recommended to have the module settings persistent, so that when a change is made and Autopsy is re-opened +the user made changes remain effective and not reset back to defaults. +Use org.sleuthkit.autopsy.coreutils.ModuleSettings class for saving and reading back settings for your module. + + +\subsection mod_dev_adv_events Registering for Events + +Autopsy will generate events as the application runs and modules may want to listen for those events so that they can change their state. There is not an exhaustive list of events, but here are some common ones to listen for: + +- Case change events occur when a case is opened, closed, or changed. The org.sleuthkit.autopsy.casemodule.Case.addPropertyChangeListener() method can be used for this. +- IngestManager events occur when new results are available. The org.sleuthkit.autopsy.ingest.IngestManager.addPropertyChangeListener() method can be used for this. + */ diff --git a/docs/doxygen/modDev.dox b/docs/doxygen/modDev.dox index 890eaa2209..840ff123e3 100644 --- a/docs/doxygen/modDev.dox +++ b/docs/doxygen/modDev.dox @@ -1,93 +1,93 @@ -/*! 
\page mod_dev_page Development Setup - - - -This page describes the basic concepts and setup that are needed regardless of the module type that you are building. - -\section mod_dev_setup Basic Setup - -\subsection mod_dev_setup_nb NetBeans and Java - -Autopsy is built on top of the NetBeans Rich Client Platform, which makes it easy to make plug-in infrastructures. To do any development, you really need to download NetBeans first. You can in theory develop modules by command line only, but this document assumes that you are using the IDE. Download and install the latest version of the IDE from http://www.netbeans.org. - -Autopsy currently requires Java 1.7. Ensure that it is installed. - -\subsection mod_dev_setup_platform Obtain the Autopsy Platform - -Before we can make a module, we must configure NetBeans to know about Autopsy as a platform. This will allow you to access all of the classes and services that Autopsy provides. There are two ways of configuring the NetBeans IDE to know about Autopsy: - -- Download an official release of Autopsy and build against it. -- Download Autopsy source code, build it, and make a platform to build against. - +/*! \page mod_dev_page Development Setup + + + +This page describes the basic concepts and setup that are needed regardless of the module type that you are building. + +\section mod_dev_setup Basic Setup + +\subsection mod_dev_setup_nb NetBeans and Java + +Autopsy is built on top of the NetBeans Rich Client Platform, which makes it easy to make plug-in infrastructures. To do any development, you really need to download NetBeans first. You can in theory develop modules by command line only, but this document assumes that you are using the IDE. Download and install the latest version of the IDE from http://www.netbeans.org. + +Autopsy currently requires Java 1.7. Ensure that it is installed. 
+ +\subsection mod_dev_setup_platform Obtain the Autopsy Platform + +Before we can make a module, we must configure NetBeans to know about Autopsy as a platform. This will allow you to access all of the classes and services that Autopsy provides. There are two ways of configuring the NetBeans IDE to know about Autopsy: + +- Download an official release of Autopsy and build against it. +- Download Autopsy source code, build it, and make a platform to build against. + \subsubsection mod_dev_setup_platform_rel Using a Released Version - -The easiest method for obtaining the platform is to install Autopsy on your computer. It will have everything that you need. If you installed it in "C:\Program Files\Autopsy", then the platform is in "C:\Program Files\Autopsy\platform". You can now also download just the ZIP file of the Autopsy release instead of the MSI installer. This maybe more convenient for development situations. - -\subsubsection mod_dev_setup_platform_src Building a Platform from Code - -If you want to build against the bleeding edge code and updates that have occurred since the last release, then you must download the latest source code and build it. This involves getting a full development environment setup. Refer to the wiki page at http://wiki.sleuthkit.org/index.php?title=Autopsy_Developer%27s_Guide for details on getting the source code and a development environment setup. - -To use the latest Autopsy source code as your development environment, first follow BUILDING.TXT in the root source repository to properly build and setup Autopsy in NetBeans. - -Once Autopsy has been successfully built, right click on the Autopsy project in NetBeans and select Package as > ZIP Distribution. Once the ZIP file is created, extract its contents to a directory. This directory is the platform that you will build against. Note that you will building the module against this built platform. 
If you need to make changes to Autopsy infrastructure for your module, then you will need to then make a new ZIP file and configure your module to use it each time.
-
-
-\section mod_dev_module Creating a Basic NetBeans Module
-
-The Autopsy modules are encapsulated inside of NetBeans modules. A NetBeans module will be packaged as a single ".nbm" file. A single NetBeans module can contain many Autopsy modules. The NetBeans module is what the user will install and provides things like auto-update.
-
-\subsection mod_dev_mod_nb Creating a NetBeans Module
-
-If this is your first module, then you will need to make a NetBeans module. If you have already made an Autopsy module and are now working on a second one, you can consider adding it to your pevious NetBeans module.
-
+
+The easiest method for obtaining the platform is to install Autopsy on your computer. It will have everything that you need. If you installed it in "C:\Program Files\Autopsy", then the platform is in "C:\Program Files\Autopsy\platform". You can now also download just the ZIP file of the Autopsy release instead of the MSI installer. This may be more convenient for development situations.
+
+\subsubsection mod_dev_setup_platform_src Building a Platform from Code
+
+If you want to build against the bleeding edge code and updates that have occurred since the last release, then you must download the latest source code and build it. This involves getting a full development environment setup. Refer to the wiki page at http://wiki.sleuthkit.org/index.php?title=Autopsy_Developer%27s_Guide for details on getting the source code and a development environment setup.
+
+To use the latest Autopsy source code as your development environment, first follow BUILDING.TXT in the root source repository to properly build and setup Autopsy in NetBeans.
+
+Once Autopsy has been successfully built, right click on the Autopsy project in NetBeans and select Package as > ZIP Distribution.
Once the ZIP file is created, extract its contents to a directory. This directory is the platform that you will build against. Note that you will be building the module against this built platform. If you need to make changes to Autopsy infrastructure for your module, then you will need to make a new ZIP file and configure your module to use it each time.
+
+
+\section mod_dev_module Creating a Basic NetBeans Module
+
+The Autopsy modules are encapsulated inside of NetBeans modules. A NetBeans module will be packaged as a single ".nbm" file. A single NetBeans module can contain many Autopsy modules. The NetBeans module is what the user will install and provides things like auto-update.
+
+\subsection mod_dev_mod_nb Creating a NetBeans Module
+
+If this is your first module, then you will need to make a NetBeans module. If you have already made an Autopsy module and are now working on a second one, you can consider adding it to your previous NetBeans module.
+
 To make a NetBeans module:
-- Open the NetBeans IDE and go to File -> New Project.
-- From the list of categories, choose "NetBeans Modules" and then "Module" from the list of "Projects". Click Next.
+- In the next panel of the wizard, give the module a name and directory. Select Standalone Module (the default is typically "Add to Suite") so that you build the module as an external module against Autopsy. You will need to tell NetBeans about the Autopsy platform, so choose the "Manage" button. Choose the "Add Platform" button and browse to the location of the platform discussed in the previous sections (as a reminder this will either be the location that you installed Autopsy into or where you opened up the ZIP file you created from source). Click Next. +- Finally, enter the code base name. Press Finish. \subsubsection mod_dev_mod_nb_config Configuring the NetBeans Module - -After the module is created, you will need to do some further configuration. -- Right click on the newly created module and choose "Properties". -- You will need to configure the module to be dependent on modules from within the Autopsy platform. Go to the "Libraries" area and choose "Add" in the "Module Dependencies" section. Choose the "Autopsy-core" library. You now have access to the Autopsy services. -- If you later determine that you need to pull in external JAR files, then you will use the "Wrapped Jar" section to add them in. -- Note, you will also need to come back to this section if you update the platform. You may need to add a new dependency for the version of the Autopsy-core that comes with the updated platform. -- Autopsy requires that all modules restart Autopsy after they are installed. Configure your module this way under Build -> Packaging. Check the box that says Needs Restart on Install. -You now have a NetBeans module that is using Autopsy as its build platform. That means you will have access to all of the services and utilities that Autopsy provides (such as \ref platform_details). - - -\subsubsection mod_dev_mod_config_other Optional Settings -There are several optional things in the Properties section. You can add a description and specify the version. 
You can do all of this later though and it does not need to be done before you start development. - -A link about the NetBeans versioning scheme can be found here http://wiki.netbeans.org/VersioningPolicy. +After the module is created, you will need to do some further configuration. +- Right click on the newly created module and choose "Properties". +- You will need to configure the module to be dependent on modules from within the Autopsy platform. Go to the "Libraries" area and choose "Add" in the "Module Dependencies" section. Choose the "Autopsy-core" library. You now have access to the Autopsy services. +- If you later determine that you need to pull in external JAR files, then you will use the "Wrapped Jar" section to add them in. +- Note, you will also need to come back to this section if you update the platform. You may need to add a new dependency for the version of the Autopsy-core that comes with the updated platform. +- Autopsy requires that all modules restart Autopsy after they are installed. Configure your module this way under Build -> Packaging. Check the box that says Needs Restart on Install. + +You now have a NetBeans module that is using Autopsy as its build platform. That means you will have access to all of the services and utilities that Autopsy provides (such as \ref platform_details). + + +\subsubsection mod_dev_mod_config_other Optional Settings +There are several optional things in the Properties section. You can add a description and specify the version. You can do all of this later though and it does not need to be done before you start development. + +A link about the NetBeans versioning scheme can be found here http://wiki.netbeans.org/VersioningPolicy. Autopsy follows this scheme and a link to the details can be found at http://wiki.sleuthkit.org/index.php?title=Autopsy_3_Module_Versions. - -\subsection mod_dev_mod_other Other Links - -For general NetBeans module information, refer to this guide from NetBeans.org. 
- - -\section mod_dev_aut Creating Autopsy Modules - -You can now add Autopsy modules into the NetBeans container module. There are other pages that focus on that and are listed on the main page. The rest of this document contains info that you will eventually want to come back to though. -As you will read in the later sections about the different module types, each Autopsy Module is a java class that extends an interface (the interface depends on the type of module). - - -\subsection mod_dev_aut_run1 Running Your Module During Development - -When you are developing your Autopsy module, you can simply choose "Run" on the module and it will launch the Autopsy platform with the module enabled in it. This is also how you can debug the module. - -\subsection mod_dev_aut_deploy Deploying Your Module - -When you are ready to share your module, create an NBM file by right clicking on the module and selecting "Create NBM". - -\subsection mod_dev_aut_install Installing Your Module - -To install the module on a non-development environment, launch Autopsy and choose Plugins under the Tools menu. Open the Downloaded tab and click Add Plugins. Navigate to the NBM file and open it. Next, click Install and follow the wizard. - - -*/ + +\subsection mod_dev_mod_other Other Links + +For general NetBeans module information, refer to this guide from NetBeans.org. + + +\section mod_dev_aut Creating Autopsy Modules + +You can now add Autopsy modules into the NetBeans container module. There are other pages that focus on that and are listed on the main page. The rest of this document contains info that you will eventually want to come back to though. +As you will read in the later sections about the different module types, each Autopsy Module is a java class that extends an interface (the interface depends on the type of module). 
+ + +\subsection mod_dev_aut_run1 Running Your Module During Development + +When you are developing your Autopsy module, you can simply choose "Run" on the module and it will launch the Autopsy platform with the module enabled in it. This is also how you can debug the module. + +\subsection mod_dev_aut_deploy Deploying Your Module + +When you are ready to share your module, create an NBM file by right clicking on the module and selecting "Create NBM". + +\subsection mod_dev_aut_install Installing Your Module + +To install the module on a non-development environment, launch Autopsy and choose Plugins under the Tools menu. Open the Downloaded tab and click Add Plugins. Navigate to the NBM file and open it. Next, click Install and follow the wizard. + + +*/ diff --git a/docs/doxygen/modIngest.dox b/docs/doxygen/modIngest.dox index b4acc149f1..fc78671848 100644 --- a/docs/doxygen/modIngest.dox +++ b/docs/doxygen/modIngest.dox @@ -1,262 +1,262 @@ -/*! \page mod_ingest_page Developing Ingest Modules - - -\section ingestmodule_modules Ingest Module Basics - -This section tells you how to make an Ingest Module. Ingest modules -analyze data from a data source (a disk image or set of logical -files). They typically focus on a specific type of data analysis. -The modules are loaded each time that Autopsy starts. The user can -choose to enable each module when they add an image to the case. -It assumes you have already setup your development environment as -described in \ref mod_dev_page. - -First, you need to choose the type of Ingest Module. - -- Data Source-level modules are passed in a reference to a top-level data source, such as an Image or folder of logical files. -These modules may query the database for a small set of specific files. For example, a Windows registry module that runs on the hive files. It is interested in only a small subset of the hard drive files. - -- File-level modules are passed in a reference to each file. 
-The Ingest Manager chooses which files to pass and when. -These modules are intended to analyze most of the files on the system -For example, a hash calculation module that reads in the content of every file. - - - -Refer to org.sleuthkit.autopsy.ingest.example for sample source code of dummy modules. - -\section ingest_common Commonalities - -There are several things about these module types that are common and we'll outline those here. For both modules, you will extend an interface and implement some methods. - -Refer to the documentation for each method for its use. -- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.init() is invoked when an ingest session starts. -- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.complete() is invoked when an ingest session completes. -- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.stop() is invoked on a module when an ingest session is interrupted by the user or system. -- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getName() returns the name of the module. -- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getDescription() returns a short description of the module. -- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getVersion() returns the version of the module. - - -The process() method is invoked to analyze the data. This is where -the analysis is done. The specific method depends on the module -type; it is passed either a data source or a file to process. We'll -cover this in later sections. This method will post results to the -blackboard and with inbox messages to the user. - - -\section ingest_datasrc Data Source-level Modules - -To make a data source-level module, make a new Java class either manually or using the NetBeans wizards. Edit the class to extend "org.sleuthkit.autopsy.ingest.IngestModuleDataSource". NetBeans will likely complain that you have not implemented the necessary methods and you can use its "hints" to automatically generate stubs for them. 
Use the documentation for the org.sleuthkit.autopsy.ingest.IngestModuleDataSource class for details on what each needs to do. -You can also refer to org.sleuthkit.autopsy.examples.SampleDataSourceIngestModule as an example module. - -Example snippet of an ingest-level module process() method: - -\code -@Override -public void process(Content dataSource, IngestDataSourceWorkerController controller) { - - //we have some number workunits / sub-tasks to execute - //in this case, we know the number of total tasks in advance - final int totalTasks = 12; - - //initialize the overall image ingest progress - controller.switchToDeterminate(); - controller.progress(totalTasks); - - for(int subTask = 0; subTask < totalTasks; ++subTask) { - //add cancellation support - if (controller.isCancelled() ) { - break; // break out early to let the thread terminate - } - - //do the work - try { - //sub-task may add blackboard artifacts and create an inbox message - performSubTask(i); - } catch (Exception ex) { - logger.log(Level.WARNING, "Exception occurred in subtask " + subTask, ex); - } - - //update progress - controller.progress(i+1); - } -} -\endcode - - -\section ingest_file File-level Modules - -To make a File-level module, make a new Java class either manually or using the NetBeans wizards. Edit the class to extend "org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile". NetBeans will likely complain that you have not implemented the necessary methods and you can use its "hints" to automatically generate stubs for them. Use the method documentation in the org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile class to fill in the details. -You can also refer to org.sleuthkit.autopsy.examples.SampleFileIngestModule as an example module. - -Unlike Data Source-level modules, file-level modules are singletons. Only a single instance is created for all files. 
-The same file-level module instance will be used for files in different images and even different cases if new cases are opened. - -Every file-level module should support multiple init() -> process() -> complete(), and init() -> process() -> stop() invocations. It should also support init() -> complete() sequences. A new case could be open for each call of init(). - -Currently (and this is likely to change in the future), File-level ingest modules are Singletons (meaning that only a single instance is created for the runtime of Autopsy). -You will need to implement a public static getDefault() method that returns a static instance of the module. Note that if you skip this step, you will not see an error until Autopsy tries to load your module and the log will say that it does not have a getDefault method. - -The implementation of this method is very standard, example: - -\code -public static synchronized MyIngestModule getDefault() { - - //defaultInstance is a private static class variable - if (defaultInstance == null) { - defaultInstance = new MyIngestModule(); - } - return defaultInstance; -} -\endcode - - -You should also make the constructor private to ensure the singleton status. - -As a result of the singleton design, init() will be called multiple times and even for different cases. Ensure that you update local member variables accordingly each time init() is called. Again, this design will likely change, but it is what it is for now. - - -\section ingestmodule_registration Module Registration - -Modules are automatically discovered if they implement the proper interface. -Currently, a restart of Autopsy is required after a module is installed before it is discovered. - -By default, modules that do not come with a standard Autopsy installation will run after the standard modules. No order -is implied. This design will likely change in the future, but currently manual configuration is needed to enforce order. 
- - -There is an XML pipeline configuration that contains the standard modules and specifies the order that they are run in. -If you need to specify the order of modules, then they needed to be manually addded to this file in the correct order. -This file is the same format as The Sleuth Kit Framework configuration file. -Refer to http://sleuthkit.org/sleuthkit/docs/framework-docs/pipeline_config_page.html which is an official documentation -for the pipeline configuration schema. - -Autopsy will provide tools for reconfiguring the ingest pipeline in the near future, -and user/developer will be able to reload current view of discovered modules, -reorder modules in the pipeline and set their arguments using GUI. - - -\section ingestmodule_services Ingest Services - -Class org.sleuthkit.autopsy.ingest.IngestServices provides services specifically for the ingest modules -and a module developer should use these utilities to send messages, get current case, etc. Refer to its documentation for method details. - -Remember, update references to IngestServices and Cases with each call to init() inside of the module. - -Module developers are encouraged to use Autopsy's org.sleuthkit.autopsy.coreutils.Logger -infrastructure to log errors to the Autopsy log. -The logger can also be accessed using the org.sleuthkit.autopsy.ingest.IngestServices class. - -Certain modules may need need a persistant store (other than for storing results) for storing and reading -module configurations or state. -The ModuleSettings API can be used also via org.sleuthkit.autopsy.ingest.IngestServices class. - - -\section ingestmodule_making_results Making Results Available to User - -Ingest modules run in the background. There are three ways to send messages and save results so that the user can see them: -- Blackboard for long-term storage of analysis results and to display in the results tree. -- Ingest Inbox to notify user of high-value analysis results that were also posted to blackboard. 
-- Error messages. - -\subsection ingestmodule_making_results_bb Posting Results to Blackboard -The blackboard is used to store results so that they are displayed in the results tree. See \ref platform_blackboard for details on posting results to it. - -The blackboard defines artifacts for specific data types (such as web bookmarks). You can use one of the standard artifact types, create your own, or simply post text with a org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_TOOL_OUTPUT. The later is much easier (for example, you can simply copy in the output from an existing tool), but it forces the user to parse the output themselves. - -When modules add data to the blackboard, -they should notify listeners of the new data by -invoking IngestServices.fireModuleDataEvent() method. -Do so as soon as you have added an artifact to the blackboard. -This allows other modules (and the main UI) to know when to query the blackboard for the latest data. -However, if you are writing a larger number of blackboard artifacts in a loop, it is better to invoke -IngestServices.fireModuleDataEvent() only once after the bulk write, not to flood the system with events. - - -\subsection ingestmodule_making_results_inbox Posting Results to Message Inbox - -Modules should post messages to the inbox when interesting data is found -that has also been posted to the blackboard. The idea behind these -messages are that they are presented in chronological order so that -users can see what was found while they were focusing on something else. -Error messages are also sent here as is summary information after the module has run to give the user some feedback. - - -These messages should only be sent if the result has a low false positive rate and will likely be relevant. -For example, the hash lookup module will send messages if known bad (notable) files are found, -but not if known good (NSRL) files are found. You can provide options to the users on when to make messages. 
- - -A single message includes the module name, message subject, message details, -a unique message id (in the context of the originating module), and a uniqueness attribute. -The uniqueness attribute is used to group similar messages together -and to determine the overall importance priority of the message -(if the same message is seen repeatedly, it is considered lower priority). - -For example, for a keyword search module, the uniqueness attribute would the keyword that was hit. - -Messages are created using the org.sleuthkit.autopsy.ingest.IngestMessage class and posted to the inbox using org.sleuthkit.autopsy.ingest.IngestServices.postMessage() method. - - -\subsection ingestmodule_making_results_error Reporting Errors - -When an error occurs, you should send a message to the ingest inbox with an error level. The downside of this though is that the ingest inbox was not entirely designed for this goal and it is easy for the user to miss these messages. Therefore, we identify these messages in the IngestInbox and also post a pop-up message that comes up in the lower right. - -You can make your own message in the lower right by using -org.sleuthkit.autopsy.coreutils.MessageNotifyUtil.Notify.show() - - - -\section ingestmodule_making_configuration Module Configuration - -Ingest modules may require user configuration. In \ref mod_dev_adv_options, you wll learn about Autopsy-wide settings. There are some -settings that are specific to ingest modules as well. - -The framework -supports two levels of configuration: simple and advanced. Simple settings enable the user to enable and disable basic things at run-time (using check boxes and such). -Advanced settings require more in-depth configuration with more powerful interface. - -As an example, the advanced configuration for the keyword search module allows you to add and create keyword lists, choose encodings, etc. The simple interface allows -you to enable and disable lists. 
- -Module configuration is module-specific: every module maintains its own configuration state and is responsible for implementing the graphical interface. -If a module needs simple or advanced configuration, it needs to implement methods in its interface. -The org.sleuthkit.autopsy.ingest.IngestModuleAbstract.hasSimpleConfiguration(), -org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getSimpleConfiguration(), and org.sleuthkit.autopsy.ingest.IngestModuleAbstract.saveSimpleConfiguration() -methods should be used for simple configuration. This panel will be shown when the user chooses which ingest modules to enable. - -The advanced configuration is implemented with the -org.sleuthkit.autopsy.ingest.IngestModuleAbstract.hasAdvancedConfiguration(), -org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getAdvancedConfiguration(), and -org.sleuthkit.autopsy.ingest.IngestModuleAbstract.saveAdvancedConfiguration() -methods. This panel can be accessed from the "Advanced" button when the user chooses which ingest modules to enable. -It is recommended that the advanced panel be the same panel that is used in the Options area (see \ref mod_dev_adv_options). - -Refer to \ref mod_dev_adv_properties for details on saving properties from these panels. - - - - -*/ +/*! \page mod_ingest_page Developing Ingest Modules + + +\section ingestmodule_modules Ingest Module Basics + +This section tells you how to make an Ingest Module. Ingest modules +analyze data from a data source (a disk image or set of logical +files). They typically focus on a specific type of data analysis. +The modules are loaded each time that Autopsy starts. The user can +choose to enable each module when they add an image to the case. +It assumes you have already setup your development environment as +described in \ref mod_dev_page. + +First, you need to choose the type of Ingest Module. + +- Data Source-level modules are passed in a reference to a top-level data source, such as an Image or folder of logical files. 
+These modules may query the database for a small set of specific files. For example, a Windows registry module that runs on the hive files. It is interested in only a small subset of the hard drive files.
+
+- File-level modules are passed in a reference to each file.
+The Ingest Manager chooses which files to pass and when.
+These modules are intended to analyze most of the files on the system.
+For example, a hash calculation module that reads in the content of every file.
+
+
+
+Refer to org.sleuthkit.autopsy.ingest.example for sample source code of dummy modules.
+
+\section ingest_common Commonalities
+
+There are several things about these module types that are common and we'll outline those here. For both modules, you will extend an interface and implement some methods.
+
+Refer to the documentation for each method for its use.
+- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.init() is invoked when an ingest session starts.
+- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.complete() is invoked when an ingest session completes.
+- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.stop() is invoked on a module when an ingest session is interrupted by the user or system.
+- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getName() returns the name of the module.
+- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getDescription() returns a short description of the module.
+- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getVersion() returns the version of the module.
+
+
+The process() method is invoked to analyze the data. This is where
+the analysis is done. The specific method depends on the module
+type; it is passed either a data source or a file to process. We'll
+cover this in later sections. This method will post results to the
+blackboard and with inbox messages to the user.
+
+
+\section ingest_datasrc Data Source-level Modules
+
+To make a data source-level module, make a new Java class either manually or using the NetBeans wizards.
Edit the class to extend "org.sleuthkit.autopsy.ingest.IngestModuleDataSource". NetBeans will likely complain that you have not implemented the necessary methods and you can use its "hints" to automatically generate stubs for them. Use the documentation for the org.sleuthkit.autopsy.ingest.IngestModuleDataSource class for details on what each needs to do. +You can also refer to org.sleuthkit.autopsy.examples.SampleDataSourceIngestModule as an example module. + +Example snippet of an ingest-level module process() method: + +\code +@Override +public void process(Content dataSource, IngestDataSourceWorkerController controller) { + + //we have some number workunits / sub-tasks to execute + //in this case, we know the number of total tasks in advance + final int totalTasks = 12; + + //initialize the overall image ingest progress + controller.switchToDeterminate(); + controller.progress(totalTasks); + + for(int subTask = 0; subTask < totalTasks; ++subTask) { + //add cancellation support + if (controller.isCancelled() ) { + break; // break out early to let the thread terminate + } + + //do the work + try { + //sub-task may add blackboard artifacts and create an inbox message + performSubTask(i); + } catch (Exception ex) { + logger.log(Level.WARNING, "Exception occurred in subtask " + subTask, ex); + } + + //update progress + controller.progress(i+1); + } +} +\endcode + + +\section ingest_file File-level Modules + +To make a File-level module, make a new Java class either manually or using the NetBeans wizards. Edit the class to extend "org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile". NetBeans will likely complain that you have not implemented the necessary methods and you can use its "hints" to automatically generate stubs for them. Use the method documentation in the org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile class to fill in the details. +You can also refer to org.sleuthkit.autopsy.examples.SampleFileIngestModule as an example module. 
+ +Unlike Data Source-level modules, file-level modules are singletons. Only a single instance is created for all files. +The same file-level module instance will be used for files in different images and even different cases if new cases are opened. + +Every file-level module should support multiple init() -> process() -> complete(), and init() -> process() -> stop() invocations. It should also support init() -> complete() sequences. A new case could be open for each call of init(). + +Currently (and this is likely to change in the future), File-level ingest modules are Singletons (meaning that only a single instance is created for the runtime of Autopsy). +You will need to implement a public static getDefault() method that returns a static instance of the module. Note that if you skip this step, you will not see an error until Autopsy tries to load your module and the log will say that it does not have a getDefault method. + +The implementation of this method is very standard, example: + +\code +public static synchronized MyIngestModule getDefault() { + + //defaultInstance is a private static class variable + if (defaultInstance == null) { + defaultInstance = new MyIngestModule(); + } + return defaultInstance; +} +\endcode + + +You should also make the constructor private to ensure the singleton status. + +As a result of the singleton design, init() will be called multiple times and even for different cases. Ensure that you update local member variables accordingly each time init() is called. Again, this design will likely change, but it is what it is for now. + + +\section ingestmodule_registration Module Registration + +Modules are automatically discovered if they implement the proper interface. +Currently, a restart of Autopsy is required after a module is installed before it is discovered. + +By default, modules that do not come with a standard Autopsy installation will run after the standard modules. No order +is implied. 
This design will likely change in the future, but currently manual configuration is needed to enforce order. + + +There is an XML pipeline configuration that contains the standard modules and specifies the order that they are run in. +If you need to specify the order of modules, then they needed to be manually addded to this file in the correct order. +This file is the same format as The Sleuth Kit Framework configuration file. +Refer to http://sleuthkit.org/sleuthkit/docs/framework-docs/pipeline_config_page.html which is an official documentation +for the pipeline configuration schema. + +Autopsy will provide tools for reconfiguring the ingest pipeline in the near future, +and user/developer will be able to reload current view of discovered modules, +reorder modules in the pipeline and set their arguments using GUI. + + +\section ingestmodule_services Ingest Services + +Class org.sleuthkit.autopsy.ingest.IngestServices provides services specifically for the ingest modules +and a module developer should use these utilities to send messages, get current case, etc. Refer to its documentation for method details. + +Remember, update references to IngestServices and Cases with each call to init() inside of the module. + +Module developers are encouraged to use Autopsy's org.sleuthkit.autopsy.coreutils.Logger +infrastructure to log errors to the Autopsy log. +The logger can also be accessed using the org.sleuthkit.autopsy.ingest.IngestServices class. + +Certain modules may need need a persistant store (other than for storing results) for storing and reading +module configurations or state. +The ModuleSettings API can be used also via org.sleuthkit.autopsy.ingest.IngestServices class. + + +\section ingestmodule_making_results Making Results Available to User + +Ingest modules run in the background. 
There are three ways to send messages and save results so that the user can see them: +- Blackboard for long-term storage of analysis results and to display in the results tree. +- Ingest Inbox to notify user of high-value analysis results that were also posted to blackboard. +- Error messages. + +\subsection ingestmodule_making_results_bb Posting Results to Blackboard +The blackboard is used to store results so that they are displayed in the results tree. See \ref platform_blackboard for details on posting results to it. + +The blackboard defines artifacts for specific data types (such as web bookmarks). You can use one of the standard artifact types, create your own, or simply post text with a org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_TOOL_OUTPUT. The later is much easier (for example, you can simply copy in the output from an existing tool), but it forces the user to parse the output themselves. + +When modules add data to the blackboard, +they should notify listeners of the new data by +invoking IngestServices.fireModuleDataEvent() method. +Do so as soon as you have added an artifact to the blackboard. +This allows other modules (and the main UI) to know when to query the blackboard for the latest data. +However, if you are writing a larger number of blackboard artifacts in a loop, it is better to invoke +IngestServices.fireModuleDataEvent() only once after the bulk write, not to flood the system with events. + + +\subsection ingestmodule_making_results_inbox Posting Results to Message Inbox + +Modules should post messages to the inbox when interesting data is found +that has also been posted to the blackboard. The idea behind these +messages are that they are presented in chronological order so that +users can see what was found while they were focusing on something else. +Error messages are also sent here as is summary information after the module has run to give the user some feedback. 
+ + +These messages should only be sent if the result has a low false positive rate and will likely be relevant. +For example, the hash lookup module will send messages if known bad (notable) files are found, +but not if known good (NSRL) files are found. You can provide options to the users on when to make messages. + + +A single message includes the module name, message subject, message details, +a unique message id (in the context of the originating module), and a uniqueness attribute. +The uniqueness attribute is used to group similar messages together +and to determine the overall importance priority of the message +(if the same message is seen repeatedly, it is considered lower priority). + +For example, for a keyword search module, the uniqueness attribute would the keyword that was hit. + +Messages are created using the org.sleuthkit.autopsy.ingest.IngestMessage class and posted to the inbox using org.sleuthkit.autopsy.ingest.IngestServices.postMessage() method. + + +\subsection ingestmodule_making_results_error Reporting Errors + +When an error occurs, you should send a message to the ingest inbox with an error level. The downside of this though is that the ingest inbox was not entirely designed for this goal and it is easy for the user to miss these messages. Therefore, we identify these messages in the IngestInbox and also post a pop-up message that comes up in the lower right. + +You can make your own message in the lower right by using +org.sleuthkit.autopsy.coreutils.MessageNotifyUtil.Notify.show() + + + +\section ingestmodule_making_configuration Module Configuration + +Ingest modules may require user configuration. In \ref mod_dev_adv_options, you wll learn about Autopsy-wide settings. There are some +settings that are specific to ingest modules as well. + +The framework +supports two levels of configuration: simple and advanced. Simple settings enable the user to enable and disable basic things at run-time (using check boxes and such). 
+Advanced settings require more in-depth configuration with more powerful interface. + +As an example, the advanced configuration for the keyword search module allows you to add and create keyword lists, choose encodings, etc. The simple interface allows +you to enable and disable lists. + +Module configuration is module-specific: every module maintains its own configuration state and is responsible for implementing the graphical interface. +If a module needs simple or advanced configuration, it needs to implement methods in its interface. +The org.sleuthkit.autopsy.ingest.IngestModuleAbstract.hasSimpleConfiguration(), +org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getSimpleConfiguration(), and org.sleuthkit.autopsy.ingest.IngestModuleAbstract.saveSimpleConfiguration() +methods should be used for simple configuration. This panel will be shown when the user chooses which ingest modules to enable. + +The advanced configuration is implemented with the +org.sleuthkit.autopsy.ingest.IngestModuleAbstract.hasAdvancedConfiguration(), +org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getAdvancedConfiguration(), and +org.sleuthkit.autopsy.ingest.IngestModuleAbstract.saveAdvancedConfiguration() +methods. This panel can be accessed from the "Advanced" button when the user chooses which ingest modules to enable. +It is recommended that the advanced panel be the same panel that is used in the Options area (see \ref mod_dev_adv_options). + +Refer to \ref mod_dev_adv_properties for details on saving properties from these panels. + + + + +*/ diff --git a/docs/doxygen/platformConcepts.dox b/docs/doxygen/platformConcepts.dox index 15d1fb8c0e..e75fe4d584 100644 --- a/docs/doxygen/platformConcepts.dox +++ b/docs/doxygen/platformConcepts.dox @@ -1,32 +1,32 @@ -/*! 
\page platform_page Platform Concepts - -\section platform_basics Basic Concepts - -These are the basic concepts that you should be aware of before writing a module: - -- Phases: The platform has been design to support different phases in the investigation process: - - Case Creation: Use wizards to create a new case. - - Data Source Adding: Where disk images and logical files are added to a case and file systems in disk images are analyzed to populate the database. The end result of this phase is that the central database has a basic record of each file so that it can be analyzed. This happens in the Add Image Wizard. - - Ingest Module Analysis: A variety of analysis modules then run on the files referenced in the database to perform specific tasks. - - Browsing and searching: User manually browses and searches the data using the user interface. They can browse through the results from the ingest modules that may still be running in the background. +/*! \page platform_page Platform Concepts + +\section platform_basics Basic Concepts + +These are the basic concepts that you should be aware of before writing a module: + +- Phases: The platform has been design to support different phases in the investigation process: + - Case Creation: Use wizards to create a new case. + - Data Source Adding: Where disk images and logical files are added to a case and file systems in disk images are analyzed to populate the database. The end result of this phase is that the central database has a basic record of each file so that it can be analyzed. This happens in the Add Image Wizard. + - Ingest Module Analysis: A variety of analysis modules then run on the files referenced in the database to perform specific tasks. + - Browsing and searching: User manually browses and searches the data using the user interface. They can browse through the results from the ingest modules that may still be running in the background. - Report: A final report is generated at the end of the case. 
- Central Database: All data except for the disk image is stored in a SQLite database. This includes information about what files exist in a disk image and the output from modules. Access to this database can be found from the org.sleuthkit.datamodel.SleuthkitCase class, but you'll probably never need to directly interact with it. The services and data model classes will interact with it. -- Case: A case class (org.sleuthkit.autopsy.casemodule.Case) is the top-level object for the data being analyzed. From here, you can access all of the files and query it. +- Case: A case class (org.sleuthkit.autopsy.casemodule.Case) is the top-level object for the data being analyzed. From here, you can access all of the files and query it. - Blackboard: The platform uses the blackboard to enable modules to communicate with each other and to display data in the GUI. See the \ref platform_blackboard section for more details. -- Services: There are services provided by the platform. See the \ref mod_dev_other_services section for more details. +- Services: There are services provided by the platform. See the \ref mod_dev_other_services section for more details. - Utilities: There are core utilities that the platform provides to modules. See the \ref mod_dev_other_utilities section for more details. -- Single tree: Results from the various modules can generally be found in a single tree. This makes it easy for users to find their results. - - -\section platform_frameworks Frameworks in the Platform -Autopsy was designed to be an extensible platform for other developers to leverage. There are several places in the platform where plug-in modules can be applied. -- Ingest Modules: These modules are run when a new data source is added to a case (and can be re-run afterwards too). These modules come in two forms: - - File Ingest Modules are called for every file in the image. Use this type of module if you want to examine the contents of all or most of the files. 
Examples include hash calculation, hash lookup, file type identification, and entropy calculation. +- Single tree: Results from the various modules can generally be found in a single tree. This makes it easy for users to find their results. + + +\section platform_frameworks Frameworks in the Platform +Autopsy was designed to be an extensible platform for other developers to leverage. There are several places in the platform where plug-in modules can be applied. +- Ingest Modules: These modules are run when a new data source is added to a case (and can be re-run afterwards too). These modules come in two forms: + - File Ingest Modules are called for every file in the image. Use this type of module if you want to examine the contents of all or most of the files. Examples include hash calculation, hash lookup, file type identification, and entropy calculation. - Data Source Ingest Modules are called once for every image or set of logical files. These modules can use the database to query for one or more files and perform analysis on them. Examples include web artifact analysis and searches that can rely only file names and extensions. See \ref mod_ingest_page for details on building these modules. -- Report Modules: These modules create different types of outputs that contain the analysis results. See \ref mod_report_page for details on creating these modules. -- Content Viewers: These modules show information about a specific file. These are the modules in the lower right of the interface. The platform comes with viewers to view the file in hexadecimal, extract the strings from the file, and view images and movies. See \ref mod_content_page for details on creating these modules. -- Result Viewers: These modules show information about a set of files. These modules are in the upper right of the interface. The platform comes with viewers to view the set of files in a table and thumbnails. See \ref mod_result_page for details on creating these modules. 
+- Report Modules: These modules create different types of outputs that contain the analysis results. See \ref mod_report_page for details on creating these modules. +- Content Viewers: These modules show information about a specific file. These are the modules in the lower right of the interface. The platform comes with viewers to view the file in hexadecimal, extract the strings from the file, and view images and movies. See \ref mod_content_page for details on creating these modules. +- Result Viewers: These modules show information about a set of files. These modules are in the upper right of the interface. The platform comes with viewers to view the set of files in a table and thumbnails. See \ref mod_result_page for details on creating these modules. - -*/ + +*/ diff --git a/docs/doxygen/services.dox b/docs/doxygen/services.dox index 88262d7289..48b8039025 100644 --- a/docs/doxygen/services.dox +++ b/docs/doxygen/services.dox @@ -1,37 +1,37 @@ -/*! \page services_page Platform Services - +/*! \page services_page Platform Services + \section platform_services Services The platform provides a variety of services and utilities that you need to be familiar with. This section outlines the basic ones and additional ones are described at the end of the document in \ref mod_dev_adv. - -\subsection platform_blackboard The Blackboard - -The blackboard allows modules to communicate with each other and the UI. It has three main uses in Autopsy: -- Ingest modules can communicate with each other. For example, one module can calculate a MD5 hash of a file and post it to the blackboard. Then another module can retrieve the hash value from the blackboard and not need to calculate it again. -- The tree in the right-hand side of the UI uses the blackboard to populate its Results section. The bookmarks, hashset hits, etc. are all populated from Ingest modules that created blackboard entries. -- The report modules query the blackboard to identify what they should report on. 
+ +\subsection platform_blackboard The Blackboard + +The blackboard allows modules to communicate with each other and the UI. It has three main uses in Autopsy: +- Ingest modules can communicate with each other. For example, one module can calculate a MD5 hash of a file and post it to the blackboard. Then another module can retrieve the hash value from the blackboard and not need to calculate it again. +- The tree in the right-hand side of the UI uses the blackboard to populate its Results section. The bookmarks, hashset hits, etc. are all populated from Ingest modules that created blackboard entries. +- The report modules query the blackboard to identify what they should report on. The blackboard is not unique to Autopsy. It is part of The Sleuth Kit datamodel and The Sleuth Kit Framework. In the name of reducing the amount of documentation that we need to maintain, we provide links here to those documentation sources. - Details on the blackboard concepts (artifacts versus attributes) can be found at http://sleuthkit.org/sleuthkit/docs/framework-docs/mod_bbpage.html. These documents are about the C++ implementation of the blackboard, but it is the same concepts. -- Details of the Java classes can be found in \ref jni_blackboard section of the The Sleuth Kit JNI documents (http://sleuthkit.org/sleuthkit/docs/jni-docs/). - - -\subsection mod_dev_other_services Framework Services - +- Details of the Java classes can be found in \ref jni_blackboard section of the The Sleuth Kit JNI documents (http://sleuthkit.org/sleuthkit/docs/jni-docs/). + + +\subsection mod_dev_other_services Framework Services + The followig are basic services that are available. - -- FileManager: the org.sleuthkit.autopsy.casemodule.services.FileManager service provides an API to access any file in the case. You can access FileManager by calling org.sleuthkit.autopsy.casemodule.services.Services.getFileManager(). 
Data Source-level Ingest modules and Report modules typically use this service because the other modules are passed in a reference to a specific file to do something with. + +- FileManager: the org.sleuthkit.autopsy.casemodule.services.FileManager service provides an API to access any file in the case. You can access FileManager by calling org.sleuthkit.autopsy.casemodule.services.Services.getFileManager(). Data Source-level Ingest modules and Report modules typically use this service because the other modules are passed in a reference to a specific file to do something with. - org.sleuthkit.autopsy.coreutils.Logger - Use this class to log error and informational messages to the central Autopsy log file. - If you have a background task that needs the provide the user with feedback, you can use the org.sleuthkit.autopsy.coreutils.MessageNotifyUtil.Notify.show() method to make a message in the lower right hand area. -- IngestModules also have a class that provides additional services. See \ref ingestmodule_services. +- IngestModules also have a class that provides additional services. See \ref ingestmodule_services. - -\subsection mod_dev_other_utilities Framework Utilities - -In addition to the services previously listed, there are some general utilities that could be useful to modules. These include: -- org.sleuthkit.autopsy.coreutils.PlatformUtil - platform-specific methods to determine available disk space, memory, etc. -- org.sleuthkit.autopsy.coreutils.ModuleSettings - to persist module configuration and settings -- org.sleuthkit.autopsy.coreutils.FileUtil - to delete and add folders, etc. - -*/ + +\subsection mod_dev_other_utilities Framework Utilities + +In addition to the services previously listed, there are some general utilities that could be useful to modules. These include: +- org.sleuthkit.autopsy.coreutils.PlatformUtil - platform-specific methods to determine available disk space, memory, etc. 
+- org.sleuthkit.autopsy.coreutils.ModuleSettings - to persist module configuration and settings +- org.sleuthkit.autopsy.coreutils.FileUtil - to delete and add folders, etc. + +*/ From f8f0bd9050faa0cfddbf7eb2a7358b3e20d9de17 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Sat, 19 Oct 2013 12:50:55 -0400 Subject: [PATCH 070/179] Added reference to filemanager in datasource docs --- docs/doxygen/modIngest.dox | 527 +++++++++++++++++++------------------ 1 file changed, 265 insertions(+), 262 deletions(-) diff --git a/docs/doxygen/modIngest.dox b/docs/doxygen/modIngest.dox index b4acc149f1..2512a96c37 100644 --- a/docs/doxygen/modIngest.dox +++ b/docs/doxygen/modIngest.dox @@ -1,262 +1,265 @@ -/*! \page mod_ingest_page Developing Ingest Modules - - -\section ingestmodule_modules Ingest Module Basics - -This section tells you how to make an Ingest Module. Ingest modules -analyze data from a data source (a disk image or set of logical -files). They typically focus on a specific type of data analysis. -The modules are loaded each time that Autopsy starts. The user can -choose to enable each module when they add an image to the case. -It assumes you have already setup your development environment as -described in \ref mod_dev_page. - -First, you need to choose the type of Ingest Module. - -- Data Source-level modules are passed in a reference to a top-level data source, such as an Image or folder of logical files. -These modules may query the database for a small set of specific files. For example, a Windows registry module that runs on the hive files. It is interested in only a small subset of the hard drive files. - -- File-level modules are passed in a reference to each file. -The Ingest Manager chooses which files to pass and when. -These modules are intended to analyze most of the files on the system -For example, a hash calculation module that reads in the content of every file. 
- - - -Refer to org.sleuthkit.autopsy.ingest.example for sample source code of dummy modules. - -\section ingest_common Commonalities - -There are several things about these module types that are common and we'll outline those here. For both modules, you will extend an interface and implement some methods. - -Refer to the documentation for each method for its use. -- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.init() is invoked when an ingest session starts. -- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.complete() is invoked when an ingest session completes. -- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.stop() is invoked on a module when an ingest session is interrupted by the user or system. -- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getName() returns the name of the module. -- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getDescription() returns a short description of the module. -- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getVersion() returns the version of the module. - - -The process() method is invoked to analyze the data. This is where -the analysis is done. The specific method depends on the module -type; it is passed either a data source or a file to process. We'll -cover this in later sections. This method will post results to the -blackboard and with inbox messages to the user. - - -\section ingest_datasrc Data Source-level Modules - -To make a data source-level module, make a new Java class either manually or using the NetBeans wizards. Edit the class to extend "org.sleuthkit.autopsy.ingest.IngestModuleDataSource". NetBeans will likely complain that you have not implemented the necessary methods and you can use its "hints" to automatically generate stubs for them. Use the documentation for the org.sleuthkit.autopsy.ingest.IngestModuleDataSource class for details on what each needs to do. -You can also refer to org.sleuthkit.autopsy.examples.SampleDataSourceIngestModule as an example module. 
- -Example snippet of an ingest-level module process() method: - -\code -@Override -public void process(Content dataSource, IngestDataSourceWorkerController controller) { - - //we have some number workunits / sub-tasks to execute - //in this case, we know the number of total tasks in advance - final int totalTasks = 12; - - //initialize the overall image ingest progress - controller.switchToDeterminate(); - controller.progress(totalTasks); - - for(int subTask = 0; subTask < totalTasks; ++subTask) { - //add cancellation support - if (controller.isCancelled() ) { - break; // break out early to let the thread terminate - } - - //do the work - try { - //sub-task may add blackboard artifacts and create an inbox message - performSubTask(i); - } catch (Exception ex) { - logger.log(Level.WARNING, "Exception occurred in subtask " + subTask, ex); - } - - //update progress - controller.progress(i+1); - } -} -\endcode - - -\section ingest_file File-level Modules - -To make a File-level module, make a new Java class either manually or using the NetBeans wizards. Edit the class to extend "org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile". NetBeans will likely complain that you have not implemented the necessary methods and you can use its "hints" to automatically generate stubs for them. Use the method documentation in the org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile class to fill in the details. -You can also refer to org.sleuthkit.autopsy.examples.SampleFileIngestModule as an example module. - -Unlike Data Source-level modules, file-level modules are singletons. Only a single instance is created for all files. -The same file-level module instance will be used for files in different images and even different cases if new cases are opened. - -Every file-level module should support multiple init() -> process() -> complete(), and init() -> process() -> stop() invocations. It should also support init() -> complete() sequences. 
A new case could be open for each call of init(). - -Currently (and this is likely to change in the future), File-level ingest modules are Singletons (meaning that only a single instance is created for the runtime of Autopsy). -You will need to implement a public static getDefault() method that returns a static instance of the module. Note that if you skip this step, you will not see an error until Autopsy tries to load your module and the log will say that it does not have a getDefault method. - -The implementation of this method is very standard, example: - -\code -public static synchronized MyIngestModule getDefault() { - - //defaultInstance is a private static class variable - if (defaultInstance == null) { - defaultInstance = new MyIngestModule(); - } - return defaultInstance; -} -\endcode - - -You should also make the constructor private to ensure the singleton status. - -As a result of the singleton design, init() will be called multiple times and even for different cases. Ensure that you update local member variables accordingly each time init() is called. Again, this design will likely change, but it is what it is for now. - - -\section ingestmodule_registration Module Registration - -Modules are automatically discovered if they implement the proper interface. -Currently, a restart of Autopsy is required after a module is installed before it is discovered. - -By default, modules that do not come with a standard Autopsy installation will run after the standard modules. No order -is implied. This design will likely change in the future, but currently manual configuration is needed to enforce order. - - -There is an XML pipeline configuration that contains the standard modules and specifies the order that they are run in. -If you need to specify the order of modules, then they needed to be manually addded to this file in the correct order. -This file is the same format as The Sleuth Kit Framework configuration file. 
-Refer to http://sleuthkit.org/sleuthkit/docs/framework-docs/pipeline_config_page.html which is an official documentation -for the pipeline configuration schema. - -Autopsy will provide tools for reconfiguring the ingest pipeline in the near future, -and user/developer will be able to reload current view of discovered modules, -reorder modules in the pipeline and set their arguments using GUI. - - -\section ingestmodule_services Ingest Services - -Class org.sleuthkit.autopsy.ingest.IngestServices provides services specifically for the ingest modules -and a module developer should use these utilities to send messages, get current case, etc. Refer to its documentation for method details. - -Remember, update references to IngestServices and Cases with each call to init() inside of the module. - -Module developers are encouraged to use Autopsy's org.sleuthkit.autopsy.coreutils.Logger -infrastructure to log errors to the Autopsy log. -The logger can also be accessed using the org.sleuthkit.autopsy.ingest.IngestServices class. - -Certain modules may need need a persistant store (other than for storing results) for storing and reading -module configurations or state. -The ModuleSettings API can be used also via org.sleuthkit.autopsy.ingest.IngestServices class. - - -\section ingestmodule_making_results Making Results Available to User - -Ingest modules run in the background. There are three ways to send messages and save results so that the user can see them: -- Blackboard for long-term storage of analysis results and to display in the results tree. -- Ingest Inbox to notify user of high-value analysis results that were also posted to blackboard. -- Error messages. - -\subsection ingestmodule_making_results_bb Posting Results to Blackboard -The blackboard is used to store results so that they are displayed in the results tree. See \ref platform_blackboard for details on posting results to it. 
- -The blackboard defines artifacts for specific data types (such as web bookmarks). You can use one of the standard artifact types, create your own, or simply post text with a org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_TOOL_OUTPUT. The later is much easier (for example, you can simply copy in the output from an existing tool), but it forces the user to parse the output themselves. - -When modules add data to the blackboard, -they should notify listeners of the new data by -invoking IngestServices.fireModuleDataEvent() method. -Do so as soon as you have added an artifact to the blackboard. -This allows other modules (and the main UI) to know when to query the blackboard for the latest data. -However, if you are writing a larger number of blackboard artifacts in a loop, it is better to invoke -IngestServices.fireModuleDataEvent() only once after the bulk write, not to flood the system with events. - - -\subsection ingestmodule_making_results_inbox Posting Results to Message Inbox - -Modules should post messages to the inbox when interesting data is found -that has also been posted to the blackboard. The idea behind these -messages are that they are presented in chronological order so that -users can see what was found while they were focusing on something else. -Error messages are also sent here as is summary information after the module has run to give the user some feedback. - - -These messages should only be sent if the result has a low false positive rate and will likely be relevant. -For example, the hash lookup module will send messages if known bad (notable) files are found, -but not if known good (NSRL) files are found. You can provide options to the users on when to make messages. - - -A single message includes the module name, message subject, message details, -a unique message id (in the context of the originating module), and a uniqueness attribute. 
-The uniqueness attribute is used to group similar messages together -and to determine the overall importance priority of the message -(if the same message is seen repeatedly, it is considered lower priority). - -For example, for a keyword search module, the uniqueness attribute would the keyword that was hit. - -Messages are created using the org.sleuthkit.autopsy.ingest.IngestMessage class and posted to the inbox using org.sleuthkit.autopsy.ingest.IngestServices.postMessage() method. - - -\subsection ingestmodule_making_results_error Reporting Errors - -When an error occurs, you should send a message to the ingest inbox with an error level. The downside of this though is that the ingest inbox was not entirely designed for this goal and it is easy for the user to miss these messages. Therefore, we identify these messages in the IngestInbox and also post a pop-up message that comes up in the lower right. - -You can make your own message in the lower right by using -org.sleuthkit.autopsy.coreutils.MessageNotifyUtil.Notify.show() - - - -\section ingestmodule_making_configuration Module Configuration - -Ingest modules may require user configuration. In \ref mod_dev_adv_options, you wll learn about Autopsy-wide settings. There are some -settings that are specific to ingest modules as well. - -The framework -supports two levels of configuration: simple and advanced. Simple settings enable the user to enable and disable basic things at run-time (using check boxes and such). -Advanced settings require more in-depth configuration with more powerful interface. - -As an example, the advanced configuration for the keyword search module allows you to add and create keyword lists, choose encodings, etc. The simple interface allows -you to enable and disable lists. - -Module configuration is module-specific: every module maintains its own configuration state and is responsible for implementing the graphical interface. 
-If a module needs simple or advanced configuration, it needs to implement methods in its interface. -The org.sleuthkit.autopsy.ingest.IngestModuleAbstract.hasSimpleConfiguration(), -org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getSimpleConfiguration(), and org.sleuthkit.autopsy.ingest.IngestModuleAbstract.saveSimpleConfiguration() -methods should be used for simple configuration. This panel will be shown when the user chooses which ingest modules to enable. - -The advanced configuration is implemented with the -org.sleuthkit.autopsy.ingest.IngestModuleAbstract.hasAdvancedConfiguration(), -org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getAdvancedConfiguration(), and -org.sleuthkit.autopsy.ingest.IngestModuleAbstract.saveAdvancedConfiguration() -methods. This panel can be accessed from the "Advanced" button when the user chooses which ingest modules to enable. -It is recommended that the advanced panel be the same panel that is used in the Options area (see \ref mod_dev_adv_options). - -Refer to \ref mod_dev_adv_properties for details on saving properties from these panels. - - - - -*/ +/*! \page mod_ingest_page Developing Ingest Modules + + +\section ingestmodule_modules Ingest Module Basics + +This section tells you how to make an Ingest Module. Ingest modules +analyze data from a data source (a disk image or set of logical +files). They typically focus on a specific type of data analysis. +The modules are loaded each time that Autopsy starts. The user can +choose to enable each module when they add an image to the case. +It assumes you have already setup your development environment as +described in \ref mod_dev_page. + +First, you need to choose the type of Ingest Module. + +- Data Source-level modules are passed in a reference to a top-level data source, such as an Image or folder of logical files. +These modules may query the database for a small set of specific files. For example, a Windows registry module that runs on the hive files. 
It is interested in only a small subset of the hard drive files. + +- File-level modules are passed in a reference to each file. +The Ingest Manager chooses which files to pass and when. +These modules are intended to analyze most of the files on the system. +For example, a hash calculation module that reads in the content of every file. + + + +Refer to org.sleuthkit.autopsy.ingest.example for sample source code of dummy modules. + +\section ingest_common Commonalities + +There are several things about these module types that are common and we'll outline those here. For both modules, you will extend an interface and implement some methods. + +Refer to the documentation for each method for its use. +- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.init() is invoked when an ingest session starts. +- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.complete() is invoked when an ingest session completes. +- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.stop() is invoked on a module when an ingest session is interrupted by the user or system. +- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getName() returns the name of the module. +- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getDescription() returns a short description of the module. +- org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getVersion() returns the version of the module. + + +The process() method is invoked to analyze the data. This is where +the analysis is done. The specific method depends on the module +type; it is passed either a data source or a file to process. We'll +cover this in later sections. This method will post results to the +blackboard and send inbox messages to the user. + + +\section ingest_datasrc Data Source-level Modules + +To make a data source-level module, make a new Java class either manually or using the NetBeans wizards. Edit the class to extend "org.sleuthkit.autopsy.ingest.IngestModuleDataSource". 
NetBeans will likely complain that you have not implemented the necessary methods and you can use its "hints" to automatically generate stubs for them. Use the documentation for the org.sleuthkit.autopsy.ingest.IngestModuleDataSource class for details on what each needs to do. +You can also refer to org.sleuthkit.autopsy.examples.SampleDataSourceIngestModule as an example module. + + +Data source-level ingest modules must find the files that they want to analyze. The best way to do that is using one of the findFiles() methods in org.sleuthkit.autopsy.casemodule.services.FileManager. See \ref mod_dev_other_services for more details. + +Example snippet of an ingest-level module process() method: + +\code +@Override +public void process(Content dataSource, IngestDataSourceWorkerController controller) { + + //we have some number workunits / sub-tasks to execute + //in this case, we know the number of total tasks in advance + final int totalTasks = 12; + + //initialize the overall image ingest progress + controller.switchToDeterminate(); + controller.progress(totalTasks); + + for(int subTask = 0; subTask < totalTasks; ++subTask) { + //add cancellation support + if (controller.isCancelled() ) { + break; // break out early to let the thread terminate + } + + //do the work + try { + //sub-task may add blackboard artifacts and create an inbox message + performSubTask(subTask); + } catch (Exception ex) { + logger.log(Level.WARNING, "Exception occurred in subtask " + subTask, ex); + } + + //update progress + controller.progress(subTask+1); + } +} +\endcode + + +\section ingest_file File-level Modules + +To make a File-level module, make a new Java class either manually or using the NetBeans wizards. Edit the class to extend "org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile". NetBeans will likely complain that you have not implemented the necessary methods and you can use its "hints" to automatically generate stubs for them. 
Use the method documentation in the org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile class to fill in the details. +You can also refer to org.sleuthkit.autopsy.examples.SampleFileIngestModule as an example module. + +Unlike Data Source-level modules, file-level modules are singletons. Only a single instance is created for all files. +The same file-level module instance will be used for files in different images and even different cases if new cases are opened. + +Every file-level module should support multiple init() -> process() -> complete(), and init() -> process() -> stop() invocations. It should also support init() -> complete() sequences. A new case could be open for each call of init(). + +Currently (and this is likely to change in the future), File-level ingest modules are Singletons (meaning that only a single instance is created for the runtime of Autopsy). +You will need to implement a public static getDefault() method that returns a static instance of the module. Note that if you skip this step, you will not see an error until Autopsy tries to load your module and the log will say that it does not have a getDefault method. + +The implementation of this method is very standard, example: + +\code +public static synchronized MyIngestModule getDefault() { + + //defaultInstance is a private static class variable + if (defaultInstance == null) { + defaultInstance = new MyIngestModule(); + } + return defaultInstance; +} +\endcode + + +You should also make the constructor private to ensure the singleton status. + +As a result of the singleton design, init() will be called multiple times and even for different cases. Ensure that you update local member variables accordingly each time init() is called. Again, this design will likely change, but it is what it is for now. + + +\section ingestmodule_registration Module Registration + +Modules are automatically discovered if they implement the proper interface. 
+Currently, a restart of Autopsy is required after a module is installed before it is discovered. + +By default, modules that do not come with a standard Autopsy installation will run after the standard modules. No order +is implied. This design will likely change in the future, but currently manual configuration is needed to enforce order. + + +There is an XML pipeline configuration that contains the standard modules and specifies the order that they are run in. +If you need to specify the order of modules, then they need to be manually added to this file in the correct order. +This file is the same format as The Sleuth Kit Framework configuration file. +Refer to http://sleuthkit.org/sleuthkit/docs/framework-docs/pipeline_config_page.html which is the official documentation +for the pipeline configuration schema. + +Autopsy will provide tools for reconfiguring the ingest pipeline in the near future, +and user/developer will be able to reload current view of discovered modules, +reorder modules in the pipeline and set their arguments using GUI. + + +\section ingestmodule_services Ingest Services + +Class org.sleuthkit.autopsy.ingest.IngestServices provides services specifically for the ingest modules +and a module developer should use these utilities to send messages, get current case, etc. Refer to its documentation for method details. + +Remember, update references to IngestServices and Cases with each call to init() inside of the module. + +Module developers are encouraged to use Autopsy's org.sleuthkit.autopsy.coreutils.Logger +infrastructure to log errors to the Autopsy log. +The logger can also be accessed using the org.sleuthkit.autopsy.ingest.IngestServices class. + +Certain modules may need a persistent store (other than for storing results) for storing and reading +module configurations or state. +The ModuleSettings API can be used also via org.sleuthkit.autopsy.ingest.IngestServices class. 
+ + +\section ingestmodule_making_results Making Results Available to User + +Ingest modules run in the background. There are three ways to send messages and save results so that the user can see them: +- Blackboard for long-term storage of analysis results and to display in the results tree. +- Ingest Inbox to notify user of high-value analysis results that were also posted to blackboard. +- Error messages. + +\subsection ingestmodule_making_results_bb Posting Results to Blackboard +The blackboard is used to store results so that they are displayed in the results tree. See \ref platform_blackboard for details on posting results to it. + +The blackboard defines artifacts for specific data types (such as web bookmarks). You can use one of the standard artifact types, create your own, or simply post text with a org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_TOOL_OUTPUT. The latter is much easier (for example, you can simply copy in the output from an existing tool), but it forces the user to parse the output themselves. + +When modules add data to the blackboard, +they should notify listeners of the new data by +invoking IngestServices.fireModuleDataEvent() method. +Do so as soon as you have added an artifact to the blackboard. +This allows other modules (and the main UI) to know when to query the blackboard for the latest data. +However, if you are writing a larger number of blackboard artifacts in a loop, it is better to invoke +IngestServices.fireModuleDataEvent() only once after the bulk write, not to flood the system with events. + + +\subsection ingestmodule_making_results_inbox Posting Results to Message Inbox + +Modules should post messages to the inbox when interesting data is found +that has also been posted to the blackboard. The idea behind these +messages is that they are presented in chronological order so that +users can see what was found while they were focusing on something else. 
+Error messages are also sent here as is summary information after the module has run to give the user some feedback. + + +These messages should only be sent if the result has a low false positive rate and will likely be relevant. +For example, the hash lookup module will send messages if known bad (notable) files are found, +but not if known good (NSRL) files are found. You can provide options to the users on when to make messages. + + +A single message includes the module name, message subject, message details, +a unique message id (in the context of the originating module), and a uniqueness attribute. +The uniqueness attribute is used to group similar messages together +and to determine the overall importance priority of the message +(if the same message is seen repeatedly, it is considered lower priority). + +For example, for a keyword search module, the uniqueness attribute would be the keyword that was hit. + +Messages are created using the org.sleuthkit.autopsy.ingest.IngestMessage class and posted to the inbox using org.sleuthkit.autopsy.ingest.IngestServices.postMessage() method. + + +\subsection ingestmodule_making_results_error Reporting Errors + +When an error occurs, you should send a message to the ingest inbox with an error level. The downside of this though is that the ingest inbox was not entirely designed for this goal and it is easy for the user to miss these messages. Therefore, we identify these messages in the IngestInbox and also post a pop-up message that comes up in the lower right. + +You can make your own message in the lower right by using +org.sleuthkit.autopsy.coreutils.MessageNotifyUtil.Notify.show() + + + +\section ingestmodule_making_configuration Module Configuration + +Ingest modules may require user configuration. In \ref mod_dev_adv_options, you will learn about Autopsy-wide settings. There are some +settings that are specific to ingest modules as well. + +The framework +supports two levels of configuration: simple and advanced. 
Simple settings enable the user to enable and disable basic things at run-time (using check boxes and such). +Advanced settings require more in-depth configuration with more powerful interface. + +As an example, the advanced configuration for the keyword search module allows you to add and create keyword lists, choose encodings, etc. The simple interface allows +you to enable and disable lists. + +Module configuration is module-specific: every module maintains its own configuration state and is responsible for implementing the graphical interface. +If a module needs simple or advanced configuration, it needs to implement methods in its interface. +The org.sleuthkit.autopsy.ingest.IngestModuleAbstract.hasSimpleConfiguration(), +org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getSimpleConfiguration(), and org.sleuthkit.autopsy.ingest.IngestModuleAbstract.saveSimpleConfiguration() +methods should be used for simple configuration. This panel will be shown when the user chooses which ingest modules to enable. + +The advanced configuration is implemented with the +org.sleuthkit.autopsy.ingest.IngestModuleAbstract.hasAdvancedConfiguration(), +org.sleuthkit.autopsy.ingest.IngestModuleAbstract.getAdvancedConfiguration(), and +org.sleuthkit.autopsy.ingest.IngestModuleAbstract.saveAdvancedConfiguration() +methods. This panel can be accessed from the "Advanced" button when the user chooses which ingest modules to enable. +It is recommended that the advanced panel be the same panel that is used in the Options area (see \ref mod_dev_adv_options). + +Refer to \ref mod_dev_adv_properties for details on saving properties from these panels. 
+ + + + +*/ From 003d12570b3c32b468f36d37601abf7581c1870a Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Sat, 19 Oct 2013 13:41:30 -0400 Subject: [PATCH 071/179] Minor cleanup --- .../ThunderbirdEmailParser.java | 41 ++++ .../ThunderbirdMboxFileIngestModule.java | 194 ++++++++---------- .../ThunderbirdMboxParser.java | 3 + 3 files changed, 127 insertions(+), 111 deletions(-) diff --git a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdEmailParser.java b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdEmailParser.java index 11d2ca91b7..e130a5ba20 100644 --- a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdEmailParser.java +++ b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdEmailParser.java @@ -1,3 +1,21 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2011-2013 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package org.sleuthkit.autopsy.thunderbirdparser; import java.io.*; @@ -18,6 +36,10 @@ import org.apache.tika.sax.BodyContentHandler; import org.xml.sax.ContentHandler; import org.xml.sax.SAXException; +/** + * Parses an MBOX file. 
+ * + */ public class ThunderbirdEmailParser { private InputStream stream; @@ -41,6 +63,10 @@ public class ThunderbirdEmailParser { this.tika = new Tika(); } + /** + * + * @param inStream String to MBX file + */ public ThunderbirdEmailParser(InputStream inStream) { this.tika = new Tika(); this.stream = inStream; @@ -61,11 +87,26 @@ public class ThunderbirdEmailParser { this.contentHandler = new BodyContentHandler(10*1024*1024); } + /** + * Parse data passed in via constructor + * @throws FileNotFoundException + * @throws IOException + * @throws SAXException + * @throws TikaException + */ public void parse() throws FileNotFoundException, IOException, SAXException, TikaException { init(); parser.parse(this.stream, this.contentHandler, this.metadata, context); } + /** + * Parse given MBX stream + * @param inStream stream of MBX file + * @throws FileNotFoundException + * @throws IOException + * @throws SAXException + * @throws TikaException + */ public void parse(InputStream inStream) throws FileNotFoundException, IOException, SAXException, TikaException { init(); parser.parseMbox(inStream, this.contentHandler, this.metadata, context); diff --git a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java index 926ea686a0..e8dcc2690e 100644 --- a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java +++ b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java @@ -18,11 +18,8 @@ */ package org.sleuthkit.autopsy.thunderbirdparser; -import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; -import java.sql.ResultSet; -import java.sql.SQLException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; @@ -34,7 +31,6 @@ import org.apache.tika.exception.TikaException; import 
org.apache.tika.metadata.Metadata; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; import org.sleuthkit.autopsy.ingest.IngestModuleInit; import org.sleuthkit.autopsy.ingest.IngestServices; @@ -44,9 +40,7 @@ import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; -import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.ReadContentInputStream; -import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.TskException; @@ -61,8 +55,6 @@ public class ThunderbirdMboxFileIngestModule extends IngestModuleAbstractFile { private static final Logger logger = Logger.getLogger(ThunderbirdMboxFileIngestModule.class.getName()); private static ThunderbirdMboxFileIngestModule instance = null; private IngestServices services; - private static int messageId = 0; - private Case currentCase; private static final String MODULE_NAME = "Thunderbird Parser"; private final String hashDBModuleName = "Hash Lookup"; final public static String MODULE_VERSION = "1.0"; @@ -76,21 +68,31 @@ public class ThunderbirdMboxFileIngestModule extends IngestModuleAbstractFile { @Override public ProcessResult process(PipelineContextingestContext, AbstractFile abstractFile) { - if (abstractFile.getKnown().equals( - TskData.FileKnown.KNOWN)) { - return ProcessResult.OK; //file is known, stop processing it - } + + // skip known + if (abstractFile.getKnown().equals(TskData.FileKnown.KNOWN)) { + return ProcessResult.OK; + } + + //skip unalloc + if(abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) { + return ProcessResult.OK; + } + //file has read error, 
stop processing it + // @@@ I don't really like this + // we don't know if Hash was run or if it had lookup errors IngestModuleAbstractFile.ProcessResult hashDBResult = services.getAbstractFileModuleResult(hashDBModuleName); if (hashDBResult == IngestModuleAbstractFile.ProcessResult.ERROR) { - return ProcessResult.ERROR; //file has read error, stop processing it + return ProcessResult.ERROR; } if (abstractFile.isVirtual()) { return ProcessResult.OK; } + // check its signature boolean isMbox = false; try { byte[] t = new byte[64]; @@ -110,102 +112,82 @@ public class ThunderbirdMboxFileIngestModule extends IngestModuleAbstractFile { logger.log(Level.INFO, "ThunderbirdMboxFileIngestModule: Parsing {0}", abstractFile.getName()); - String mboxName = abstractFile.getName(); - String msfName = mboxName + ".msf"; - //Long mboxId = fsContent.getId(); - String mboxPath = abstractFile.getParentPath(); - Long msfId = 0L; - currentCase = Case.getCurrentCase(); // get the most updated case - SleuthkitCase tskCase = currentCase.getSleuthkitCase(); + String mboxFileName = abstractFile.getName(); + String mboxParentDir = abstractFile.getParentPath(); - try { - ResultSet resultset = tskCase.runQuery("SELECT obj_id FROM tsk_files WHERE parent_path = '" + mboxPath + "' and name = '" + msfName + "'"); - if (!resultset.next()) { - logger.log(Level.WARNING, "Could not find msf file in mbox dir: " + mboxPath + " file: " + msfName); - tskCase.closeRunQuery(resultset); - return ProcessResult.OK; - } else { - msfId = resultset.getLong(1); - tskCase.closeRunQuery(resultset); - } - - } catch (SQLException ex) { - logger.log(Level.WARNING, "Could not find msf file in mbox dir: " + mboxPath + " file: " + msfName); - } - - try { - Content msfContent = tskCase.getContentById(msfId); - if (msfContent != null) { - ContentUtils.writeToFile(msfContent, new File(currentCase.getTempDirectory() + File.separator + msfName)); - } - } catch (IOException ex) { - logger.log(Level.WARNING, "Unable to obtain 
msf file for mbox parsing:" + msfName, ex); - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Unable to obtain msf file for mbox parsing:" + msfName, ex); - } - int index = 0; - String replace = ""; - boolean a = mboxPath.indexOf("/ImapMail/") > 0; - boolean b = mboxPath.indexOf("/Mail/") > 0; - if (b == true) { - index = mboxPath.indexOf("/Mail/"); - replace = "/Mail"; - } else if (a == true) { - index = mboxPath.indexOf("/ImapMail/"); - replace = "/ImapMail"; - } else { - replace = ""; - - } - - String folderPath = mboxPath.substring(index); - folderPath = folderPath.replaceAll(replace, ""); - folderPath = folderPath + mboxName; - folderPath = folderPath.replaceAll(".sbd", ""); -// Reader reader = null; -// try { -// reader = new FileReader(currentCase.getTempDirectory() + File.separator + msfName); -// } catch (FileNotFoundException ex) { -// Logger.getLogger(ThunderbirdMboxFileIngestModule.class.getName()).log(Level.WARNING, null, ex); + // Find the .msf file in the same folder + // BC: Commented out because results are not being used Oct '13 + //Long msfId = 0L; + //String msfName = mboxFileName + ".msf"; + //SleuthkitCase tskCase = currentCase.getSleuthkitCase(); + // @@@ We shouldn't bail out here if we dont' find it... 
+// try { +// // @@@ Replace this with a call to FileManager.findFiles() +// ResultSet resultset = tskCase.runQuery("SELECT obj_id FROM tsk_files WHERE parent_path = '" + mboxParentDir + "' and name = '" + msfName + "'"); +// if (!resultset.next()) { +// logger.log(Level.WARNING, "Could not find msf file in mbox dir: " + mboxParentDir + " file: " + msfName); +// tskCase.closeRunQuery(resultset); +// return ProcessResult.OK; +// } else { +// msfId = resultset.getLong(1); +// tskCase.closeRunQuery(resultset); // } -// MorkDocument morkDocument = new MorkDocument(reader); -// List dicts = morkDocument.getDicts(); -// for(Dict dict : dicts){ -// String path = dict.getValue("81").toString(); -// String account = dict.getValue("8D").toString(); -// } - String emailId = ""; - String content = ""; - String from = ""; - String to = ""; - String stringDate = ""; - Long date = 0L; - String subject = ""; - String cc = ""; - String bcc = ""; - ThunderbirdEmailParser mbox = new ThunderbirdEmailParser(); +// +// } catch (SQLException ex) { +// logger.log(Level.WARNING, "Could not find msf file in mbox dir: " + mboxParentDir + " file: " + msfName); +// } +// +// try { +// Content msfContent = tskCase.getContentById(msfId); +// if (msfContent != null) { +// ContentUtils.writeToFile(msfContent, new File(currentCase.getTempDirectory() + File.separator + msfName)); +// } +// } catch (IOException ex) { +// logger.log(Level.WARNING, "Unable to obtain msf file for mbox parsing:" + msfName, ex); +// } catch (TskCoreException ex) { +// logger.log(Level.WARNING, "Unable to obtain msf file for mbox parsing:" + msfName, ex); +// } + + + // use the local path to determine the e-mail folder structure + String emailFolder = ""; + // email folder is everything after "Mail" or ImapMail + if (mboxParentDir.contains("/Mail/")) { + emailFolder = mboxParentDir.substring(mboxParentDir.indexOf("/Mail/") + 5); + } + else if (mboxParentDir.contains("/ImapMail/")) { + emailFolder = 
mboxParentDir.substring(mboxParentDir.indexOf("/ImapMail/") + 9); + } + emailFolder = emailFolder + mboxFileName; + emailFolder = emailFolder.replaceAll(".sbd", ""); + try { ReadContentInputStream contentStream = new ReadContentInputStream(abstractFile); + ThunderbirdEmailParser mbox = new ThunderbirdEmailParser(); mbox.parse(contentStream); - HashMap> emailMap = new HashMap>(); - emailMap = mbox.getAllEmails(); + + HashMap>emailMap = mbox.getAllEmails(); for (Entry> entry : emailMap.entrySet()) { - Map propertyMap = new HashMap(); - emailId = ((entry.getKey() != null) ? entry.getKey() : "Not Available"); - propertyMap = entry.getValue(); - content = ((propertyMap.get("content") != null) ? propertyMap.get("content") : ""); - from = ((propertyMap.get(Metadata.AUTHOR) != null) ? propertyMap.get(Metadata.AUTHOR) : ""); - to = ((propertyMap.get(Metadata.MESSAGE_TO) != null) ? propertyMap.get(Metadata.MESSAGE_TO) : ""); - stringDate = ((propertyMap.get("date") != null) ? propertyMap.get("date") : ""); - if (!"".equals(stringDate)) { + /* @@@ I'd rather this code be cleaned up a bit so that we check if the value is + * set and then directly add it to the attribute. otherwise, we end up with a bunch + * of "" attribute values. + */ + Collection bbattributes = new ArrayList<>(); + String emailId = ((entry.getKey() != null) ? entry.getKey() : "Not Available"); + MappropertyMap = entry.getValue(); + String content = ((propertyMap.get("content") != null) ? propertyMap.get("content") : ""); + String from = ((propertyMap.get(Metadata.AUTHOR) != null) ? propertyMap.get(Metadata.AUTHOR) : ""); + String to = ((propertyMap.get(Metadata.MESSAGE_TO) != null) ? propertyMap.get(Metadata.MESSAGE_TO) : ""); + String stringDate = ((propertyMap.get("date") != null) ? propertyMap.get("date") : ""); + Long date = 0L; + if (stringDate.equals("") == false) { date = mbox.getDateCreated(stringDate); } - subject = ((propertyMap.get(Metadata.SUBJECT) != null) ? 
propertyMap.get(Metadata.SUBJECT) : ""); - cc = ((propertyMap.get(Metadata.MESSAGE_CC) != null) ? propertyMap.get(Metadata.MESSAGE_CC) : ""); - bcc = ((propertyMap.get(Metadata.MESSAGE_BCC) != null) ? propertyMap.get(Metadata.MESSAGE_BCC) : ""); - - Collection bbattributes = new ArrayList(); + String subject = ((propertyMap.get(Metadata.SUBJECT) != null) ? propertyMap.get(Metadata.SUBJECT) : ""); + String cc = ((propertyMap.get(Metadata.MESSAGE_CC) != null) ? propertyMap.get(Metadata.MESSAGE_CC) : ""); + String bcc = ((propertyMap.get(Metadata.MESSAGE_BCC) != null) ? propertyMap.get(Metadata.MESSAGE_BCC) : ""); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL_TO.getTypeID(), MODULE_NAME, to)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL_CC.getTypeID(), MODULE_NAME, cc)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL_BCC.getTypeID(), MODULE_NAME, bcc)); @@ -217,7 +199,7 @@ public class ThunderbirdMboxFileIngestModule extends IngestModuleAbstractFile { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_RCVD.getTypeID(), MODULE_NAME, date)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_SENT.getTypeID(), MODULE_NAME, date)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SUBJECT.getTypeID(), MODULE_NAME, subject)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), MODULE_NAME, folderPath)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), MODULE_NAME, emailFolder)); BlackboardArtifact bbart; try { bbart = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG); @@ -240,9 +222,6 @@ public class ThunderbirdMboxFileIngestModule extends IngestModuleAbstractFile { @Override public void complete() { - logger.log(Level.INFO, "complete()"); - - //module specific cleanup due completion here } @Override @@ -263,18 +242,11 @@ public class ThunderbirdMboxFileIngestModule extends 
IngestModuleAbstractFile { @Override public void init(IngestModuleInit initContext) { - logger.log(Level.INFO, "init()"); services = IngestServices.getDefault(); - - currentCase = Case.getCurrentCase(); - //module specific initialization here } @Override public void stop() { - logger.log(Level.INFO, "stop()"); - - //module specific cleanup due interruption here } @Override diff --git a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxParser.java b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxParser.java index 60b9f75ca6..ee61a6952d 100644 --- a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxParser.java +++ b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxParser.java @@ -37,6 +37,9 @@ import org.apache.tika.sax.BodyContentHandler; import org.xml.sax.ContentHandler; import org.xml.sax.SAXException; +/** + * Contains the logic to parse an MBOX file. + */ public class ThunderbirdMboxParser { /** Serial version UID */ From ef74ae342581ff01e04db25a5a1e6ef43607dec3 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Sat, 19 Oct 2013 13:47:39 -0400 Subject: [PATCH 072/179] Thundebird reports errors better now --- .../ThunderbirdMboxFileIngestModule.java | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java index e8dcc2690e..8bcfeaf5c8 100644 --- a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java +++ b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java @@ -162,6 +162,7 @@ public class ThunderbirdMboxFileIngestModule extends IngestModuleAbstractFile { emailFolder = emailFolder + mboxFileName; emailFolder = 
emailFolder.replaceAll(".sbd", ""); + boolean errorsFound = false; try { ReadContentInputStream contentStream = new ReadContentInputStream(abstractFile); ThunderbirdEmailParser mbox = new ThunderbirdEmailParser(); @@ -204,17 +205,28 @@ public class ThunderbirdMboxFileIngestModule extends IngestModuleAbstractFile { try { bbart = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG); bbart.addAttributes(bbattributes); + services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG)); } catch (TskCoreException ex) { Logger.getLogger(ThunderbirdMboxFileIngestModule.class.getName()).log(Level.WARNING, null, ex); + errorsFound = true; } - services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG)); } - } catch (FileNotFoundException ex) { + } + catch (FileNotFoundException ex) { Logger.getLogger(ThunderbirdMboxFileIngestModule.class.getName()).log(Level.WARNING, null, ex); - } catch (IOException ex) { + errorsFound = true; + } + catch (IOException ex) { Logger.getLogger(ThunderbirdMboxFileIngestModule.class.getName()).log(Level.WARNING, null, ex); - } catch (SAXException | TikaException ex) { + errorsFound = true; + } + catch (SAXException | TikaException ex) { Logger.getLogger(ThunderbirdMboxFileIngestModule.class.getName()).log(Level.WARNING, null, ex); + errorsFound = true; + } + if (errorsFound) { + // @@@ RECORD THEM... + return ProcessResult.ERROR; } return ProcessResult.OK; From 27fc247da5f55b9869be7d4039ccb62e9d700b2c Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Mon, 21 Oct 2013 10:28:35 -0400 Subject: [PATCH 073/179] Fixed issue with multiple instances of data result panels. 
--- .../corecomponents/MediaViewVideoPanel.java | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/MediaViewVideoPanel.java b/Core/src/org/sleuthkit/autopsy/corecomponents/MediaViewVideoPanel.java index 9275ef7c1d..343b7fa672 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/MediaViewVideoPanel.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/MediaViewVideoPanel.java @@ -39,12 +39,6 @@ public abstract class MediaViewVideoPanel extends JPanel implements FrameCapture // 32 bit architectures private static final String[] ARCH32 = new String[]{"x86"}; - // A Gstreamer implementation of MediaViewVideoPanel - private static GstVideoPanel gstVideoPanel = null; - - // A JavaFX implmentation of MediaViewVideoPanel - private static FXVideoPanel fxVideoPanel = null; - /** * Factory Method to create a MediaViewVideoPanel. * @@ -78,10 +72,7 @@ public abstract class MediaViewVideoPanel extends JPanel implements FrameCapture * @return a GstVideoPanel */ private static MediaViewVideoPanel getGstImpl() { - if (gstVideoPanel == null) { - gstVideoPanel = new GstVideoPanel(); - } - return gstVideoPanel; + return new GstVideoPanel(); } /** @@ -90,10 +81,7 @@ public abstract class MediaViewVideoPanel extends JPanel implements FrameCapture * @return a FXVideoPanel */ private static MediaViewVideoPanel getFXImpl() { - if (fxVideoPanel == null) { - fxVideoPanel = new FXVideoPanel(); - } - return fxVideoPanel; + return new FXVideoPanel(); } /** From 7ad39628115134d27c12f027fdf64b2aaa954e68 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Mon, 21 Oct 2013 11:27:34 -0400 Subject: [PATCH 074/179] Added error reporting for failure to parse recent activity lnk files. 
--- .../autopsy/coreutils/JLnkParser.java | 301 +++++++++--------- .../coreutils/JLnkParserException.java | 36 +++ .../autopsy/recentactivity/ExtractIE.java | 12 +- 3 files changed, 198 insertions(+), 151 deletions(-) create mode 100755 Core/src/org/sleuthkit/autopsy/coreutils/JLnkParserException.java diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/JLnkParser.java b/Core/src/org/sleuthkit/autopsy/coreutils/JLnkParser.java index e004da65ab..9c7177e780 100644 --- a/Core/src/org/sleuthkit/autopsy/coreutils/JLnkParser.java +++ b/Core/src/org/sleuthkit/autopsy/coreutils/JLnkParser.java @@ -52,165 +52,168 @@ public class JLnkParser { } } - public JLNK parse() { - ByteBuffer bb = ByteBuffer.wrap(content); - bb.order(ByteOrder.LITTLE_ENDIAN); - int header = bb.getInt(); - ByteBuffer linkClassIdentifier = bb.get(new byte[16]); - int linkFlags = bb.getInt(); - int fileAttributes = bb.getInt(); - long crtime = bb.getLong(); - long atime = bb.getLong(); - long mtime = bb.getLong(); - int fileSize = bb.getInt(); - int iconIndex = bb.getInt(); - int showCommand = bb.getInt(); - short hotkey = bb.getShort(); - bb.get(new byte[10]); // reserved (???) 
- List linkTargetIdList = new ArrayList(); - if((linkFlags & LnkEnums.LinkFlags.HasLinkTargetIDList.getFlag()) == - LnkEnums.LinkFlags.HasLinkTargetIDList.getFlag()) { - int idListSize = bb.getShort(); - int bytesRead = 0; - List linkTargetIdListBytes = new ArrayList(); - while(true) { - short itemIdSize = bb.getShort(); - if(itemIdSize == 0) { - bytesRead += 2; // two null bytes to terminate id list - break; + public JLNK parse() throws JLnkParserException { + try { + ByteBuffer bb = ByteBuffer.wrap(content); + bb.order(ByteOrder.LITTLE_ENDIAN); + int header = bb.getInt(); + ByteBuffer linkClassIdentifier = bb.get(new byte[16]); + int linkFlags = bb.getInt(); + int fileAttributes = bb.getInt(); + long crtime = bb.getLong(); + long atime = bb.getLong(); + long mtime = bb.getLong(); + int fileSize = bb.getInt(); + int iconIndex = bb.getInt(); + int showCommand = bb.getInt(); + short hotkey = bb.getShort(); + bb.get(new byte[10]); // reserved (???) + List linkTargetIdList = new ArrayList(); + if((linkFlags & LnkEnums.LinkFlags.HasLinkTargetIDList.getFlag()) == + LnkEnums.LinkFlags.HasLinkTargetIDList.getFlag()) { + int idListSize = bb.getShort(); + int bytesRead = 0; + List linkTargetIdListBytes = new ArrayList(); + while(true) { + short itemIdSize = bb.getShort(); + if(itemIdSize == 0) { + bytesRead += 2; // two null bytes to terminate id list + break; + } + byte[] theArray = new byte[itemIdSize-2]; + bb.get(theArray); // an idlist data object + linkTargetIdListBytes.add(theArray); + bytesRead = bytesRead + itemIdSize; } - byte[] theArray = new byte[itemIdSize-2]; - bb.get(theArray); // an idlist data object - linkTargetIdListBytes.add(theArray); - bytesRead = bytesRead + itemIdSize; + linkTargetIdList = parseLinkTargetIdList(linkTargetIdListBytes); } - linkTargetIdList = parseLinkTargetIdList(linkTargetIdListBytes); - } - boolean hasUnicodeLocalBaseAndCommonSuffixOffset = false; - String localBasePath = null; - String commonPathSuffix = null; - String 
localBasePathUnicode = null; - String commonPathSuffixUnicode = null; - int driveSerialNumber = -1; - DriveType driveType = null; - String volumeLabel = null; - int commonNetworkRelativeLinkFlags = -1; - NetworkProviderType networkProviderType = null; - boolean unicodeNetAndDeviceName = false; - String netName = null; - String netNameUnicode = null; - String deviceName = null; - String deviceNameUnicode = null; - - if((linkFlags & LnkEnums.LinkFlags.HasLinkInfo.getFlag()) == - LnkEnums.LinkFlags.HasLinkInfo.getFlag()) { - int startOfLinkInfo = bb.position(); - int linkInfoSize = bb.getInt(); - int linkInfoHeaderSize = bb.getInt(); - hasUnicodeLocalBaseAndCommonSuffixOffset = linkInfoHeaderSize >= 0x24; - int linkInfoFlags = bb.getInt(); - int volumeIdOffset = bb.getInt(); - int localBasePathOffset = bb.getInt(); - int commonNetworkRelativeLinkOffset = bb.getInt(); - int commonPathSuffixOffset = bb.getInt(); - int localBasePathOffsetUnicode = 0; - int commonPathSuffixOffsetUnicode = 0; - if (hasUnicodeLocalBaseAndCommonSuffixOffset) { - localBasePathOffsetUnicode = bb.getInt(); - commonPathSuffixOffsetUnicode = bb.getInt(); - } - if ((linkInfoFlags & LnkEnums.LinkInfoFlags.VolumeIDAndLocalBasePath.getFlag()) - == LnkEnums.LinkInfoFlags.VolumeIDAndLocalBasePath.getFlag()) { - bb.position(startOfLinkInfo+volumeIdOffset); - int volumeIdSize = bb.getInt(); - driveType = DriveType.valueOf(bb.getInt()); - driveSerialNumber = bb.getInt(); - int volumeLabelOffset = bb.getInt(); - if (volumeLabelOffset != 0x14) { - volumeLabel = parseString(startOfLinkInfo + volumeIdOffset + volumeLabelOffset, false, volumeIdSize - 0x10); - } else { - int volumeLabelOffsetUnicode = bb.getInt(); - volumeLabel = parseString(startOfLinkInfo + volumeIdOffset + volumeLabelOffsetUnicode, false, volumeIdSize - 0x14); + boolean hasUnicodeLocalBaseAndCommonSuffixOffset = false; + String localBasePath = null; + String commonPathSuffix = null; + String localBasePathUnicode = null; + String 
commonPathSuffixUnicode = null; + int driveSerialNumber = -1; + DriveType driveType = null; + String volumeLabel = null; + int commonNetworkRelativeLinkFlags = -1; + NetworkProviderType networkProviderType = null; + boolean unicodeNetAndDeviceName = false; + String netName = null; + String netNameUnicode = null; + String deviceName = null; + String deviceNameUnicode = null; + + if((linkFlags & LnkEnums.LinkFlags.HasLinkInfo.getFlag()) == + LnkEnums.LinkFlags.HasLinkInfo.getFlag()) { + int startOfLinkInfo = bb.position(); + int linkInfoSize = bb.getInt(); + int linkInfoHeaderSize = bb.getInt(); + hasUnicodeLocalBaseAndCommonSuffixOffset = linkInfoHeaderSize >= 0x24; + int linkInfoFlags = bb.getInt(); + int volumeIdOffset = bb.getInt(); + int localBasePathOffset = bb.getInt(); + int commonNetworkRelativeLinkOffset = bb.getInt(); + int commonPathSuffixOffset = bb.getInt(); + int localBasePathOffsetUnicode = 0; + int commonPathSuffixOffsetUnicode = 0; + if (hasUnicodeLocalBaseAndCommonSuffixOffset) { + localBasePathOffsetUnicode = bb.getInt(); + commonPathSuffixOffsetUnicode = bb.getInt(); } - localBasePath = parseLocalBasePath(startOfLinkInfo + localBasePathOffset, false); - } - if ((linkInfoFlags & LnkEnums.LinkInfoFlags.CommonNetworkRelativeLinkAndPathSuffix.getFlag()) - == LnkEnums.LinkInfoFlags.CommonNetworkRelativeLinkAndPathSuffix.getFlag()) { - bb.position(startOfLinkInfo+commonNetworkRelativeLinkOffset); - int commonNetworkRelativeLinkSize = bb.getInt(); - commonNetworkRelativeLinkFlags = bb.getInt(); - int netNameOffset = bb.getInt(); - unicodeNetAndDeviceName = netNameOffset > 0x14; - int deviceNameOffset = bb.getInt(); - int netType = bb.getInt(); - int netNameOffsetUnicode = 0; - int deviceNameOffsetUnicode = 0; - if (unicodeNetAndDeviceName) { - netNameOffsetUnicode = bb.getInt(); - deviceNameOffsetUnicode = bb.getInt(); + if ((linkInfoFlags & LnkEnums.LinkInfoFlags.VolumeIDAndLocalBasePath.getFlag()) + == 
LnkEnums.LinkInfoFlags.VolumeIDAndLocalBasePath.getFlag()) { + bb.position(startOfLinkInfo+volumeIdOffset); + int volumeIdSize = bb.getInt(); + driveType = DriveType.valueOf(bb.getInt()); + driveSerialNumber = bb.getInt(); + int volumeLabelOffset = bb.getInt(); + if (volumeLabelOffset != 0x14) { + volumeLabel = parseString(startOfLinkInfo + volumeIdOffset + volumeLabelOffset, false, volumeIdSize - 0x10); + } else { + int volumeLabelOffsetUnicode = bb.getInt(); + volumeLabel = parseString(startOfLinkInfo + volumeIdOffset + volumeLabelOffsetUnicode, false, volumeIdSize - 0x14); + } + localBasePath = parseLocalBasePath(startOfLinkInfo + localBasePathOffset, false); } - netName = parseNetName(startOfLinkInfo + commonNetworkRelativeLinkOffset + netNameOffset, false); - if (unicodeNetAndDeviceName) { - netNameUnicode = parseNetName(startOfLinkInfo + commonNetworkRelativeLinkOffset + netNameOffsetUnicode, true); - } - if ((commonNetworkRelativeLinkFlags & LnkEnums.CommonNetworkRelativeLinkFlags.ValidNetType.getFlag()) - == LnkEnums.CommonNetworkRelativeLinkFlags.ValidNetType.getFlag()) { - networkProviderType = LnkEnums.NetworkProviderType.valueOf(netType); - } - if ((commonNetworkRelativeLinkFlags & LnkEnums.CommonNetworkRelativeLinkFlags.ValidDevice.getFlag()) - == LnkEnums.CommonNetworkRelativeLinkFlags.ValidDevice.getFlag()) { - deviceName = parseDeviceName(startOfLinkInfo + commonNetworkRelativeLinkOffset + deviceNameOffset, false); + if ((linkInfoFlags & LnkEnums.LinkInfoFlags.CommonNetworkRelativeLinkAndPathSuffix.getFlag()) + == LnkEnums.LinkInfoFlags.CommonNetworkRelativeLinkAndPathSuffix.getFlag()) { + bb.position(startOfLinkInfo+commonNetworkRelativeLinkOffset); + int commonNetworkRelativeLinkSize = bb.getInt(); + commonNetworkRelativeLinkFlags = bb.getInt(); + int netNameOffset = bb.getInt(); + unicodeNetAndDeviceName = netNameOffset > 0x14; + int deviceNameOffset = bb.getInt(); + int netType = bb.getInt(); + int netNameOffsetUnicode = 0; + int 
deviceNameOffsetUnicode = 0; if (unicodeNetAndDeviceName) { - deviceNameUnicode = parseDeviceName(startOfLinkInfo + commonNetworkRelativeLinkOffset + deviceNameOffsetUnicode, true); + netNameOffsetUnicode = bb.getInt(); + deviceNameOffsetUnicode = bb.getInt(); + } + netName = parseNetName(startOfLinkInfo + commonNetworkRelativeLinkOffset + netNameOffset, false); + if (unicodeNetAndDeviceName) { + netNameUnicode = parseNetName(startOfLinkInfo + commonNetworkRelativeLinkOffset + netNameOffsetUnicode, true); + } + if ((commonNetworkRelativeLinkFlags & LnkEnums.CommonNetworkRelativeLinkFlags.ValidNetType.getFlag()) + == LnkEnums.CommonNetworkRelativeLinkFlags.ValidNetType.getFlag()) { + networkProviderType = LnkEnums.NetworkProviderType.valueOf(netType); + } + if ((commonNetworkRelativeLinkFlags & LnkEnums.CommonNetworkRelativeLinkFlags.ValidDevice.getFlag()) + == LnkEnums.CommonNetworkRelativeLinkFlags.ValidDevice.getFlag()) { + deviceName = parseDeviceName(startOfLinkInfo + commonNetworkRelativeLinkOffset + deviceNameOffset, false); + if (unicodeNetAndDeviceName) { + deviceNameUnicode = parseDeviceName(startOfLinkInfo + commonNetworkRelativeLinkOffset + deviceNameOffsetUnicode, true); + } } } + commonPathSuffix = parseCommonPathSuffix(startOfLinkInfo + commonPathSuffixOffset, false); + if (((linkInfoFlags & LnkEnums.LinkInfoFlags.VolumeIDAndLocalBasePath.getFlag()) + == LnkEnums.LinkInfoFlags.VolumeIDAndLocalBasePath.getFlag()) + && hasUnicodeLocalBaseAndCommonSuffixOffset) { + localBasePathUnicode = parseLocalBasePath(startOfLinkInfo + localBasePathOffsetUnicode, true); + commonPathSuffixUnicode = parseCommonPathSuffix(startOfLinkInfo + commonPathSuffixOffsetUnicode, true); + } + + bb.position(startOfLinkInfo+linkInfoSize); } - commonPathSuffix = parseCommonPathSuffix(startOfLinkInfo + commonPathSuffixOffset, false); - if (((linkInfoFlags & LnkEnums.LinkInfoFlags.VolumeIDAndLocalBasePath.getFlag()) - == LnkEnums.LinkInfoFlags.VolumeIDAndLocalBasePath.getFlag()) - && 
hasUnicodeLocalBaseAndCommonSuffixOffset) { - localBasePathUnicode = parseLocalBasePath(startOfLinkInfo + localBasePathOffsetUnicode, true); - commonPathSuffixUnicode = parseCommonPathSuffix(startOfLinkInfo + commonPathSuffixOffsetUnicode, true); + String name = null; + if((linkFlags & LnkEnums.LinkFlags.HasName.getFlag()) == + LnkEnums.LinkFlags.HasName.getFlag()) { + name = readStringData(bb); + } + String relativePath = null; + if((linkFlags & LnkEnums.LinkFlags.HasRelativePath.getFlag()) == + LnkEnums.LinkFlags.HasRelativePath.getFlag()) { + relativePath = readStringData(bb); + } + String workingDir = null; + if((linkFlags & LnkEnums.LinkFlags.HasWorkingDir.getFlag()) == + LnkEnums.LinkFlags.HasWorkingDir.getFlag()) { + workingDir = readStringData(bb); + } + String arguments = null; + if((linkFlags & LnkEnums.LinkFlags.HasArguments.getFlag()) == + LnkEnums.LinkFlags.HasArguments.getFlag()) { + arguments = readStringData(bb); + } + String iconLocation = null; + if((linkFlags & LnkEnums.LinkFlags.HasIconLocation.getFlag()) == + LnkEnums.LinkFlags.HasIconLocation.getFlag()) { + iconLocation = readStringData(bb); } - bb.position(startOfLinkInfo+linkInfoSize); + return new JLNK(header, linkClassIdentifier.array(), linkFlags, fileAttributes, + crtime, atime, mtime, fileSize, iconIndex, showCommand, hotkey, + linkTargetIdList, + hasUnicodeLocalBaseAndCommonSuffixOffset, localBasePath, + commonPathSuffix, localBasePathUnicode, commonPathSuffixUnicode, + name, relativePath, workingDir, arguments, iconLocation, driveSerialNumber, + driveType, volumeLabel, commonNetworkRelativeLinkFlags, + networkProviderType, unicodeNetAndDeviceName, netName, netNameUnicode, + deviceName, deviceNameUnicode); + } catch (Exception e) { + throw new JLnkParserException(e); } - String name = null; - if((linkFlags & LnkEnums.LinkFlags.HasName.getFlag()) == - LnkEnums.LinkFlags.HasName.getFlag()) { - name = readStringData(bb); - } - String relativePath = null; - if((linkFlags & 
LnkEnums.LinkFlags.HasRelativePath.getFlag()) == - LnkEnums.LinkFlags.HasRelativePath.getFlag()) { - relativePath = readStringData(bb); - } - String workingDir = null; - if((linkFlags & LnkEnums.LinkFlags.HasWorkingDir.getFlag()) == - LnkEnums.LinkFlags.HasWorkingDir.getFlag()) { - workingDir = readStringData(bb); - } - String arguments = null; - if((linkFlags & LnkEnums.LinkFlags.HasArguments.getFlag()) == - LnkEnums.LinkFlags.HasArguments.getFlag()) { - arguments = readStringData(bb); - } - String iconLocation = null; - if((linkFlags & LnkEnums.LinkFlags.HasIconLocation.getFlag()) == - LnkEnums.LinkFlags.HasIconLocation.getFlag()) { - iconLocation = readStringData(bb); - } - - return new JLNK(header, linkClassIdentifier.array(), linkFlags, fileAttributes, - crtime, atime, mtime, fileSize, iconIndex, showCommand, hotkey, - linkTargetIdList, - hasUnicodeLocalBaseAndCommonSuffixOffset, localBasePath, - commonPathSuffix, localBasePathUnicode, commonPathSuffixUnicode, - name, relativePath, workingDir, arguments, iconLocation, driveSerialNumber, - driveType, volumeLabel, commonNetworkRelativeLinkFlags, - networkProviderType, unicodeNetAndDeviceName, netName, netNameUnicode, - deviceName, deviceNameUnicode); - } private String readStringData(ByteBuffer bb) { @@ -337,3 +340,5 @@ public class JLnkParser { return new String(nameArr).split("\0")[0]; } } + + diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/JLnkParserException.java b/Core/src/org/sleuthkit/autopsy/coreutils/JLnkParserException.java new file mode 100755 index 0000000000..4f9f5b646b --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/coreutils/JLnkParserException.java @@ -0,0 +1,36 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2012 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.coreutils; + +/** + * + * @author jwallace + */ +public class JLnkParserException extends Exception { + + /** + * Constructs an instance of + * JLnkParserException caused by the given exception. + * + * @param msg the detail message. + */ + public JLnkParserException(Exception cause) { + super(cause); + } +} diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java index 4215d05b18..12df6e693d 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java @@ -48,6 +48,7 @@ import org.openide.modules.InstalledFileLocator; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.JLNK; import org.sleuthkit.autopsy.coreutils.JLnkParser; +import org.sleuthkit.autopsy.coreutils.JLnkParserException; import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; import org.sleuthkit.autopsy.ingest.IngestServices; @@ -239,10 +240,14 @@ public class ExtractIE extends Extract { JLnkParser lnkParser = new JLnkParser(new ReadContentInputStream(fav), (int) fav.getSize()); try { lnk = lnkParser.parse(); - } catch (Exception e) { + } catch (JLnkParserException e) { //TODO should throw a specific checked exception - logger.log(Level.SEVERE, "Error lnk parsing the file to get recent files" + recentFile, e); - this.addErrorMessage(this.getName() + ": 
Error parsing Recent File " + recentFile.getName()); + boolean unalloc = recentFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC) + || recentFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC); + if (unalloc == false) { + logger.log(Level.SEVERE, "Error lnk parsing the file to get recent files" + recentFile, e); + this.addErrorMessage(this.getName() + ": Error parsing Recent File " + recentFile.getName()); + } continue; } String path = lnk.getBestPath(); @@ -332,6 +337,7 @@ public class ExtractIE extends Extract { //Delete index.dat file since it was succcessfully by Pasco datFile.delete(); } else { + logger.log(Level.WARNING, "pasco execution failed on: " + this.getName()); this.addErrorMessage(this.getName() + ": Error processing Internet Explorer history."); } } From b2d7c471bfbfb83395ba0c4aa12cea8379931b82 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Mon, 21 Oct 2013 11:50:04 -0400 Subject: [PATCH 075/179] Renamed parseReg method to be more specific parseAutopsyPluginOutput. 
--- .../sleuthkit/autopsy/recentactivity/ExtractRegistry.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java index 0b7638ee23..f047928918 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java @@ -175,7 +175,7 @@ public class ExtractRegistry extends Extract { RegOutputFiles regOutputFiles = executeRegRip(regFileNameLocal, outputPathBase); if (regOutputFiles.autopsyPlugins.isEmpty() == false) { - if (parseReg(regOutputFiles.autopsyPlugins, regFile.getId(), extrctr) == false) { + if (parseAutopsyPluginOutput(regOutputFiles.autopsyPlugins, regFile.getId(), extrctr) == false) { this.addErrorMessage(this.getName() + ": Failed parsing registry file results " + regFileName); continue; } @@ -217,8 +217,8 @@ public class ExtractRegistry extends Extract { // TODO - add error message here? java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); } - regFileNameLocalFile.delete(); } + regFileNameLocalFile.delete(); } try { @@ -328,7 +328,7 @@ public class ExtractRegistry extends Extract { // @@@ VERIFY that we are doing the right thing when we parse multiple NTUSER.DAT - private boolean parseReg(String regRecord, long orgId, ExtractUSB extrctr) { + private boolean parseAutopsyPluginOutput(String regRecord, long orgId, ExtractUSB extrctr) { FileInputStream fstream = null; try { Case currentCase = Case.getCurrentCase(); // get the most updated case From 753fb9a30056aeecd74ee6032a917fb33d5fb0b6 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Mon, 21 Oct 2013 15:40:08 -0400 Subject: [PATCH 076/179] Added date and time to firefox bookmark data. 
--- .../src/org/sleuthkit/autopsy/recentactivity/Firefox.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java index 4322a2b926..16d8916343 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java @@ -56,7 +56,7 @@ public class Firefox extends Extract { private static final String ffquery = "SELECT moz_historyvisits.id,url,title,visit_count,(visit_date/1000000) as visit_date,from_visit,(SELECT url FROM moz_places WHERE id=moz_historyvisits.from_visit) as ref FROM moz_places, moz_historyvisits WHERE moz_places.id = moz_historyvisits.place_id AND hidden = 0"; private static final String ffcookiequery = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed,(creationTime/1000000) as creationTime FROM moz_cookies"; private static final String ff3cookiequery = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed FROM moz_cookies"; - private static final String ffbookmarkquery = "SELECT fk, moz_bookmarks.title, url FROM moz_bookmarks INNER JOIN moz_places ON moz_bookmarks.fk=moz_places.id"; + private static final String ffbookmarkquery = "SELECT fk, moz_bookmarks.title, url, (moz_bookmarks.dateAdded/1000000) as dateAdded FROM moz_bookmarks INNER JOIN moz_places ON moz_bookmarks.fk=moz_places.id"; private static final String ffdownloadquery = "select target, source,(startTime/1000000) as startTime, maxBytes from moz_downloads"; public int FireFoxCount = 0; final public static String MODULE_VERSION = "1.0"; @@ -178,6 +178,7 @@ public class Firefox extends Extract { Collection bbattributes = new ArrayList(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("url").toString() != null) ? 
result.get("url").toString() : ""))); //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", ((result.get("url").toString() != null) ? EscapeUtil.decodeURL(result.get("url").toString()) : ""))); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("dateAdded").toString())))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", ((result.get("title").toString() != null) ? result.get("title").toString() : ""))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", (Util.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "")))); From dfd00f2c55a9802e9eab09ad40704c03f2c78488 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Mon, 21 Oct 2013 15:45:53 -0400 Subject: [PATCH 077/179] Add Firefox Downloads artifact even if URL cannot be decoded. 
--- .../autopsy/recentactivity/Firefox.java | 25 +++++++++++-------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java index 16d8916343..fb0815f689 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java @@ -309,23 +309,28 @@ public class Firefox extends Extract { List> tempList = this.dbConnect(temps, ffdownloadquery); logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + "artifacts identified."); for (HashMap result : tempList) { + + Collection bbattributes = new ArrayList(); + + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("source").toString() != null) ? result.get("source").toString() : ""))); + //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", ((result.get("source").toString() != null) ? EscapeUtil.decodeURL(result.get("source").toString()) : ""))); + //TODO Revisit usage of deprecated constructor as per TSK-583 + //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "RecentActivity", "Last Visited", (Long.valueOf(result.get("startTime").toString())))); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("startTime").toString())))); + try { - Collection bbattributes = new ArrayList(); String urldecodedtarget = URLDecoder.decode(result.get("source").toString().replaceAll("file:///", ""), "UTF-8"); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("source").toString() != null) ? 
result.get("source").toString() : ""))); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", ((result.get("source").toString() != null) ? EscapeUtil.decodeURL(result.get("source").toString()) : ""))); - //TODO Revisit usage of deprecated constructor as per TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "RecentActivity", "Last Visited", (Long.valueOf(result.get("startTime").toString())))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("startTime").toString())))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "RecentActivity", Util.findID(dataSource, urldecodedtarget))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), "RecentActivity", ((result.get("target").toString() != null) ? result.get("target").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", (Util.extractDomain((result.get("source").toString() != null) ? result.get("source").toString() : "")))); - this.addArtifact(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadsFile, bbattributes); } catch (UnsupportedEncodingException ex) { logger.log(Level.SEVERE, "Error decoding Firefox download URL in " + temps, ex); errors++; } + + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), "RecentActivity", ((result.get("target").toString() != null) ? result.get("target").toString() : ""))); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", (Util.extractDomain((result.get("source").toString() != null) ? 
result.get("source").toString() : "")))); + this.addArtifact(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadsFile, bbattributes); + } if (errors > 0) { this.addErrorMessage(this.getName() + ": Error parsing " + errors + " Firefox web history artifacts."); From 2fbdb1c53c10324e3e03165bf7c85913aa95cf01 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Mon, 21 Oct 2013 16:01:36 -0400 Subject: [PATCH 078/179] Resets image panel correctly when resetComponent is called. --- .../autopsy/corecomponents/DataContentViewerMedia.java | 1 + .../autopsy/corecomponents/MediaViewImagePanel.java | 9 +++++++++ 2 files changed, 10 insertions(+) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerMedia.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerMedia.java index e9f470ed6e..3e1da04052 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerMedia.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerMedia.java @@ -185,6 +185,7 @@ public class DataContentViewerMedia extends javax.swing.JPanel implements DataCo @Override public void resetComponent() { videoPanel.reset(); + imagePanel.reset(); lastFile = null; } diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/MediaViewImagePanel.java b/Core/src/org/sleuthkit/autopsy/corecomponents/MediaViewImagePanel.java index bcaa1d280c..f621c1859f 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/MediaViewImagePanel.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/MediaViewImagePanel.java @@ -106,6 +106,15 @@ public class MediaViewImagePanel extends javax.swing.JPanel { } + public void reset() { + Platform.runLater(new Runnable() { + @Override + public void run() { + fxImageView.setImage(null); + } + }); + } + /** * Show image * From bee91a40ce5dfd392129886586b40e1d2097d9ae Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 21 Oct 2013 19:01:25 -0400 Subject: [PATCH 079/179] Updated reporting to work with new tags API --- 
.../casemodule/services/TagsManager.java | 30 +- .../sleuthkit/autopsy/report/ReportExcel.java | 41 +-- .../autopsy/report/ReportGenerator.java | 329 +++++++++++------- .../sleuthkit/autopsy/report/ReportHTML.java | 224 +++++------- .../autopsy/report/ReportWizardAction.java | 2 +- .../autopsy/report/TableReportModule.java | 3 +- 6 files changed, 321 insertions(+), 308 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java index 22b2c1b98b..a41743784c 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java @@ -97,7 +97,7 @@ public class TagsManager implements Closeable { tagNames.clear(); tskCase.getTagNamesInUse(tagNames); } - + /** * Checks whether a tag name with a given display name exists. * @param [in] tagDisplayName The display name for which to check. @@ -227,6 +227,20 @@ public class TagsManager implements Closeable { tskCase.deleteContentTag(tag); } + /** + * Gets all content tags for the current case. + * @param [out] tags A list, possibly empty, of content tags. + * @throws TskCoreException + */ + public void getAllContentTags(List tags) throws TskCoreException { + // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. + if (!tagNamesInitialized) { + getExistingTagNames(); + } + + tskCase.getAllContentTags(tags); + } + /** * Gets content tags count by tag name. * @param [in] tagName The tag name of interest. @@ -297,6 +311,20 @@ public class TagsManager implements Closeable { tskCase.deleteBlackboardArtifactTag(tag); } + /** + * Gets all blackboard artifact tags for the current case. + * @param [out] tags A list, possibly empty, of blackboard artifact tags. 
+ * @throws TskCoreException + */ + public void getAllBlackboardArtifactTags(List tags) throws TskCoreException { + // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. + if (!tagNamesInitialized) { + getExistingTagNames(); + } + + tskCase.getAllBlackboardArtifactTags(tags); + } + /** * Gets blackboard artifact tags count by tag name. * @param [in] tagName The tag name of interest. diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportExcel.java b/Core/src/org/sleuthkit/autopsy/report/ReportExcel.java index fbf1ba19f6..5a221e6ef6 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportExcel.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportExcel.java @@ -121,40 +121,6 @@ public class ReportExcel implements TableReportModule { } } - - /** - * Start a new worksheet for the given data type. - * @param name data type name - */ - @Override - public void startDataType(String name) { - // Create a worksheet for the data type (assumed to be an artifact type). - name = escapeForExcel(name); - sheet = wb.createSheet(name); - sheet.setAutobreaks(true); - rowIndex = 0; - artifactsCount = 0; - - // Add a title row to the worksheet. - Row row = sheet.createRow(rowIndex); - row.setRowStyle(setStyle); - row.createCell(0).setCellValue(name); - ++rowIndex; - - // Add an artifacts count row. The actual count will be filled in later. - row = sheet.createRow(rowIndex); - row.setRowStyle(setStyle); - row.createCell(0).setCellValue("Number of artifacts:"); - ++rowIndex; - - // Add an empty row as a separator. - sheet.createRow(rowIndex); - ++rowIndex; - - // There will be at least two columns, one each for the artifacts count and its label. - sheetColCount = 2; - } - /** * Start a new worksheet for the given data type. * Note: This method is a temporary workaround to avoid modifying the TableReportModule interface. 
@@ -162,7 +128,8 @@ public class ReportExcel implements TableReportModule { * @param name Name of the data type * @param comment Comment on the data type, may be the empty string */ - public void startDataType(String name, String comment) { + @Override + public void startDataType(String name, String description) { // Create a worksheet for the data type (assumed to be an artifact type). name = escapeForExcel(name); sheet = wb.createSheet(name); @@ -183,10 +150,10 @@ public class ReportExcel implements TableReportModule { ++rowIndex; // Add a comment row, if a comment was supplied. - if (!comment.isEmpty()) { + if (!description.isEmpty()) { row = sheet.createRow(rowIndex); row.setRowStyle(setStyle); - row.createCell(0).setCellValue(comment); + row.createCell(0).setCellValue(description); ++rowIndex; } diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java b/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java index 45a551759d..792bfa169b 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java @@ -50,7 +50,6 @@ import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.EscapeUtil; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; -import org.sleuthkit.autopsy.datamodel.Tags; import org.sleuthkit.autopsy.report.ReportProgressPanel.ReportStatus; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -58,16 +57,15 @@ import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardArtifactTag; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; +import org.sleuthkit.datamodel.ContentTag; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; /** - * Generates all TableReportModules and 
GeneralReportModules, given whether each module for both - * types is enabled or disabled, and the base report path to save them at. - * - * After creating an instance of ReportGenerator, one must tell it which reports to run, - * TableReportModules on Tags or Artifacts, and the GeneralReportModules. - * Then, one calls displayProgressPanels() to display the progress to the user. + * Instances of this class use GeneralReportModules, TableReportModules and + * FileReportModules to generate a report. If desired, displayProgressPanels() + * can be called to show report generation progress using ReportProgressPanel + * objects displayed using a dialog box. */ public class ReportGenerator { private static final Logger logger = Logger.getLogger(ReportGenerator.class.getName()); @@ -106,10 +104,11 @@ public class ReportGenerator { } /** - * For every ReportModule which the user enabled, create a ReportProgressPanel for that report. + * Create a ReportProgressPanel for each report generation module selected by the user. * - * @param tableModuleStates the enabled/disabled state of each TableReportModule - * @param generalModuleStates the enabled/disabled state of each GeneralReportModule + * @param tableModuleStates The enabled/disabled state of each TableReportModule + * @param generalModuleStates The enabled/disabled state of each GeneralReportModule + * @param fileListModuleStates The enabled/disabled state of each FileReportModule */ private void setupProgressPanels(Map tableModuleStates, Map generalModuleStates, Map fileListModuleStates) { if (null != tableModuleStates) { @@ -174,28 +173,28 @@ public class ReportGenerator { } /** - * Generate the GeneralReportModule reports in a new SwingWorker. + * Run the GeneralReportModules using a SwingWorker. 
*/ public void generateGeneralReports() { - GeneralWorker worker = new GeneralWorker(); + GeneralReportsWorker worker = new GeneralReportsWorker(); worker.execute(); } /** - * Generate the TableReportModule reports on Blackboard Artifacts in a new SwingWorker. + * Run the TableReportModules using a SwingWorker. * * @param artifactTypeSelections the enabled/disabled state of the artifact types to be included in the report - * @param tagSelections the enabled/disabled state of the tags to be included in the report + * @param tagSelections the enabled/disabled state of the tag names to be included in the report */ - public void generateArtifactTableReports(Map artifactTypeSelections, Map tagSelections) { + public void generateBlackboardArtifactsReports(Map artifactTypeSelections, Map tagNameSelections) { if (!tableProgress.isEmpty() && null != artifactTypeSelections) { - ArtifactsReportsWorker worker = new ArtifactsReportsWorker(artifactTypeSelections, tagSelections); + TableReportsWorker worker = new TableReportsWorker(artifactTypeSelections, tagNameSelections); worker.execute(); } } /** - * Generate the FileReportModule reports in a new SwingWorker. + * Run the FileReportModules using a SwingWorker. * * @param enabledInfo the Information that should be included about each file * in the report. @@ -214,9 +213,9 @@ public class ReportGenerator { } /** - * SwingWorker to generate a report on all GeneralReportModules. + * SwingWorker to run GeneralReportModules. */ - private class GeneralWorker extends SwingWorker { + private class GeneralReportsWorker extends SwingWorker { @Override protected Integer doInBackground() throws Exception { @@ -232,7 +231,7 @@ public class ReportGenerator { } /** - * SwingWorker to generate a FileReport. + * SwingWorker to run FileReportModules. 
*/ private class FileReportsWorker extends SwingWorker { private List enabledInfo = Arrays.asList(FileReportDataTypes.values()); @@ -317,15 +316,15 @@ public class ReportGenerator { } /** - * SwingWorker to generate reports on blackboard artifacts. + * SwingWorker to run TableReportModules to report on blackboard artifacts, + * content tags, and blackboard artifact tags. */ - private class ArtifactsReportsWorker extends SwingWorker { + private class TableReportsWorker extends SwingWorker { private List tableModules = new ArrayList<>(); private List artifactTypes = new ArrayList<>(); private HashSet tagNamesFilter = new HashSet<>(); - // Create an ArtifactWorker with the enabled/disabled state of all Artifacts - ArtifactsReportsWorker(Map artifactTypeSelections, Map tagSelections) { + TableReportsWorker(Map artifactTypeSelections, Map tagNameSelections) { // Get the report modules selected by the user. for (Entry entry : tableProgress.entrySet()) { tableModules.add(entry.getKey()); @@ -338,9 +337,9 @@ public class ReportGenerator { } } - // Get the tags selected by the user. - if (null != tagSelections) { - for (Entry entry : tagSelections.entrySet()) { + // Get the tag names selected by the user and make a tag names filter. + if (null != tagNameSelections) { + for (Entry entry : tagNameSelections.entrySet()) { if (entry.getValue() == true) { tagNamesFilter.add(entry.getKey()); } @@ -350,39 +349,46 @@ public class ReportGenerator { @Override protected Integer doInBackground() throws Exception { - // Start the report + // Start the progress indicators for each active TableReportModule. 
for (TableReportModule module : tableModules) { ReportProgressPanel progress = tableProgress.get(module); if (progress.getStatus() != ReportStatus.CANCELED) { module.startReport(reportPath); progress.start(); progress.setIndeterminate(false); - progress.setMaximumProgress(ARTIFACT_TYPE.values().length); + progress.setMaximumProgress(ARTIFACT_TYPE.values().length + 2); // +2 for content and blackboard artifact tags } } + + makeBlackboardArtifactTables(); + makeContentTagsTables(); + makeBlackboardArtifactTagsTables(); - // Make a comment on the tags filter. + for (TableReportModule module : tableModules) { + tableProgress.get(module).complete(); + module.endReport(); + } + + return 0; + } + + private void makeBlackboardArtifactTables() { + // Make a comment string describing the tag names filter in effect. StringBuilder comment = new StringBuilder(); if (!tagNamesFilter.isEmpty()) { - comment.append("This report only includes files and artifacts tagged with: "); + comment.append("This report only includes results tagged with: "); comment.append(makeCommaSeparatedList(tagNamesFilter)); } - - // For every enabled artifact type + + // Add a table to the report for every enabled blackboard artifact type. for (ARTIFACT_TYPE type : artifactTypes) { - // Check to see if all the TableReportModules have been canceled + // Check for cancellaton. + removeCancelledTableReportModules(); if (tableModules.isEmpty()) { - break; + return; } - Iterator iter = tableModules.iterator(); - while (iter.hasNext()) { - TableReportModule module = iter.next(); - if (tableProgress.get(module).getStatus() == ReportStatus.CANCELED) { - iter.remove(); - } - } - - // If the type is keyword hit or hashset hit, use the helper + + // Keyword hits and hashset hit artifacts get sepcial handling. 
if (type.equals(ARTIFACT_TYPE.TSK_KEYWORD_HIT)) { writeKeywordHits(tableModules, comment.toString(), tagNamesFilter); continue; @@ -392,20 +398,19 @@ public class ReportGenerator { } List unsortedArtifacts = getFilteredArtifacts(type, tagNamesFilter); - if (unsortedArtifacts.isEmpty()) { - // Don't report on this artifact type if there are no results continue; } + // The most efficient way to sort all the Artifacts is to add them to a List, and then // sort that List based off a Comparator. Adding to a TreeMap/Set/List sorts the list // each time an element is added, which adds unnecessary overhead if we only need it sorted once. Collections.sort(unsortedArtifacts); // Get the column headers appropriate for the artifact type. - /* @@@ BC: Seems like a better design here woudl be to have a method that - * takes in teh artifact as an argument andreturns the attributes. We then use that - * to make the headers and to make each row afterwards so that we don't ahve artifact-specific + /* @@@ BC: Seems like a better design here would be to have a method that + * takes in the artifact as an argument and returns the attributes. We then use that + * to make the headers and to make each row afterwards so that we don't have artifact-specific * logic in both getArtifactTableCoumnHeaders and getArtifactRow() */ List columnHeaders = getArtifactTableColumnHeaders(type.getTypeID()); @@ -414,37 +419,17 @@ public class ReportGenerator { MessageNotifyUtil.Notify.show("Skipping artifact type " + type + " in reports", "Unknown columns to report on", MessageNotifyUtil.MessageType.ERROR); continue; } - + for (TableReportModule module : tableModules) { tableProgress.get(module).updateStatusLabel("Now processing " + type.getDisplayName() + "..."); - - // This is a temporary workaround to avoid modifying the TableReportModule interface. 
- if (module instanceof ReportHTML) { - ReportHTML htmlReportModule = (ReportHTML)module; - htmlReportModule.startDataType(type.getDisplayName(), comment.toString()); - htmlReportModule.startTable(columnHeaders, type); - } - else if (module instanceof ReportExcel) { - ReportExcel excelReportModule = (ReportExcel)module; - excelReportModule.startDataType(type.getDisplayName(), comment.toString()); - excelReportModule.startTable(columnHeaders); - } - else { - module.startDataType(type.getDisplayName()); - module.startTable(columnHeaders); - } + module.startDataType(type.getDisplayName(), comment.toString()); + module.startTable(columnHeaders); } - + boolean msgSent = false; - for(ArtifactData artifactData : unsortedArtifacts) { -// HashSet tags = artifactData.getTags(); -// -// String tagsList = makeCommaSeparatedList(tags); - + for(ArtifactData artifactData : unsortedArtifacts) { // Add the row data to all of the reports. - for (TableReportModule module : tableModules) { - - // Get the row data for this type of artifact. + for (TableReportModule module : tableModules) { List rowData; rowData = getArtifactRow(artifactData, module); if (rowData.isEmpty()) { @@ -455,44 +440,160 @@ public class ReportGenerator { continue; } -// // This is a temporary workaround to avoid modifying the TableReportModule interface. -// if (module instanceof ReportHTML) { -// ReportHTML htmlReportModule = (ReportHTML)module; -// htmlReportModule.addRow(rowData, artifactData.getArtifact()); -// } -// else { - module.addRow(rowData); -// } + module.addRow(rowData); } } - + // Finish up this data type for (TableReportModule module : tableModules) { tableProgress.get(module).increment(); module.endTable(); module.endDataType(); } - } - - // End the report - for (TableReportModule module : tableModules) { - tableProgress.get(module).complete(); - module.endReport(); - } - - return 0; + } } + + private void makeContentTagsTables() { + // Check for cancellaton. 
+ removeCancelledTableReportModules(); + if (tableModules.isEmpty()) { + return; + } + + // Get the content tags. + ArrayList tags = new ArrayList<>(); + try { + Case.getCurrentCase().getServices().getTagsManager().getAllContentTags(tags); + } + catch (TskCoreException ex) { + logger.log(Level.SEVERE, "failed to get content tags", ex); + return; + } + + // Tell the modules reporting on content tags is beginning. + for (TableReportModule module : tableModules) { + // @@@ This casting is a tricky little workaround to allow the HTML report module to slip in a content hyperlink. + // @@@ Alos Using the obsolete ARTIFACT_TYPE.TSK_TAG_FILE is also an expedient hack. + tableProgress.get(module).updateStatusLabel("Now processing " + ARTIFACT_TYPE.TSK_TAG_FILE.getDisplayName() + "..."); + ArrayList columnHeaders = new ArrayList<>(Arrays.asList("File", "Tag", "Comment")); + StringBuilder comment = new StringBuilder(); + if (!tagNamesFilter.isEmpty()) { + comment.append("This report only includes file tagged with: "); + comment.append(makeCommaSeparatedList(tagNamesFilter)); + } + if (module instanceof ReportHTML) { + ReportHTML htmlReportModule = (ReportHTML)module; + htmlReportModule.startDataType(ARTIFACT_TYPE.TSK_TAG_FILE.getDisplayName(), comment.toString()); + htmlReportModule.startContentTagsTable(columnHeaders); + } + else { + module.startDataType(ARTIFACT_TYPE.TSK_TAG_FILE.getDisplayName(), comment.toString()); + module.startTable(columnHeaders); + } + } + + // Give the modules the rows for the content tags. + for (ContentTag tag : tags) { + // Apply the tag names filter. 
+ if (!tagNamesFilter.isEmpty()) { + if (tagNamesFilter.contains(tag.getName().getDisplayName())) { + continue; + } + } + + ArrayList rowData = new ArrayList<>(Arrays.asList(tag.getContent().getName(), tag.getName().getDisplayName(), tag.getComment())); + for (TableReportModule module : tableModules) { + // @@@ This casting is a tricky little workaround to allow the HTML report module to slip in a content hyperlink. + if (module instanceof ReportHTML) { + ReportHTML htmlReportModule = (ReportHTML)module; + htmlReportModule.addRowWithTaggedContentHyperlink(rowData, tag); + } + else { + module.addRow(rowData); + } + } + } + + // The the modules content tags reporting is ended. + for (TableReportModule module : tableModules) { + tableProgress.get(module).increment(); + module.endTable(); + module.endDataType(); + } + } + + private void makeBlackboardArtifactTagsTables() { + // Check for cancellaton. + removeCancelledTableReportModules(); + if (tableModules.isEmpty()) { + return; + } + + ArrayList tags = new ArrayList<>(); + try { + Case.getCurrentCase().getServices().getTagsManager().getAllBlackboardArtifactTags(tags); + } + catch (TskCoreException ex) { + logger.log(Level.SEVERE, "failed to get blackboard artifact tags", ex); + return; + } + + // Tell the modules reporting on blackboard artifact tags data type is beginning. + // @@@ Using the obsolete ARTIFACT_TYPE.TSK_TAG_ARTIFACT is an expedient hack. 
+ for (TableReportModule module : tableModules) { + tableProgress.get(module).updateStatusLabel("Now processing " + ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getDisplayName() + "..."); + StringBuilder comment = new StringBuilder(); + if (!tagNamesFilter.isEmpty()) { + comment.append("This report only includes results tagged with: "); + comment.append(makeCommaSeparatedList(tagNamesFilter)); + } + module.startDataType(ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getDisplayName(), comment.toString()); + module.startTable(new ArrayList<>(Arrays.asList("Result Type", "Tag", "Comment", "Source File"))); + } + + // Give the modules the rows for the content tags. + for (BlackboardArtifactTag tag : tags) { + // Apply the tag names filter. + if (!tagNamesFilter.isEmpty()) { + if (tagNamesFilter.contains(tag.getName().getDisplayName())) { + continue; + } + } + + for (TableReportModule module : tableModules) { + module.addRow(new ArrayList<>(Arrays.asList(tag.getArtifact().getArtifactTypeName(), tag.getName().getDisplayName(), tag.getComment(), tag.getContent().getName()))); + } + } + + // The the modules blackboard artifact tags reporting is ended. + for (TableReportModule module : tableModules) { + tableProgress.get(module).increment(); + module.endTable(); + module.endDataType(); + } + } + + void removeCancelledTableReportModules() { + Iterator iter = tableModules.iterator(); + while (iter.hasNext()) { + TableReportModule module = iter.next(); + if (tableProgress.get(module).getStatus() == ReportStatus.CANCELED) { + iter.remove(); + } + } + } } - - private Boolean failsTagFilter(HashSet tags, HashSet tagsFilter) + + /// @@@ Should move the methods specific to TableReportsWorker into that scope. 
+ private Boolean failsTagFilter(HashSet tagNames, HashSet tagsNamesFilter) { - if (null == tagsFilter || tagsFilter.isEmpty()) { + if (null == tagsNamesFilter || tagsNamesFilter.isEmpty()) { return false; } - HashSet filteredTags = new HashSet<>(tags); - filteredTags.retainAll(tagsFilter); - return filteredTags.isEmpty(); + HashSet filteredTagNames = new HashSet<>(tagNames); + filteredTagNames.retainAll(tagsNamesFilter); + return filteredTagNames.isEmpty(); } /** @@ -554,18 +655,7 @@ public class ReportGenerator { // Make keyword data type and give them set index for (TableReportModule module : tableModules) { - // This is a temporary workaround to avoid modifying the TableReportModule interface. - if (module instanceof ReportHTML) { - ReportHTML htmlReportModule = (ReportHTML)module; - htmlReportModule.startDataType(ARTIFACT_TYPE.TSK_KEYWORD_HIT.getDisplayName(), comment); - } - else if (module instanceof ReportExcel) { - ReportExcel excelReportModule = (ReportExcel)module; - excelReportModule.startDataType(ARTIFACT_TYPE.TSK_KEYWORD_HIT.getDisplayName(), comment); - } - else { - module.startDataType(ARTIFACT_TYPE.TSK_KEYWORD_HIT.getDisplayName()); - } + module.startDataType(ARTIFACT_TYPE.TSK_KEYWORD_HIT.getDisplayName(), comment); module.addSetIndex(lists); tableProgress.get(module).updateStatusLabel("Now processing " + ARTIFACT_TYPE.TSK_KEYWORD_HIT.getDisplayName() + "..."); @@ -708,18 +798,7 @@ public class ReportGenerator { } for (TableReportModule module : tableModules) { - // This is a temporary workaround to avoid modifying the TableReportModule interface. 
- if (module instanceof ReportHTML) { - ReportHTML htmlReportModule = (ReportHTML)module; - htmlReportModule.startDataType(ARTIFACT_TYPE.TSK_HASHSET_HIT.getDisplayName(), comment); - } - else if (module instanceof ReportExcel) { - ReportExcel excelReportModule = (ReportExcel)module; - excelReportModule.startDataType(ARTIFACT_TYPE.TSK_HASHSET_HIT.getDisplayName(), comment); - } - else { - module.startDataType(ARTIFACT_TYPE.TSK_HASHSET_HIT.getDisplayName()); - } + module.startDataType(ARTIFACT_TYPE.TSK_HASHSET_HIT.getDisplayName(), comment); module.addSetIndex(lists); tableProgress.get(module).updateStatusLabel("Now processing " + ARTIFACT_TYPE.TSK_HASHSET_HIT.getDisplayName() + "..."); @@ -981,7 +1060,7 @@ public class ReportGenerator { * @return List row values * @throws TskCoreException */ - private List getArtifactRow(ArtifactData artifactData, TableReportModule module) throws TskCoreException { + private List getArtifactRow(ArtifactData artifactData, TableReportModule module) { Map attributes = getMappedAttributes(artifactData.getAttributes(), module); List rowData = new ArrayList<>(); @@ -1152,7 +1231,7 @@ public class ReportGenerator { } rowData.add(makeCommaSeparatedList(artifactData.getTags())); - return rowData; // RJCTODO: Is anyone checking for null here? 
+ return rowData; } /** diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java b/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java index c3d0466119..14f47a798b 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java @@ -28,7 +28,6 @@ import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; -import java.io.FileInputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.UnsupportedEncodingException; @@ -45,17 +44,17 @@ import org.openide.filesystems.FileUtil; import org.openide.util.Exceptions; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.datamodel.Tags; -import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.datamodel.ContentUtils.ExtractFscContentVisitor; +import org.sleuthkit.autopsy.datamodel.Tags; import org.sleuthkit.autopsy.ingest.IngestManager; +import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Image; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.TskData; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.ContentTag; import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM; public class ReportHTML implements TableReportModule { @@ -91,7 +90,7 @@ public class ReportHTML implements TableReportModule { currentCase = Case.getCurrentCase(); skCase = currentCase.getSleuthkitCase(); - dataTypes = new TreeMap(); + dataTypes = new TreeMap<>(); path = ""; currentDataType = ""; @@ -129,10 +128,10 @@ public class ReportHTML implements TableReportModule { { String iconFilePath; String 
iconFileName; - InputStream in = null; + InputStream in; OutputStream output = null; - logger.log(Level.INFO, "useDataTypeIcon: dataType = " + dataType); + logger.log(Level.INFO, "useDataTypeIcon: dataType = {0}", dataType); // find the artifact with matching display name BlackboardArtifact.ARTIFACT_TYPE artifactType = null; @@ -296,39 +295,6 @@ public class ReportHTML implements TableReportModule { } } } - - /** - * Start a new HTML page for the given data type. Update the output stream to this page, - * and setup the web page header. - * @param title title of the data type - */ - @Override - public void startDataType(String title) { - String fTitle = dataTypeToFileName(title); - // Make a new out for this page - try { - //escape out slashes tha that appear in title - - out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(path + fTitle + getExtension()), "UTF-8")); - } catch (FileNotFoundException ex) { - logger.log(Level.SEVERE, "File not found: {0}", ex); - } catch (UnsupportedEncodingException ex) { - logger.log(Level.SEVERE, "Unrecognized encoding"); - } - - // Write the beginnings of a page - // Like , header, title, any content divs - try { - StringBuilder page = new StringBuilder(); - page.append("\n\n\t").append(title).append("\n\t\n\n\n\n"); - page.append("
").append(title).append("
\n
\n"); - out.write(page.toString()); - currentDataType = title; - rowCount = 0; - } catch (IOException ex) { - logger.log(Level.SEVERE, "Failed to write page head: {0}", ex); - } - } /** * Start a new HTML page for the given data type. Update the output stream to this page, @@ -338,7 +304,8 @@ public class ReportHTML implements TableReportModule { * @param name Name of the data type * @param comment Comment on the data type, may be the empty string */ - public void startDataType(String name, String comment) { + @Override + public void startDataType(String name, String description) { String title = dataTypeToFileName(name); try { out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(path + title + getExtension()), "UTF-8")); @@ -352,9 +319,9 @@ public class ReportHTML implements TableReportModule { StringBuilder page = new StringBuilder(); page.append("\n\n\t").append(name).append("\n\t\n\n\n\n"); page.append("
").append(name).append("
\n
\n"); - if (!comment.isEmpty()) { + if (!description.isEmpty()) { page.append("

"); - page.append(comment); + page.append(description); page.append("

\n"); } out.write(page.toString()); @@ -477,17 +444,17 @@ public class ReportHTML implements TableReportModule { * @param columnHeaders column headers * @param sourceArtifact source blackboard artifact for the table data */ - public void startTable(List columnHeaders, ARTIFACT_TYPE artifactType) { + public void startContentTagsTable(List columnHeaders) { StringBuilder htmlOutput = new StringBuilder(); htmlOutput.append("\n\n\t\n"); + + // Add the specified columns. for(String columnHeader : columnHeaders) { htmlOutput.append("\t\t\n"); } - // For file tag artifacts, add a column for a hyperlink to a local copy of the tagged file. - if (artifactType.equals(ARTIFACT_TYPE.TSK_TAG_FILE)) { - htmlOutput.append("\t\t\n"); - } + // Add a column for a hyperlink to a local copy of the tagged content. + htmlOutput.append("\t\t\n"); htmlOutput.append("\t\n\n"); @@ -527,20 +494,75 @@ public class ReportHTML implements TableReportModule { try { out.write(builder.toString()); } catch (IOException ex) { - logger.log(Level.SEVERE, "Failed to write row to out."); + logger.log(Level.SEVERE, "Failed to write row to out.", ex); } catch (NullPointerException ex) { - logger.log(Level.SEVERE, "Output writer is null. Page was not initialized before writing."); + logger.log(Level.SEVERE, "Output writer is null. Page was not initialized before writing.", ex); } } /** - * Add a row to the current table. + * Saves a local copy of a tagged file and adds a row with a hyper link to + * the file. * - * @param row values for each cell in the row - * @param sourceArtifact source blackboard artifact for the table data + * @param row Values for each data cell in the row. + * @param contentTag A content tag to use to make the hyper link. */ - public void addRow(List row, BlackboardArtifact sourceArtifact) { - addRowDataForSourceArtifact(row, sourceArtifact); + public void addRowWithTaggedContentHyperlink(List row, ContentTag contentTag) { + // Only handling AbstractFiles at present. 
+ AbstractFile file; + if (contentTag.getContent() instanceof AbstractFile) { + file = (AbstractFile)contentTag.getContent(); + } + else { + return; + } + + // Don't make a local copy of the file if it is a directory or unallocated space. + if (file.isDir() || + file.getType() == TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS || + file.getType() == TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) { + row.add(""); + return; + } + + // Make a folder for the local file with the same name as the tag. + StringBuilder localFilePath = new StringBuilder(); + localFilePath.append(path); + localFilePath.append(contentTag.getName().getDisplayName()); + File localFileFolder = new File(localFilePath.toString()); + if (!localFileFolder.exists()) { + localFileFolder.mkdirs(); + } + + // Construct a file name for the local file that incorporates the file id to ensure uniqueness. + String fileName = file.getName(); + String objectIdSuffix = "_" + file.getId(); + int lastDotIndex = fileName.lastIndexOf("."); + if (lastDotIndex != -1 && lastDotIndex != 0) { + // The file name has a conventional extension. Insert the object id before the '.' of the extension. + fileName = fileName.substring(0, lastDotIndex) + objectIdSuffix + fileName.substring(lastDotIndex, fileName.length()); + } + else { + // The file has no extension or the only '.' in the file is an initial '.', as in a hidden file. + // Add the object id to the end of the file name. + fileName += objectIdSuffix; + } + localFilePath.append(File.separator); + localFilePath.append(fileName); + + // If the local file doesn't already exist, create it now. + // The existence check is necessary because it is possible to apply multiple tags with the same name to a file. + File localFile = new File(localFilePath.toString()); + if (!localFile.exists()) { + ExtractFscContentVisitor.extract(file, localFile, null, null); + } + + // Add the hyperlink to the row. A column header for it was created in startTable(). 
+ StringBuilder localFileLink = new StringBuilder(); + localFileLink.append("View File"); + row.add(localFileLink.toString()); StringBuilder builder = new StringBuilder(); builder.append("\t\n"); @@ -561,90 +583,6 @@ public class ReportHTML implements TableReportModule { } } - /** - * Add cells particular to a type of artifact associated with the row. Assumes that the overload of startTable() that takes an artifact type was called. - * - * @param row The row. - * @param sourceArtifact The artifact associated with the row. - */ - private void addRowDataForSourceArtifact(List row, BlackboardArtifact sourceArtifact) { -// int artifactTypeID = sourceArtifact.getArtifactTypeID(); -// BlackboardArtifact.ARTIFACT_TYPE type = BlackboardArtifact.ARTIFACT_TYPE.fromID(artifactTypeID); -// switch (type) { -// case TSK_TAG_FILE: -// addRowDataForFileTagArtifact(row, sourceArtifact); -// break; -// default: -// break; -// } - } - - /** - * Saves a local copy of a tagged file and adds a hyper link to the file to the row. - * - * @param row The row. - * @param sourceArtifact The artifact associated with the row. - */ - private void addRowDataForFileTagArtifact(List row, BlackboardArtifact sourceArtifact) { -// try { -// AbstractFile file = Case.getCurrentCase().getSleuthkitCase().getAbstractFileById(sourceArtifact.getObjectID()); -// -// // Don't make a local copy of the file if it is a directory or unallocated space. -// if (file.isDir() || -// file.getType() == TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS || -// file.getType() == TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) { -// row.add(""); -// return; -// } -// -// // Make a folder for the local file with the same name as the tag. 
-// StringBuilder localFilePath = new StringBuilder(); -// localFilePath.append(path); -// HashSet tagNames = Tags.getUniqueTagNamesForArtifact(sourceArtifact); -// if (!tagNames.isEmpty()) { -// localFilePath.append(tagNames.iterator().next()); -// } -// File localFileFolder = new File(localFilePath.toString()); -// if (!localFileFolder.exists()) { -// localFileFolder.mkdirs(); -// } -// -// // Construct a file name for the local file that incorporates the corresponding object id to ensure uniqueness. -// String fileName = file.getName(); -// String objectIdSuffix = "_" + sourceArtifact.getObjectID(); -// int lastDotIndex = fileName.lastIndexOf("."); -// if (lastDotIndex != -1 && lastDotIndex != 0) { -// // The file name has a conventional extension. Insert the object id before the '.' of the extension. -// fileName = fileName.substring(0, lastDotIndex) + objectIdSuffix + fileName.substring(lastDotIndex, fileName.length()); -// } -// else { -// // The file has no extension or the only '.' in the file is an initial '.', as in a hidden file. -// // Add the object id to the end of the file name. -// fileName += objectIdSuffix; -// } -// localFilePath.append(File.separator); -// localFilePath.append(fileName); -// -// // If the local file doesn't already exist, create it now. -// // The existence check is necessary because it is possible to apply multiple tags with the same name to a file. -// File localFile = new File(localFilePath.toString()); -// if (!localFile.exists()) { -// ExtractFscContentVisitor.extract(file, localFile, null, null); -// } -// -// // Add the hyperlink to the row. A column header for it was created in startTable(). -// StringBuilder localFileLink = new StringBuilder(); -// localFileLink.append("View File"); -// row.add(localFileLink.toString()); -// } -// catch (TskCoreException ex) { -// logger.log(Level.WARNING, "Failed to get AbstractFile by ID.", ex); -// row.add(""); -// } - } - /** * Return a String date for the long date given. 
* @param date date as a long @@ -704,11 +642,11 @@ public class ReportHTML implements TableReportModule { "table tr:nth-child(even) td {background: #f3f3f3;}"; cssOut.write(css); } catch (FileNotFoundException ex) { - logger.log(Level.SEVERE, "Could not find index.css file to write to."); + logger.log(Level.SEVERE, "Could not find index.css file to write to.", ex); } catch (UnsupportedEncodingException ex) { - logger.log(Level.SEVERE, "Did not recognize encoding when writing index.css."); + logger.log(Level.SEVERE, "Did not recognize encoding when writing index.css.", ex); } catch (IOException ex) { - logger.log(Level.SEVERE, "Error creating Writer for index.css."); + logger.log(Level.SEVERE, "Error creating Writer for index.css.", ex); } finally { try { if(cssOut != null) { diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportWizardAction.java b/Core/src/org/sleuthkit/autopsy/report/ReportWizardAction.java index 67a91e6521..23d9020de1 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportWizardAction.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportWizardAction.java @@ -71,7 +71,7 @@ public final class ReportWizardAction extends CallableSystemAction implements P ReportGenerator generator = new ReportGenerator((Map)wiz.getProperty("tableModuleStates"), (Map)wiz.getProperty("generalModuleStates"), (Map)wiz.getProperty("fileModuleStates")); - generator.generateArtifactTableReports((Map)wiz.getProperty("artifactStates"), (Map)wiz.getProperty("tagStates")); + generator.generateBlackboardArtifactsReports((Map)wiz.getProperty("artifactStates"), (Map)wiz.getProperty("tagStates")); generator.generateFileListReports((Map)wiz.getProperty("fileReportOptions")); generator.generateGeneralReports(); generator.displayProgressPanels(); diff --git a/Core/src/org/sleuthkit/autopsy/report/TableReportModule.java b/Core/src/org/sleuthkit/autopsy/report/TableReportModule.java index 13397d4cd5..037b6f16e9 100644 --- 
a/Core/src/org/sleuthkit/autopsy/report/TableReportModule.java +++ b/Core/src/org/sleuthkit/autopsy/report/TableReportModule.java @@ -51,8 +51,9 @@ public interface TableReportModule extends ReportModule { * It is up to the report how the differentiation is shown. * * @param title String name of the data type + * @param description RJCTODO: fix this header comment */ - public void startDataType(String title); + public void startDataType(String title, String description); /** * End the current data type and prepare for either the end of the report From 054ce554800368e54d249ce1e609a6563125a3d7 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Mon, 21 Oct 2013 23:42:04 -0400 Subject: [PATCH 080/179] Minor cleanup and commenting --- .../autopsy/recentactivity/Chrome.java | 61 +++++++++++++------ .../autopsy/recentactivity/ExtractIE.java | 4 +- .../autopsy/recentactivity/Firefox.java | 57 +++++++++++------ 3 files changed, 83 insertions(+), 39 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java index 823d32454d..3412977112 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java @@ -57,12 +57,12 @@ import org.sleuthkit.datamodel.TskData; */ public class Chrome extends Extract { - private static final String chquery = "SELECT urls.url, urls.title, urls.visit_count, urls.typed_count, " + private static final String historyQuery = "SELECT urls.url, urls.title, urls.visit_count, urls.typed_count, " + "last_visit_time, urls.hidden, visits.visit_time, (SELECT urls.url FROM urls WHERE urls.id=visits.url) as from_visit, visits.transition FROM urls, visits WHERE urls.id = visits.url"; - private static final String chcookiequery = "select name, value, host_key, expires_utc,last_access_utc, creation_utc from cookies"; - private static final String chbookmarkquery = "SELECT 
starred.title, urls.url, starred.date_added, starred.date_modified, urls.typed_count,urls._last_visit_time FROM starred INNER JOIN urls ON urls.id = starred.url_id"; - private static final String chdownloadquery = "select full_path, url, start_time, received_bytes from downloads"; - private static final String chloginquery = "select origin_url, username_value, signon_realm from logins"; + private static final String cookieQuery = "select name, value, host_key, expires_utc,last_access_utc, creation_utc from cookies"; + private static final String bookmarkQuery = "SELECT starred.title, urls.url, starred.date_added, starred.date_modified, urls.typed_count,urls._last_visit_time FROM starred INNER JOIN urls ON urls.id = starred.url_id"; + private static final String downloadQuery = "select full_path, url, start_time, received_bytes from downloads"; + private static final String loginQuery = "select origin_url, username_value, signon_realm from logins"; private final Logger logger = Logger.getLogger(this.getClass().getName()); public int ChromeCount = 0; final public static String MODULE_VERSION = "1.0"; @@ -88,6 +88,11 @@ public class Chrome extends Extract { this.getDownload(dataSource, controller); } + /** + * Query for history databases and add artifacts + * @param dataSource + * @param controller + */ private void getHistory(Content dataSource, IngestDataSourceWorkerController controller) { FileManager fileManager = currentCase.getServices().getFileManager(); @@ -135,7 +140,7 @@ public class Chrome extends Extract { break; } List> tempList = null; - tempList = this.dbConnect(temps, chquery); + tempList = this.dbConnect(temps, historyQuery); logger.log(Level.INFO, moduleName + "- Now getting history from " + temps + " with " + tempList.size() + "artifacts identified."); for (HashMap result : tempList) { @@ -158,6 +163,11 @@ public class Chrome extends Extract { services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", 
BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY)); } + /** + * Search for bookmark files and make artifacts. + * @param dataSource + * @param controller + */ private void getBookmark(Content dataSource, IngestDataSourceWorkerController controller) { FileManager fileManager = currentCase.getServices().getFileManager(); @@ -175,6 +185,9 @@ public class Chrome extends Extract { while (j < bookmarkFiles.size()) { AbstractFile bookmarkFile = bookmarkFiles.get(j++); + if (bookmarkFile.getSize() == 0) { + continue; + } String temps = RAImageIngestModule.getRATempPath(currentCase, "chrome") + File.separator + bookmarkFile.getName().toString() + j + ".db"; try { ContentUtils.writeToFile(bookmarkFile, new File(temps)); @@ -270,8 +283,11 @@ public class Chrome extends Extract { services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK)); } - //COOKIES section - // This gets the cookie info + /** + * Queries for cookie files and adds artifacts + * @param dataSource + * @param controller + */ private void getCookie(Content dataSource, IngestDataSourceWorkerController controller) { FileManager fileManager = currentCase.getServices().getFileManager(); @@ -288,6 +304,9 @@ public class Chrome extends Extract { int j = 0; while (j < cookiesFiles.size()) { AbstractFile cookiesFile = cookiesFiles.get(j++); + if (cookiesFile.getSize() == 0) { + continue; + } String temps = RAImageIngestModule.getRATempPath(currentCase, "chrome") + File.separator + cookiesFile.getName().toString() + j + ".db"; try { ContentUtils.writeToFile(cookiesFile, new File(temps)); @@ -302,7 +321,7 @@ public class Chrome extends Extract { break; } - List> tempList = this.dbConnect(temps, chcookiequery); + List> tempList = this.dbConnect(temps, cookieQuery); logger.log(Level.INFO, moduleName + "- Now getting cookies from " + temps + " with " + tempList.size() + "artifacts identified."); for (HashMap result : tempList) { Collection bbattributes = new 
ArrayList(); @@ -327,8 +346,11 @@ public class Chrome extends Extract { services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE)); } - //Downloads section - // This gets the downloads info + /** + * Queries for download files and adds artifacts + * @param dataSource + * @param controller + */ private void getDownload(Content dataSource, IngestDataSourceWorkerController controller) { FileManager fileManager = currentCase.getServices().getFileManager(); @@ -362,7 +384,7 @@ public class Chrome extends Extract { break; } - List> tempList = this.dbConnect(temps, chdownloadquery); + List> tempList = this.dbConnect(temps, downloadQuery); logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + "artifacts identified."); for (HashMap result : tempList) { Collection bbattributes = new ArrayList(); @@ -380,7 +402,6 @@ public class Chrome extends Extract { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "Recent Activity", domain)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "Recent Activity", "Chrome")); this.addArtifact(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, historyFile, bbattributes); - } dbFile.delete(); @@ -389,8 +410,11 @@ public class Chrome extends Extract { services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD)); } - //Login/Password section - // This gets the user info + /** + * Queries for login files and adds artifacts + * @param dataSource + * @param controller + */ private void getLogin(Content dataSource, IngestDataSourceWorkerController controller) { FileManager fileManager = currentCase.getServices().getFileManager(); @@ -407,6 +431,9 @@ public class Chrome extends Extract { int j = 0; while (j < signonFiles.size()) { AbstractFile signonFile = signonFiles.get(j++); + if (signonFile.getSize() == 0) { + continue; + } String 
temps = RAImageIngestModule.getRATempPath(currentCase, "chrome") + File.separator + signonFile.getName().toString() + j + ".db"; try { ContentUtils.writeToFile(signonFile, new File(temps)); @@ -420,7 +447,7 @@ public class Chrome extends Extract { dbFile.delete(); break; } - List> tempList = this.dbConnect(temps, chloginquery); + List> tempList = this.dbConnect(temps, loginQuery); logger.log(Level.INFO, moduleName + "- Now getting login information from " + temps + " with " + tempList.size() + "artifacts identified."); for (HashMap result : tempList) { Collection bbattributes = new ArrayList(); @@ -451,12 +478,10 @@ public class Chrome extends Extract { @Override public void complete() { - logger.info("Chrome Extract has completed"); } @Override public void stop() { - logger.info("Attempted to stop chrome extract, but operation is not supported; skipping..."); } @Override diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java index 93997d8bdd..858200bca3 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java @@ -222,8 +222,8 @@ public class ExtractIE extends Extract { try { recentFiles = fileManager.findFiles(dataSource, "%.lnk", "Recent"); } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Error fetching 'index.data' files for Internet Explorer history."); - this.addErrorMessage(this.getName() + ": Error getting Recent Files."); + logger.log(Level.WARNING, "Error searching for .lnk files."); + this.addErrorMessage(this.getName() + ": Error getting lnk Files."); return; } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java index fa9b915332..329a391752 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java +++ 
b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java @@ -52,11 +52,11 @@ import org.sleuthkit.datamodel.TskCoreException; */ public class Firefox extends Extract { - private static final String ffquery = "SELECT moz_historyvisits.id,url,title,visit_count,(visit_date/1000000) as visit_date,from_visit,(SELECT url FROM moz_places WHERE id=moz_historyvisits.from_visit) as ref FROM moz_places, moz_historyvisits WHERE moz_places.id = moz_historyvisits.place_id AND hidden = 0"; - private static final String ffcookiequery = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed,(creationTime/1000000) as creationTime FROM moz_cookies"; - private static final String ff3cookiequery = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed FROM moz_cookies"; - private static final String ffbookmarkquery = "SELECT fk, moz_bookmarks.title, url FROM moz_bookmarks INNER JOIN moz_places ON moz_bookmarks.fk=moz_places.id"; - private static final String ffdownloadquery = "select target, source,(startTime/1000000) as startTime, maxBytes from moz_downloads"; + private static final String historyQuery = "SELECT moz_historyvisits.id,url,title,visit_count,(visit_date/1000000) as visit_date,from_visit,(SELECT url FROM moz_places WHERE id=moz_historyvisits.from_visit) as ref FROM moz_places, moz_historyvisits WHERE moz_places.id = moz_historyvisits.place_id AND hidden = 0"; + private static final String cookieQuery = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed,(creationTime/1000000) as creationTime FROM moz_cookies"; + private static final String cookieQueryV3 = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed FROM moz_cookies"; + private static final String bookmarkQuery = "SELECT fk, moz_bookmarks.title, url FROM moz_bookmarks INNER JOIN moz_places ON moz_bookmarks.fk=moz_places.id"; + private static final String downloadQuery = "SELECT target, source,(startTime/1000000) as startTime, maxBytes 
FROM moz_downloads"; public int FireFoxCount = 0; final public static String MODULE_VERSION = "1.0"; private IngestServices services; @@ -96,6 +96,10 @@ public class Firefox extends Extract { int j = 0; for (AbstractFile historyFile : historyFiles) { + if (historyFile.getSize() == 0) { + continue; + } + String fileName = historyFile.getName(); String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + j + ".db"; try { @@ -110,7 +114,7 @@ public class Firefox extends Extract { dbFile.delete(); break; } - List> tempList = this.dbConnect(temps, ffquery); + List> tempList = this.dbConnect(temps, historyQuery); logger.log(Level.INFO, moduleName + "- Now getting history from " + temps + " with " + tempList.size() + "artifacts identified."); for (HashMap result : tempList) { Collection bbattributes = new ArrayList(); @@ -133,6 +137,11 @@ public class Firefox extends Extract { services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY)); } + /** + * Queries for bookmark files and adds artifacts + * @param dataSource + * @param controller + */ private void getBookmark(Content dataSource, IngestDataSourceWorkerController controller) { FileManager fileManager = currentCase.getServices().getFileManager(); @@ -148,6 +157,9 @@ public class Firefox extends Extract { int j = 0; for (AbstractFile bookmarkFile : bookmarkFiles) { + if (bookmarkFile.getSize() == 0) { + continue; + } String fileName = bookmarkFile.getName(); String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + j + ".db"; try { @@ -162,7 +174,7 @@ public class Firefox extends Extract { dbFile.delete(); break; } - List> tempList = this.dbConnect(temps, ffbookmarkquery); + List> tempList = this.dbConnect(temps, bookmarkQuery); logger.log(Level.INFO, moduleName + "- Now getting bookmarks from " + temps + " with " + tempList.size() + "artifacts identified."); for (HashMap 
result : tempList) { @@ -182,10 +194,12 @@ public class Firefox extends Extract { services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK)); } - //COOKIES section - // This gets the cookie info + /** + * Queries for cookies file and adds artifacts + * @param dataSource + * @param controller + */ private void getCookie(Content dataSource, IngestDataSourceWorkerController controller) { - FileManager fileManager = currentCase.getServices().getFileManager(); List cookiesFiles = null; try { @@ -199,6 +213,9 @@ public class Firefox extends Extract { int j = 0; for (AbstractFile cookiesFile : cookiesFiles) { + if (cookiesFile.getSize() == 0) { + continue; + } String fileName = cookiesFile.getName(); String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + j + ".db"; try { @@ -216,9 +233,9 @@ public class Firefox extends Extract { boolean checkColumn = Util.checkColumn("creationTime", "moz_cookies", temps); String query = null; if (checkColumn) { - query = ffcookiequery; + query = cookieQuery; } else { - query = ff3cookiequery; + query = cookieQueryV3; } List> tempList = this.dbConnect(temps, query); @@ -239,13 +256,11 @@ public class Firefox extends Extract { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", (Long.valueOf(result.get("creationTime").toString())))); } bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", ((result.get("host").toString() != null) ? result.get("host").toString() : ""))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "RecentActivity", ((result.get("value").toString() != null) ? 
result.get("value").toString() : ""))); String domain = Util.extractDomain(result.get("host").toString()); domain = domain.replaceFirst("^\\.+(?!$)", ""); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", domain)); this.addArtifact(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes); - } ++j; dbFile.delete(); @@ -254,8 +269,11 @@ public class Firefox extends Extract { services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE)); } - //Downloads section - // This gets the downloads info + /** + * Queries for downloads files and adds artifacts + * @param dataSource + * @param controller + */ private void getDownload(Content dataSource, IngestDataSourceWorkerController controller) { FileManager fileManager = currentCase.getServices().getFileManager(); @@ -271,6 +289,9 @@ public class Firefox extends Extract { int j = 0; for (AbstractFile downloadsFile : downloadsFiles) { + if (downloadsFile.getSize() == 0) { + continue; + } String fileName = downloadsFile.getName(); String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + j + ".db"; int errors = 0; @@ -287,7 +308,7 @@ public class Firefox extends Extract { break; } - List> tempList = this.dbConnect(temps, ffdownloadquery); + List> tempList = this.dbConnect(temps, downloadQuery); logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + "artifacts identified."); for (HashMap result : tempList) { try { @@ -326,12 +347,10 @@ public class Firefox extends Extract { @Override public void complete() { - logger.info("Firefox Extract has completed."); } @Override public void stop() { - logger.info("Attmped to stop Firefox extract, but operation is not supported; skipping..."); } @Override From d992fbe2f754b2105f443c96a4bea37fccdda535 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 22 Oct 2013 10:42:18 -0400 
Subject: [PATCH 081/179] Removed old Tags class from datamodel package --- .../AddBlackboardArtifactTagAction.java | 1 - .../autopsy/actions/AddContentTagAction.java | 1 - .../datamodel/RootContentChildren.java | 2 - .../org/sleuthkit/autopsy/datamodel/Tags.java | 84 ------------------- .../sleuthkit/autopsy/datamodel/TagsNode.java | 1 - .../autopsy/datamodel/TagsNodeKey.java | 2 +- .../sleuthkit/autopsy/report/ReportHTML.java | 3 - 7 files changed, 1 insertion(+), 93 deletions(-) delete mode 100644 Core/src/org/sleuthkit/autopsy/datamodel/Tags.java diff --git a/Core/src/org/sleuthkit/autopsy/actions/AddBlackboardArtifactTagAction.java b/Core/src/org/sleuthkit/autopsy/actions/AddBlackboardArtifactTagAction.java index d81e978bc1..8a5a0329a4 100755 --- a/Core/src/org/sleuthkit/autopsy/actions/AddBlackboardArtifactTagAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/AddBlackboardArtifactTagAction.java @@ -24,7 +24,6 @@ import javax.swing.JOptionPane; import org.openide.util.Utilities; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.datamodel.Tags; import org.sleuthkit.datamodel.TagName; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.TskCoreException; diff --git a/Core/src/org/sleuthkit/autopsy/actions/AddContentTagAction.java b/Core/src/org/sleuthkit/autopsy/actions/AddContentTagAction.java index 1483ce36c9..20fdc03a72 100755 --- a/Core/src/org/sleuthkit/autopsy/actions/AddContentTagAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/AddContentTagAction.java @@ -24,7 +24,6 @@ import javax.swing.JOptionPane; import org.openide.util.Utilities; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.datamodel.Tags; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.TagName; import org.sleuthkit.datamodel.TskCoreException; diff --git 
a/Core/src/org/sleuthkit/autopsy/datamodel/RootContentChildren.java b/Core/src/org/sleuthkit/autopsy/datamodel/RootContentChildren.java index ff91e44112..8b1e2ecc47 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/RootContentChildren.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/RootContentChildren.java @@ -98,8 +98,6 @@ public class RootContentChildren extends AbstractContentChildren { this.refreshKey(o); else if (o instanceof EmailExtracted) this.refreshKey(o); - else if (o instanceof Tags) - this.refreshKey(o); else if (o instanceof TagsNodeKey) this.refreshKey(o); else if (o instanceof ExtractedContent) diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/Tags.java b/Core/src/org/sleuthkit/autopsy/datamodel/Tags.java deleted file mode 100644 index 03a749cfb3..0000000000 --- a/Core/src/org/sleuthkit/autopsy/datamodel/Tags.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2013 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.sleuthkit.autopsy.datamodel; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.logging.Level; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; -import org.sleuthkit.datamodel.BlackboardAttribute; -import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; -import org.sleuthkit.datamodel.TskCoreException; - -public class Tags { - - private static final Logger logger = Logger.getLogger(Tags.class.getName()); - public static final String BOOKMARK_TAG_NAME = "Bookmark"; - - /** - * Looks up the tag names associated with either a tagged artifact or a tag artifact. - * - * @param artifact The artifact - * @return A set of unique tag names - */ - public static HashSet getUniqueTagNamesForArtifact(BlackboardArtifact artifact) { - return getUniqueTagNamesForArtifact(artifact.getArtifactID(), artifact.getArtifactTypeID()); - } - - /** - * Looks up the tag names associated with either a tagged artifact or a tag artifact. 
- * - * @param artifactID The ID of the artifact - * @param artifactTypeID The ID of the artifact type - * @return A set of unique tag names - */ - public static HashSet getUniqueTagNamesForArtifact(long artifactID, int artifactTypeID) { - HashSet tagNames = new HashSet<>(); - - try { - ArrayList tagArtifactIDs = new ArrayList<>(); - if (artifactTypeID == ARTIFACT_TYPE.TSK_TAG_FILE.getTypeID() || - artifactTypeID == ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getTypeID()) { - tagArtifactIDs.add(artifactID); - } else { - List tags = Case.getCurrentCase().getSleuthkitCase().getBlackboardArtifacts(ATTRIBUTE_TYPE.TSK_TAGGED_ARTIFACT, artifactID); - for (BlackboardArtifact tag : tags) { - tagArtifactIDs.add(tag.getArtifactID()); - } - } - - for (Long tagArtifactID : tagArtifactIDs) { - String whereClause = "WHERE artifact_id = " + tagArtifactID + " AND attribute_type_id = " + ATTRIBUTE_TYPE.TSK_TAG_NAME.getTypeID(); - List attributes = Case.getCurrentCase().getSleuthkitCase().getMatchingAttributes(whereClause); - for (BlackboardAttribute attr : attributes) { - tagNames.add(attr.getValueString()); - } - } - } - catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Failed to get tags for artifact " + artifactID, ex); - } - - return tagNames; - } -} diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java index 908252a95f..76a56f8ab9 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java @@ -24,7 +24,6 @@ import org.openide.nodes.ChildFactory; import org.openide.nodes.Children; import org.openide.nodes.Node; import org.openide.nodes.Sheet; -import org.sleuthkit.autopsy.actions.GetTagNameAndCommentDialog; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.TagName; diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/TagsNodeKey.java 
b/Core/src/org/sleuthkit/autopsy/datamodel/TagsNodeKey.java index 56cd249705..78d0006178 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/TagsNodeKey.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/TagsNodeKey.java @@ -23,7 +23,7 @@ package org.sleuthkit.autopsy.datamodel; * RootContentChildren class. RootContentChildren is a NetBeans child node * factory built on top of the NetBeans Children.Keys class. */ -public class TagsNodeKey implements AutopsyVisitableItem { // RJCTODO: Rename to Tags when old Tags class is deleted (for the sake of consistency). Add comments to similar classes. +public class TagsNodeKey implements AutopsyVisitableItem { // Creation of a TagsNode object corresponding to a TagsNodeKey object is done // by a CreateAutopsyNodeVisitor dispatched from the AbstractContentChildren // override of Children.Keys.createNodes(). diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java b/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java index 14f47a798b..85f76bb31f 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java @@ -35,7 +35,6 @@ import java.io.Writer; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Date; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.TreeMap; @@ -45,7 +44,6 @@ import org.openide.util.Exceptions; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.datamodel.ContentUtils.ExtractFscContentVisitor; -import org.sleuthkit.autopsy.datamodel.Tags; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Image; @@ -53,7 +51,6 @@ import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.BlackboardArtifact; import 
org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; -import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.ContentTag; import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM; From ff54c75cfdca002a79414169a00a86e819bd5d69 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 22 Oct 2013 13:32:49 -0400 Subject: [PATCH 082/179] Fixed DRVT node expansion bug and added selection logic for node deletion --- .../autopsy/datamodel/ContentTagTypeNode.java | 2 +- .../autopsy/datamodel/TagNameNode.java | 2 +- .../sleuthkit/autopsy/datamodel/TagsNode.java | 6 +- .../BlackboardArtifactTagTypeNode.java | 2 +- .../directorytree/DataResultFilterNode.java | 17 +++++ .../DirectoryTreeTopComponent.java | 64 ++++++++++++------- .../autopsy/report/ReportGenerator.java | 42 ++++++------ 7 files changed, 79 insertions(+), 56 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java index 6c1a454357..df7b8aead8 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java @@ -50,7 +50,7 @@ public class ContentTagTypeNode extends DisplayableItemNode { Logger.getLogger(ContentTagTypeNode.class.getName()).log(Level.SEVERE, "Failed to get content tags count for " + tagName.getDisplayName() + " tag name", ex); } - super.setName(DISPLAY_NAME + " (" + tagsCount + ")"); + super.setName(DISPLAY_NAME); super.setDisplayName(DISPLAY_NAME + " (" + tagsCount + ")"); this.setIconBaseWithExtension(ICON_PATH); } diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java index 05f4a00627..6d41f4911b 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java @@ -53,7 +53,7 @@ public class TagNameNode extends DisplayableItemNode { 
Logger.getLogger(TagNameNode.class.getName()).log(Level.SEVERE, "Failed to get tags count for " + tagName.getDisplayName() + " tag name", ex); } - super.setName(tagName.getDisplayName() + " (" + tagsCount + ")"); + super.setName(tagName.getDisplayName()); super.setDisplayName(tagName.getDisplayName() + " (" + tagsCount + ")"); if (tagName.getDisplayName().equals("Bookmark")) { setIconBaseWithExtension(BOOKMARK_TAG_ICON_PATH); diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java index 76a56f8ab9..3597e34108 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java @@ -46,10 +46,6 @@ public class TagsNode extends DisplayableItemNode { this.setIconBaseWithExtension(ICON_PATH); } - public static String getNodeName() { - return DISPLAY_NAME; - } - @Override public boolean isLeafTypeNode() { return false; @@ -78,7 +74,7 @@ public class TagsNode extends DisplayableItemNode { @Override protected boolean createKeys(List keys) { try { - Case.getCurrentCase().getServices().getTagsManager().getAllTagNames(keys); + Case.getCurrentCase().getServices().getTagsManager().getTagNamesInUse(keys); } catch (TskCoreException ex) { Logger.getLogger(TagNameNodeFactory.class.getName()).log(Level.SEVERE, "Failed to get tag names", ex); diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java b/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java index 543db96eb0..e8b0cb9fab 100755 --- a/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java @@ -55,7 +55,7 @@ public class BlackboardArtifactTagTypeNode extends DisplayableItemNode { Logger.getLogger(BlackboardArtifactTagTypeNode.class.getName()).log(Level.SEVERE, "Failed to get blackboard artifact tags count for " + 
tagName.getDisplayName() + " tag name", ex); } - super.setName(DISPLAY_NAME + " (" + tagsCount + ")"); + super.setName(DISPLAY_NAME); super.setDisplayName(DISPLAY_NAME + " (" + tagsCount + ")"); this.setIconBaseWithExtension(ICON_PATH); } diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/DataResultFilterNode.java b/Core/src/org/sleuthkit/autopsy/directorytree/DataResultFilterNode.java index 1619c18ccf..632d7a63ca 100755 --- a/Core/src/org/sleuthkit/autopsy/directorytree/DataResultFilterNode.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/DataResultFilterNode.java @@ -39,6 +39,7 @@ import org.sleuthkit.autopsy.datamodel.AbstractAbstractFileNode.AbstractFileProp import org.sleuthkit.autopsy.datamodel.AbstractFsContentNode; import org.sleuthkit.autopsy.datamodel.ArtifactTypeNode; import org.sleuthkit.autopsy.datamodel.BlackboardArtifactNode; +import org.sleuthkit.autopsy.datamodel.ContentTagTypeNode; import org.sleuthkit.autopsy.datamodel.LocalFileNode; import org.sleuthkit.autopsy.datamodel.DeletedContent.DeletedContentsChildren.DeletedContentNode; import org.sleuthkit.autopsy.datamodel.DeletedContent.DeletedContentsNode; @@ -63,6 +64,7 @@ import org.sleuthkit.autopsy.datamodel.LayoutFileNode; import org.sleuthkit.autopsy.datamodel.RecentFilesFilterNode; import org.sleuthkit.autopsy.datamodel.RecentFilesNode; import org.sleuthkit.autopsy.datamodel.FileTypesNode; +import org.sleuthkit.autopsy.datamodel.TagNameNode; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardAttribute; @@ -403,6 +405,21 @@ public class DataResultFilterNode extends FilterNode { return openChild(atn); } + @Override + public AbstractAction visit(TagNameNode node) { + return openChild(node); + } + + @Override + public AbstractAction visit(ContentTagTypeNode node) { + return openChild(node); + } + + @Override + public AbstractAction visit(BlackboardArtifactTagTypeNode node) { + return 
openChild(node); + } + @Override public AbstractAction visit(DirectoryNode dn) { if (dn.getDisplayName().equals(DirectoryNode.DOTDOTDIR)) { diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeTopComponent.java b/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeTopComponent.java index d376f06722..b50a5c2122 100644 --- a/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeTopComponent.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeTopComponent.java @@ -27,6 +27,7 @@ import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; import java.util.ArrayList; +import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.logging.Level; @@ -856,38 +857,53 @@ public final class DirectoryTreeTopComponent extends TopComponent implements Dat } /** - * Set selected node using the previously saved selection path to the - * selected node + * Set the selected node using a path to a previously selected node. * - * @param path node path with node names - * @param rootNodeName name of the root node to match or null if any + * @param previouslySelectedNodePath Path to a previously selected node. + * @param rootNodeName Name of the root node to match, may be null. 
*/ - private void setSelectedNode(final String[] path, final String rootNodeName) { - if (path == null) { + private void setSelectedNode(final String[] previouslySelectedNodePath, final String rootNodeName) { + if (previouslySelectedNodePath == null) { return; } SwingUtilities.invokeLater(new Runnable() { @Override public void run() { - - if (path.length > 0 && (rootNodeName == null || path[0].equals(rootNodeName))) { - try { - Node newSelection = NodeOp.findPath(em.getRootContext(), path); - - if (newSelection != null) { - if (rootNodeName != null) { - //called from tree auto refresh context - //remove last from backlist, because auto select will result in duplication - backList.pollLast(); - } - em.setExploredContextAndSelection(newSelection, new Node[]{newSelection}); + if (previouslySelectedNodePath.length > 0 && (rootNodeName == null || previouslySelectedNodePath[0].equals(rootNodeName))) { + Node selectedNode = null; + ArrayList selectedNodePath = new ArrayList<>(Arrays.asList(previouslySelectedNodePath)); + while (null == selectedNode && !selectedNodePath.isEmpty()) { + try { + selectedNode = NodeOp.findPath(em.getRootContext(), selectedNodePath.toArray(new String[0])); + } + catch (NodeNotFoundException ex) { + // The selected node may have been deleted (e.g., a deleted tag), so truncate the path and try again. 
+ if (selectedNodePath.size() > 1) { + selectedNodePath.remove(selectedNodePath.size() - 1); + } + else { + StringBuilder nodePath = new StringBuilder(); + for (int i = 0; i < previouslySelectedNodePath.length; ++i) { + nodePath.append(previouslySelectedNodePath[i]).append("/"); + } + logger.log(Level.WARNING, "Failed to find any nodes to select on path " + nodePath.toString(), ex); + break; + } + } + } + + if (null != selectedNode) { + if (rootNodeName != null) { + //called from tree auto refresh context + //remove last from backlist, because auto select will result in duplication + backList.pollLast(); + } + try { + em.setExploredContextAndSelection(selectedNode, new Node[]{selectedNode}); + } + catch (PropertyVetoException ex) { + logger.log(Level.WARNING, "Property veto from ExplorerManager setting selection to " + selectedNode.getName(), ex); } - - // We need to set the selection, which will refresh dataresult and get rid of the oob exception - } catch (NodeNotFoundException ex) { - logger.log(Level.WARNING, "Node not found", ex); - } catch (PropertyVetoException ex) { - logger.log(Level.WARNING, "Property Veto", ex); } } } diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java b/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java index 792bfa169b..bf66eb66fe 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java @@ -494,24 +494,19 @@ public class ReportGenerator { // Give the modules the rows for the content tags. for (ContentTag tag : tags) { - // Apply the tag names filter. 
- if (!tagNamesFilter.isEmpty()) { - if (tagNamesFilter.contains(tag.getName().getDisplayName())) { - continue; + if (passesTagNamesFilter(tag.getName().getDisplayName())) { + ArrayList rowData = new ArrayList<>(Arrays.asList(tag.getContent().getName(), tag.getName().getDisplayName(), tag.getComment())); + for (TableReportModule module : tableModules) { + // @@@ This casting is a tricky little workaround to allow the HTML report module to slip in a content hyperlink. + if (module instanceof ReportHTML) { + ReportHTML htmlReportModule = (ReportHTML)module; + htmlReportModule.addRowWithTaggedContentHyperlink(rowData, tag); + } + else { + module.addRow(rowData); + } } } - - ArrayList rowData = new ArrayList<>(Arrays.asList(tag.getContent().getName(), tag.getName().getDisplayName(), tag.getComment())); - for (TableReportModule module : tableModules) { - // @@@ This casting is a tricky little workaround to allow the HTML report module to slip in a content hyperlink. - if (module instanceof ReportHTML) { - ReportHTML htmlReportModule = (ReportHTML)module; - htmlReportModule.addRowWithTaggedContentHyperlink(rowData, tag); - } - else { - module.addRow(rowData); - } - } } // The the modules content tags reporting is ended. @@ -553,16 +548,11 @@ public class ReportGenerator { // Give the modules the rows for the content tags. for (BlackboardArtifactTag tag : tags) { - // Apply the tag names filter. 
- if (!tagNamesFilter.isEmpty()) { - if (tagNamesFilter.contains(tag.getName().getDisplayName())) { - continue; + if (passesTagNamesFilter(tag.getName().getDisplayName())) { + for (TableReportModule module : tableModules) { + module.addRow(new ArrayList<>(Arrays.asList(tag.getArtifact().getArtifactTypeName(), tag.getName().getDisplayName(), tag.getComment(), tag.getContent().getName()))); } } - - for (TableReportModule module : tableModules) { - module.addRow(new ArrayList<>(Arrays.asList(tag.getArtifact().getArtifactTypeName(), tag.getName().getDisplayName(), tag.getComment(), tag.getContent().getName()))); - } } // The the modules blackboard artifact tags reporting is ended. @@ -573,6 +563,10 @@ public class ReportGenerator { } } + boolean passesTagNamesFilter(String tagName) { + return tagNamesFilter.isEmpty() || tagNamesFilter.contains(tagName); + } + void removeCancelledTableReportModules() { Iterator iter = tableModules.iterator(); while (iter.hasNext()) { From 8b487ca56da8e7bd369dc660afe1abe1c9bb445c Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 22 Oct 2013 17:21:44 -0400 Subject: [PATCH 083/179] Removed prematuer merge of some new tags api files --- .../autopsy/casemodule/services/Services.java | 8 - .../casemodule/services/TagsManager.java | 247 ------------------ .../directorytree/TagAbstractFileAction.java | 169 ++++++------ .../TagBlackboardArtifactAction.java | 171 ++++++------ 4 files changed, 151 insertions(+), 444 deletions(-) delete mode 100755 Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java index 069b13ef2e..10663c173b 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java @@ -41,26 +41,18 @@ public class Services implements Closeable { // services private FileManager 
fileManager; - private TagsManager tagsManager; public Services(SleuthkitCase tskCase) { this.tskCase = tskCase; //create and initialize FileManager as early as possibly in the new/opened Case fileManager = new FileManager(tskCase); services.add(fileManager); - - tagsManager = new TagsManager(tskCase); - services.add(tagsManager); } public FileManager getFileManager() { return fileManager; } - public TagsManager getTagsManager() { - return tagsManager; - } - @Override public void close() throws IOException { // close all services diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java deleted file mode 100755 index 2ae7426655..0000000000 --- a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java +++ /dev/null @@ -1,247 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2013 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.sleuthkit.autopsy.casemodule.services; - -import java.io.Closeable; -import java.io.IOException; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.logging.Level; -import java.util.logging.Logger; -import org.sleuthkit.autopsy.coreutils.ModuleSettings; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardArtifactTag; -import org.sleuthkit.datamodel.Content; -import org.sleuthkit.datamodel.ContentTag; -import org.sleuthkit.datamodel.SleuthkitCase; -import org.sleuthkit.datamodel.TagType; -import org.sleuthkit.datamodel.TskCoreException; - -/** - * A singleton instance of this class functions as an Autopsy service that - * manages the creation, updating, and deletion of tags applied to Content and - * BlackboardArtifacts objects by users. - */ -public class TagsManager implements Closeable { - private static final String TAGS_SETTINGS_FILE_NAME = "tags"; - private static final String TAG_TYPES_SETTING_KEY = "tagTypes"; - private final SleuthkitCase tskCase; - private final HashMap tagTypes = new HashMap<>(); - - TagsManager(SleuthkitCase tskCase) { - this.tskCase = tskCase; - loadTagTypesFromTagSettings(); - } - - private void loadTagTypesFromTagSettings() { - // Get any tag types already added to the current case. - try { - List currentTagTypes = tskCase.getTagTypes(); - for (TagType tagType : currentTagTypes) { - tagTypes.put(tagType.getDisplayName(), tagType); - } - } - catch (TskCoreException ex) { - Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to get tag types from the current case", ex); - } - - // Read the saved tag types, if any, from the tags settings file and - // add them to the current case if they haven't already been added, e.g, - // when the case was last opened. 
- String setting = ModuleSettings.getConfigSetting(TAGS_SETTINGS_FILE_NAME, TAG_TYPES_SETTING_KEY); - if (null != setting && !setting.isEmpty()) { - // Read the tag types setting and break in into tag type tuples. - List tagTypeTuples = Arrays.asList(setting.split(";")); - - // Parse each tuple and add the tag types to the current case, one - // at a time to gracefully discard any duplicates or corrupt tuples. - for (String tagTypeTuple : tagTypeTuples) { - String[] tagTypeAttributes = tagTypeTuple.split(","); - if (!tagTypes.containsKey(tagTypeAttributes[0])) { - TagType tagType = new TagType(tagTypeAttributes[0], tagTypeAttributes[1], TagType.HTML_COLOR.getColorByName(tagTypeAttributes[2])); - try { - tskCase.addTagType(tagType); - tagTypes.put(tagType.getDisplayName(),tagType); - } - catch(TskCoreException ex) { - Logger.getLogger(TagsManager.class.getName()).log(Level.WARNING, "Failed to add saved " + tagType.getDisplayName() + " tag type to the current case", ex); - } - } - } - - saveTagTypesToTagsSettings(); - } - } - - private void saveTagTypesToTagsSettings() { - if (!tagTypes.isEmpty()) { - StringBuilder setting = new StringBuilder(); - for (TagType tagType : tagTypes.values()) { - if (setting.length() != 0) { - setting.append(";"); - } - setting.append(tagType.getDisplayName()).append(","); - setting.append(tagType.getDescription()).append(","); - setting.append(tagType.getColor().name()); - } - - ModuleSettings.setConfigSetting(TAGS_SETTINGS_FILE_NAME, TAG_TYPES_SETTING_KEY, setting.toString()); - } - } - - /** - * Gets a list of all tag types currently available for tagging content or - * blackboard artifacts. - * @return A list, possibly empty, of TagType data transfer objects (DTOs). - * @throws TskCoreException - */ - public List getTagTypes() throws TskCoreException { - return tskCase.getTagTypes(); - } - - /** - * Adds a new tag type to the current case and to the tags settings file. - * @param displayName The display name for the new tag type. 
- * @return A TagType object representing the new type on success, null on failure. - * @throws TskCoreException - */ - public TagType addTagType(String displayName) throws TagTypeAlreadyExistsException, TskCoreException { - return addTagType(displayName, "", TagType.HTML_COLOR.NONE); - } - - /** - * Adds a new tag type to the current case and to the tags settings file. - * @param displayName The display name for the new tag type. - * @param description The description for the new tag type. - * @return A TagType object representing the new type on success, null on failure. - * @throws TskCoreException - */ - public TagType addTagType(String displayName, String description) throws TagTypeAlreadyExistsException, TskCoreException { - return addTagType(displayName, description, TagType.HTML_COLOR.NONE); - } - - /** - * Adds a new tag type to the current case and to the tags settings file. - * @param displayName The display name for the new tag type. - * @param description The description for the new tag type. - * @param color The HTML color to associate with the new tag type. - * @return A TagType object representing the new type. - * @throws TskCoreException - */ - public synchronized TagType addTagType(String displayName, String description, TagType.HTML_COLOR color) throws TagTypeAlreadyExistsException, TskCoreException { - if (tagTypes.containsKey(displayName)) { - throw new TagTypeAlreadyExistsException(); - } - - TagType newTagType = new TagType(displayName, description, color); - tskCase.addTagType(newTagType); - tagTypes.put(newTagType.getDisplayName(), newTagType); - saveTagTypesToTagsSettings(); - return newTagType; - } - - public class TagTypeAlreadyExistsException extends Exception { - } - - /** - * Tags a Content object. - * @param content The Content to tag. - * @param tagType The type of tag to add. 
- * @throws TskCoreException - */ - public void addContentTag(Content content, TagType tagType) throws TskCoreException { - addContentTag(content, tagType, "", 0, content.getSize() - 1); - } - - /** - * Tags a Content object. - * @param content The Content to tag. - * @param tagType The type of tag to add. - * @param comment A comment to store with the tag. - * @throws TskCoreException - */ - public void addContentTag(Content content, TagType tagType, String comment) throws TskCoreException { - addContentTag(content, tagType, comment, 0, content.getSize() - 1); - } - - /** - * Tags a Content object or a portion of a content object. - * @param content The Content to tag. - * @param tagType The type of tag to add. - * @param comment A comment to store with the tag. - * @param beginByteOffset Designates the beginning of a tagged extent. - * @param endByteOffset Designates the end of a tagged extent. - * @throws TskCoreException - */ - public void addContentTag(Content content, TagType tagType, String comment, long beginByteOffset, long endByteOffset) throws IllegalArgumentException, TskCoreException { - if (beginByteOffset < 0) { - throw new IllegalArgumentException("Content extent incorrect: beginByteOffset < 0"); - } - - if (endByteOffset <= beginByteOffset) { - throw new IllegalArgumentException("Content extent incorrect: endByteOffset <= beginByteOffset"); - } - - if (endByteOffset > content.getSize() - 1) { - throw new IllegalArgumentException("Content extent incorrect: endByteOffset exceeds content size"); - } - - tskCase.addContentTag(new ContentTag(content, tagType, comment, beginByteOffset, endByteOffset)); - } - - /** - * Deletes a content tag. - * @param tag The tag to delete. - * @throws TskCoreException - */ - public void deleteContentTag(ContentTag tag) throws TskCoreException { - tskCase.deleteContentTag(tag); - } - - /** - * Tags a BlackboardArtifact object. - * @param artifact The BlackboardArtifact to tag. - * @param tagType The type of tag to add. 
- * @throws TskCoreException - */ - public void addBlackboardArtifactTag(BlackboardArtifact artifact, TagType tagType) throws TskCoreException { - addBlackboardArtifactTag(artifact, tagType, ""); - } - - /** - * Tags a BlackboardArtifact object. - * @param artifact The BlackboardArtifact to tag. - * @param tagType The type of tag to add. - * @param comment A comment to store with the tag. - * @throws TskCoreException - */ - public void addBlackboardArtifactTag(BlackboardArtifact artifact, TagType tagType, String comment) throws TskCoreException { - tskCase.addBlackboardArtifactTag(new BlackboardArtifactTag(artifact, tagType, comment)); - } - - void deleteBlackboardArtifactTag(BlackboardArtifactTag tag) throws TskCoreException { - tskCase.deleteBlackboardArtifactTag(tag); - } - - @Override - public void close() throws IOException { - saveTagTypesToTagsSettings(); - } -} diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/TagAbstractFileAction.java b/Core/src/org/sleuthkit/autopsy/directorytree/TagAbstractFileAction.java index 5afbf89fef..f174e79121 100755 --- a/Core/src/org/sleuthkit/autopsy/directorytree/TagAbstractFileAction.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/TagAbstractFileAction.java @@ -1,94 +1,75 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2013 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.sleuthkit.autopsy.directorytree; - -import java.awt.event.ActionEvent; -import java.util.Collection; -import java.util.logging.Level; -import javax.swing.AbstractAction; -import javax.swing.JMenuItem; -import javax.swing.JOptionPane; -import org.openide.util.Utilities; -import org.openide.util.actions.Presenter; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.services.TagsManager; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.datamodel.Tags; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.TagType; -import org.sleuthkit.datamodel.TskCoreException; - -public class TagAbstractFileAction extends AbstractAction implements Presenter.Popup { - // This class is a singleton to support multi-selection of nodes, since - // org.openide.nodes.NodeOp.findActions(Node[] nodes) will only pick up an Action if every - // node in the array returns a reference to the same action object from Node.getActions(boolean). - private static TagAbstractFileAction instance; - - public static synchronized TagAbstractFileAction getInstance() { - if (null == instance) { - instance = new TagAbstractFileAction(); - } - return instance; - } - - private TagAbstractFileAction() { - } - - @Override - public JMenuItem getPopupPresenter() { - return new TagAbstractFileMenu(); - } - - @Override - public void actionPerformed(ActionEvent e) { - // Do nothing - this action should never be performed. - // Submenu actions are invoked instead. - } - - private static class TagAbstractFileMenu extends TagMenu { - public TagAbstractFileMenu() { - super(Utilities.actionsGlobalContext().lookupAll(AbstractFile.class).size() > 1 ? 
"Tag Files" : "Tag File"); - } - - @Override - protected void applyTag(String tagDisplayName, String comment) { - try { - TagsManager tagsManager = Case.getCurrentCase().getServices().getTagsManager(); - TagType tagType = tagsManager.addTagType(tagDisplayName); - - Collection selectedFiles = Utilities.actionsGlobalContext().lookupAll(AbstractFile.class); - for (AbstractFile file : selectedFiles) { - Tags.createTag(file, tagDisplayName, comment); -// try { -// tagsManager.addContentTag(file, tagType); -// } -// catch (TskCoreException ex) { -// Logger.getLogger(TagAbstractFileMenu.class.getName()).log(Level.SEVERE, "Error tagging content", ex); -// } - } - } - catch (TagsManager.TagTypeAlreadyExistsException ex) { - JOptionPane.showMessageDialog(null, "A " + tagDisplayName + " tag type has already been defined.", "Duplicate Tag Type", JOptionPane.ERROR_MESSAGE); - } - catch (TskCoreException ex) { - Logger.getLogger(TagAbstractFileMenu.class.getName()).log(Level.SEVERE, "Error adding " + tagDisplayName + " tag type", ex); - } - } - } -} +/* + * Autopsy Forensic Browser + * + * Copyright 2013 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.directorytree; + +import java.awt.event.ActionEvent; +import java.util.Collection; +import java.util.logging.Level; +import javax.swing.AbstractAction; +import javax.swing.JMenuItem; +import javax.swing.JOptionPane; +import org.openide.util.Utilities; +import org.openide.util.actions.Presenter; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.datamodel.Tags; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.TskCoreException; + +public class TagAbstractFileAction extends AbstractAction implements Presenter.Popup { + // This class is a singleton to support multi-selection of nodes, since + // org.openide.nodes.NodeOp.findActions(Node[] nodes) will only pick up an Action if every + // node in the array returns a reference to the same action object from Node.getActions(boolean). + private static TagAbstractFileAction instance; + + public static synchronized TagAbstractFileAction getInstance() { + if (null == instance) { + instance = new TagAbstractFileAction(); + } + return instance; + } + + private TagAbstractFileAction() { + } + + @Override + public JMenuItem getPopupPresenter() { + return new TagAbstractFileMenu(); + } + + @Override + public void actionPerformed(ActionEvent e) { + // Do nothing - this action should never be performed. + // Submenu actions are invoked instead. + } + + private static class TagAbstractFileMenu extends TagMenu { + public TagAbstractFileMenu() { + super(Utilities.actionsGlobalContext().lookupAll(AbstractFile.class).size() > 1 ? 
"Tag Files" : "Tag File"); + } + + @Override + protected void applyTag(String tagDisplayName, String comment) { + Collection selectedFiles = Utilities.actionsGlobalContext().lookupAll(AbstractFile.class); + for (AbstractFile file : selectedFiles) { + Tags.createTag(file, tagDisplayName, comment); + } + } + } +} diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/TagBlackboardArtifactAction.java b/Core/src/org/sleuthkit/autopsy/directorytree/TagBlackboardArtifactAction.java index 3f2c8fd407..460c5b1a90 100755 --- a/Core/src/org/sleuthkit/autopsy/directorytree/TagBlackboardArtifactAction.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/TagBlackboardArtifactAction.java @@ -1,95 +1,76 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2013 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.sleuthkit.autopsy.directorytree; - -import java.awt.event.ActionEvent; -import java.util.Collection; -import java.util.logging.Level; -import javax.swing.AbstractAction; -import javax.swing.JMenuItem; -import javax.swing.JOptionPane; -import org.openide.util.Utilities; -import org.openide.util.actions.Presenter; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.services.TagsManager; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.datamodel.Tags; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.TagType; -import org.sleuthkit.datamodel.TskCoreException; - -public class TagBlackboardArtifactAction extends AbstractAction implements Presenter.Popup { - // This class is a singleton to support multi-selection of nodes, since - // org.openide.nodes.NodeOp.findActions(Node[] nodes) will only pick up an Action if every - // node in the array returns a reference to the same action object from Node.getActions(boolean). - private static TagBlackboardArtifactAction instance; - - public static synchronized TagBlackboardArtifactAction getInstance() { - if (null == instance) { - instance = new TagBlackboardArtifactAction(); - } - return instance; - } - - private TagBlackboardArtifactAction() { - } - - @Override - public JMenuItem getPopupPresenter() { - return new TagBlackboardArtifactMenu(); - } - - @Override - public void actionPerformed(ActionEvent e) { - // Do nothing - this action should never be performed. - // Submenu actions are invoked instead. - } - - - private static class TagBlackboardArtifactMenu extends TagMenu { - public TagBlackboardArtifactMenu() { - super(Utilities.actionsGlobalContext().lookupAll(BlackboardArtifact.class).size() > 1 ? 
"Tag Results" : "Tag Result"); - } - - @Override - protected void applyTag(String tagDisplayName, String comment) { - try { - TagsManager tagsManager = Case.getCurrentCase().getServices().getTagsManager(); - TagType tagType = tagsManager.addTagType(tagDisplayName); - - Collection selectedArtifacts = Utilities.actionsGlobalContext().lookupAll(BlackboardArtifact.class); - for (BlackboardArtifact artifact : selectedArtifacts) { - Tags.createTag(artifact, tagDisplayName, comment); - try { - tagsManager.addBlackboardArtifactTag(artifact, tagType); - } - catch (TskCoreException ex) { - Logger.getLogger(TagBlackboardArtifactMenu.class.getName()).log(Level.SEVERE, "Error tagging result", ex); - } - } - } - catch (TagsManager.TagTypeAlreadyExistsException ex) { - JOptionPane.showMessageDialog(null, "A " + tagDisplayName + " tag type has already been defined.", "Duplicate Tag Type", JOptionPane.ERROR_MESSAGE); - } - catch (TskCoreException ex) { - Logger.getLogger(TagBlackboardArtifactMenu.class.getName()).log(Level.SEVERE, "Error adding " + tagDisplayName + " tag type", ex); - } - } - } -} +/* + * Autopsy Forensic Browser + * + * Copyright 2013 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.directorytree; + +import java.awt.event.ActionEvent; +import java.util.Collection; +import java.util.logging.Level; +import javax.swing.AbstractAction; +import javax.swing.JMenuItem; +import javax.swing.JOptionPane; +import org.openide.util.Utilities; +import org.openide.util.actions.Presenter; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.datamodel.Tags; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.TskCoreException; + +public class TagBlackboardArtifactAction extends AbstractAction implements Presenter.Popup { + // This class is a singleton to support multi-selection of nodes, since + // org.openide.nodes.NodeOp.findActions(Node[] nodes) will only pick up an Action if every + // node in the array returns a reference to the same action object from Node.getActions(boolean). + private static TagBlackboardArtifactAction instance; + + public static synchronized TagBlackboardArtifactAction getInstance() { + if (null == instance) { + instance = new TagBlackboardArtifactAction(); + } + return instance; + } + + private TagBlackboardArtifactAction() { + } + + @Override + public JMenuItem getPopupPresenter() { + return new TagBlackboardArtifactMenu(); + } + + @Override + public void actionPerformed(ActionEvent e) { + // Do nothing - this action should never be performed. + // Submenu actions are invoked instead. + } + + + private static class TagBlackboardArtifactMenu extends TagMenu { + public TagBlackboardArtifactMenu() { + super(Utilities.actionsGlobalContext().lookupAll(BlackboardArtifact.class).size() > 1 ? 
"Tag Results" : "Tag Result"); + } + + @Override + protected void applyTag(String tagDisplayName, String comment) { + Collection selectedArtifacts = Utilities.actionsGlobalContext().lookupAll(BlackboardArtifact.class); + for (BlackboardArtifact artifact : selectedArtifacts) { + Tags.createTag(artifact, tagDisplayName, comment); + } + } + } +} From 407ffb95680d2aac7ec815b58146aaf530407032 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 22 Oct 2013 19:25:10 -0400 Subject: [PATCH 084/179] Line endings update --- .../autopsy/datamodel/DirectoryNode.java | 194 +++++----- .../sleuthkit/autopsy/datamodel/FileNode.java | 360 +++++++++--------- nbproject/platform.properties | 240 ++++++------ 3 files changed, 397 insertions(+), 397 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/DirectoryNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/DirectoryNode.java index 3c4d33c253..f859362757 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/DirectoryNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/DirectoryNode.java @@ -1,97 +1,97 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2011 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.sleuthkit.autopsy.datamodel; - -import java.util.ArrayList; -import java.util.List; -import javax.swing.Action; -import org.sleuthkit.autopsy.directorytree.ExtractAction; -import org.sleuthkit.autopsy.directorytree.NewWindowViewAction; -import org.sleuthkit.autopsy.directorytree.TagAbstractFileAction; -import org.sleuthkit.autopsy.directorytree.ViewContextAction; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.Directory; -import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM; - -/** - * This class is used to represent the "Node" for the directory. Its children - * are more directories. - */ -public class DirectoryNode extends AbstractFsContentNode { - - public static final String DOTDOTDIR = "[parent folder]"; - public static final String DOTDIR = "[current folder]"; - - public DirectoryNode(Directory dir) { - this(dir, true); - - setIcon(dir); - } - - public DirectoryNode(AbstractFile dir, boolean directoryBrowseMode) { - super(dir, directoryBrowseMode); - - setIcon(dir); - } - - private void setIcon(AbstractFile dir) { - // set name, display name, and icon - if (dir.isDirNameFlagSet(TSK_FS_NAME_FLAG_ENUM.UNALLOC)) { - this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/folder-icon-deleted.png"); - } else { - this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/Folder-icon.png"); - } - } - - /** - * Right click action for this node - * - * @param popup - * @return - */ - @Override - public Action[] getActions(boolean popup) { - List actions = new ArrayList<>(); - if (!getDirectoryBrowseMode()) { - actions.add(new ViewContextAction("View File in Directory", this)); - actions.add(null); // creates a menu separator - } - actions.add(new NewWindowViewAction("View in New Window", this)); - actions.add(null); // creates a menu separator - actions.add(ExtractAction.getInstance()); - actions.add(null); // creates a menu separator - actions.add(TagAbstractFileAction.getInstance()); - return 
actions.toArray(new Action[0]); - } - - @Override - public T accept(ContentNodeVisitor v) { - return v.visit(this); - } - - @Override - public T accept(DisplayableItemNodeVisitor v) { - return v.visit(this); - } - - @Override - public TYPE getDisplayableItemNodeType() { - return TYPE.CONTENT; - } -} +/* + * Autopsy Forensic Browser + * + * Copyright 2011 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.datamodel; + +import java.util.ArrayList; +import java.util.List; +import javax.swing.Action; +import org.sleuthkit.autopsy.directorytree.ExtractAction; +import org.sleuthkit.autopsy.directorytree.NewWindowViewAction; +import org.sleuthkit.autopsy.directorytree.TagAbstractFileAction; +import org.sleuthkit.autopsy.directorytree.ViewContextAction; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.Directory; +import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM; + +/** + * This class is used to represent the "Node" for the directory. Its children + * are more directories. 
+ */ +public class DirectoryNode extends AbstractFsContentNode { + + public static final String DOTDOTDIR = "[parent folder]"; + public static final String DOTDIR = "[current folder]"; + + public DirectoryNode(Directory dir) { + this(dir, true); + + setIcon(dir); + } + + public DirectoryNode(AbstractFile dir, boolean directoryBrowseMode) { + super(dir, directoryBrowseMode); + + setIcon(dir); + } + + private void setIcon(AbstractFile dir) { + // set name, display name, and icon + if (dir.isDirNameFlagSet(TSK_FS_NAME_FLAG_ENUM.UNALLOC)) { + this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/folder-icon-deleted.png"); + } else { + this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/Folder-icon.png"); + } + } + + /** + * Right click action for this node + * + * @param popup + * @return + */ + @Override + public Action[] getActions(boolean popup) { + List actions = new ArrayList<>(); + if (!getDirectoryBrowseMode()) { + actions.add(new ViewContextAction("View File in Directory", this)); + actions.add(null); // creates a menu separator + } + actions.add(new NewWindowViewAction("View in New Window", this)); + actions.add(null); // creates a menu separator + actions.add(ExtractAction.getInstance()); + actions.add(null); // creates a menu separator + actions.add(TagAbstractFileAction.getInstance()); + return actions.toArray(new Action[0]); + } + + @Override + public T accept(ContentNodeVisitor v) { + return v.visit(this); + } + + @Override + public T accept(DisplayableItemNodeVisitor v) { + return v.visit(this); + } + + @Override + public TYPE getDisplayableItemNodeType() { + return TYPE.CONTENT; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/FileNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/FileNode.java index 9ca87fd8a7..628cd145a8 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/FileNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/FileNode.java @@ -1,180 +1,180 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2011 
Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.datamodel; - -import java.util.ArrayList; -import java.util.List; -import javax.swing.Action; -import org.sleuthkit.autopsy.directorytree.ExternalViewerAction; -import org.sleuthkit.autopsy.directorytree.ExtractAction; -import org.sleuthkit.autopsy.directorytree.HashSearchAction; -import org.sleuthkit.autopsy.directorytree.NewWindowViewAction; -import org.sleuthkit.autopsy.directorytree.TagAbstractFileAction; -import org.sleuthkit.autopsy.directorytree.ViewContextAction; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM; -import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM; - -/** - * This class is used to represent the "Node" for the file. It may have derived - * files children. 
- */ -public class FileNode extends AbstractFsContentNode { - - /** - * @param file underlying Content - */ - public FileNode(AbstractFile file) { - this(file, true); - - setIcon(file); - } - - public FileNode(AbstractFile file, boolean directoryBrowseMode) { - super(file, directoryBrowseMode); - - setIcon(file); - } - - private void setIcon(AbstractFile file) { - // set name, display name, and icon - if (file.isDirNameFlagSet(TSK_FS_NAME_FLAG_ENUM.UNALLOC)) { - if (file.getType().equals(TSK_DB_FILES_TYPE_ENUM.CARVED)) { - this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/carved-file-icon-16.png"); - } else { - this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-icon-deleted.png"); - } - } else { - this.setIconBaseWithExtension(getIconForFileType(file)); - } - } - - /** - * Right click action for this node - * - * @param popup - * @return - */ - @Override - public Action[] getActions(boolean popup) { - List actionsList = new ArrayList<>(); - if (!this.getDirectoryBrowseMode()) { - actionsList.add(new ViewContextAction("View File in Directory", this)); - actionsList.add(null); // creates a menu separator - } - actionsList.add(new NewWindowViewAction("View in New Window", this)); - actionsList.add(new ExternalViewerAction("Open in External Viewer", this)); - actionsList.add(null); // creates a menu separator - actionsList.add(ExtractAction.getInstance()); - actionsList.add(new HashSearchAction("Search for files with the same MD5 hash", this)); - actionsList.add(null); // creates a menu separator - actionsList.add(TagAbstractFileAction.getInstance()); - return actionsList.toArray(new Action[0]); - } - - @Override - public T accept(ContentNodeVisitor< T> v) { - return v.visit(this); - } - - @Override - public T accept(DisplayableItemNodeVisitor< T> v) { - return v.visit(this); - } - - // Given a file, returns the correct icon for said - // file based off it's extension - static String getIconForFileType(AbstractFile file) { - // Get the name, 
extension - String name = file.getName(); - int dotIndex = name.lastIndexOf("."); - if (dotIndex == -1) { - return "org/sleuthkit/autopsy/images/file-icon.png"; - } - String ext = name.substring(dotIndex).toLowerCase(); - - // Images - for (String s : FileTypeExtensions.getImageExtensions()) { - if (ext.equals(s)) { - return "org/sleuthkit/autopsy/images/image-file.png"; - } - } - // Videos - for (String s : FileTypeExtensions.getVideoExtensions()) { - if (ext.equals(s)) { - return "org/sleuthkit/autopsy/images/video-file.png"; - } - } - // Audio Files - for (String s : FileTypeExtensions.getAudioExtensions()) { - if (ext.equals(s)) { - return "org/sleuthkit/autopsy/images/audio-file.png"; - } - } - // Documents - for (String s : FileTypeExtensions.getDocumentExtensions()) { - if (ext.equals(s)) { - return "org/sleuthkit/autopsy/images/doc-file.png"; - } - } - // Executables / System Files - for (String s : FileTypeExtensions.getExecutableExtensions()) { - if (ext.equals(s)) { - return "org/sleuthkit/autopsy/images/exe-file.png"; - } - } - // Text Files - for (String s : FileTypeExtensions.getTextExtensions()) { - if (ext.equals(s)) { - return "org/sleuthkit/autopsy/images/text-file.png"; - } - } - // Web Files - for (String s : FileTypeExtensions.getWebExtensions()) { - if (ext.equals(s)) { - return "org/sleuthkit/autopsy/images/web-file.png"; - } - } - // PDFs - for (String s : FileTypeExtensions.getPDFExtensions()) { - if (ext.equals(s)) { - return "org/sleuthkit/autopsy/images/pdf-file.png"; - } - } - // Archives - for (String s : FileTypeExtensions.getArchiveExtensions()) { - if (ext.equals(s)) { - return "org/sleuthkit/autopsy/images/archive-file.png"; - } - } - // Else return the default - return "org/sleuthkit/autopsy/images/file-icon.png"; - - } - - @Override - public TYPE getDisplayableItemNodeType() { - return TYPE.CONTENT; - } - - @Override - public boolean isLeafTypeNode() { - return true; //false; - } -} +/* + * Autopsy Forensic Browser + * + * 
Copyright 2011 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.datamodel; + +import java.util.ArrayList; +import java.util.List; +import javax.swing.Action; +import org.sleuthkit.autopsy.directorytree.ExternalViewerAction; +import org.sleuthkit.autopsy.directorytree.ExtractAction; +import org.sleuthkit.autopsy.directorytree.HashSearchAction; +import org.sleuthkit.autopsy.directorytree.NewWindowViewAction; +import org.sleuthkit.autopsy.directorytree.TagAbstractFileAction; +import org.sleuthkit.autopsy.directorytree.ViewContextAction; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM; +import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM; + +/** + * This class is used to represent the "Node" for the file. It may have derived + * files children. 
+ */ +public class FileNode extends AbstractFsContentNode { + + /** + * @param file underlying Content + */ + public FileNode(AbstractFile file) { + this(file, true); + + setIcon(file); + } + + public FileNode(AbstractFile file, boolean directoryBrowseMode) { + super(file, directoryBrowseMode); + + setIcon(file); + } + + private void setIcon(AbstractFile file) { + // set name, display name, and icon + if (file.isDirNameFlagSet(TSK_FS_NAME_FLAG_ENUM.UNALLOC)) { + if (file.getType().equals(TSK_DB_FILES_TYPE_ENUM.CARVED)) { + this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/carved-file-icon-16.png"); + } else { + this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-icon-deleted.png"); + } + } else { + this.setIconBaseWithExtension(getIconForFileType(file)); + } + } + + /** + * Right click action for this node + * + * @param popup + * @return + */ + @Override + public Action[] getActions(boolean popup) { + List actionsList = new ArrayList<>(); + if (!this.getDirectoryBrowseMode()) { + actionsList.add(new ViewContextAction("View File in Directory", this)); + actionsList.add(null); // creates a menu separator + } + actionsList.add(new NewWindowViewAction("View in New Window", this)); + actionsList.add(new ExternalViewerAction("Open in External Viewer", this)); + actionsList.add(null); // creates a menu separator + actionsList.add(ExtractAction.getInstance()); + actionsList.add(new HashSearchAction("Search for files with the same MD5 hash", this)); + actionsList.add(null); // creates a menu separator + actionsList.add(TagAbstractFileAction.getInstance()); + return actionsList.toArray(new Action[0]); + } + + @Override + public T accept(ContentNodeVisitor< T> v) { + return v.visit(this); + } + + @Override + public T accept(DisplayableItemNodeVisitor< T> v) { + return v.visit(this); + } + + // Given a file, returns the correct icon for said + // file based off it's extension + static String getIconForFileType(AbstractFile file) { + // Get the name, 
extension + String name = file.getName(); + int dotIndex = name.lastIndexOf("."); + if (dotIndex == -1) { + return "org/sleuthkit/autopsy/images/file-icon.png"; + } + String ext = name.substring(dotIndex).toLowerCase(); + + // Images + for (String s : FileTypeExtensions.getImageExtensions()) { + if (ext.equals(s)) { + return "org/sleuthkit/autopsy/images/image-file.png"; + } + } + // Videos + for (String s : FileTypeExtensions.getVideoExtensions()) { + if (ext.equals(s)) { + return "org/sleuthkit/autopsy/images/video-file.png"; + } + } + // Audio Files + for (String s : FileTypeExtensions.getAudioExtensions()) { + if (ext.equals(s)) { + return "org/sleuthkit/autopsy/images/audio-file.png"; + } + } + // Documents + for (String s : FileTypeExtensions.getDocumentExtensions()) { + if (ext.equals(s)) { + return "org/sleuthkit/autopsy/images/doc-file.png"; + } + } + // Executables / System Files + for (String s : FileTypeExtensions.getExecutableExtensions()) { + if (ext.equals(s)) { + return "org/sleuthkit/autopsy/images/exe-file.png"; + } + } + // Text Files + for (String s : FileTypeExtensions.getTextExtensions()) { + if (ext.equals(s)) { + return "org/sleuthkit/autopsy/images/text-file.png"; + } + } + // Web Files + for (String s : FileTypeExtensions.getWebExtensions()) { + if (ext.equals(s)) { + return "org/sleuthkit/autopsy/images/web-file.png"; + } + } + // PDFs + for (String s : FileTypeExtensions.getPDFExtensions()) { + if (ext.equals(s)) { + return "org/sleuthkit/autopsy/images/pdf-file.png"; + } + } + // Archives + for (String s : FileTypeExtensions.getArchiveExtensions()) { + if (ext.equals(s)) { + return "org/sleuthkit/autopsy/images/archive-file.png"; + } + } + // Else return the default + return "org/sleuthkit/autopsy/images/file-icon.png"; + + } + + @Override + public TYPE getDisplayableItemNodeType() { + return TYPE.CONTENT; + } + + @Override + public boolean isLeafTypeNode() { + return true; //false; + } +} diff --git a/nbproject/platform.properties 
b/nbproject/platform.properties index e0bdd68b73..a9fa87f749 100644 --- a/nbproject/platform.properties +++ b/nbproject/platform.properties @@ -1,120 +1,120 @@ -branding.token=autopsy -netbeans-plat-version=7.3.1 -suite.dir=${basedir} -nbplatform.active.dir=${suite.dir}/netbeans-plat/${netbeans-plat-version} -harness.dir=${nbplatform.active.dir}/harness -bootstrap.url=http://deadlock.netbeans.org/hudson/job/nbms-and-javadoc/lastStableBuild/artifact/nbbuild/netbeans/harness/tasks.jar -autoupdate.catalog.url=http://dlc.sun.com.edgesuite.net/netbeans/updates/${netbeans-plat-version}/uc/final/distribution/catalog.xml.gz -cluster.path=\ - ${nbplatform.active.dir}/harness:\ - ${nbplatform.active.dir}/java:\ - ${nbplatform.active.dir}/platform -disabled.modules=\ - org.apache.tools.ant.module,\ - org.netbeans.api.debugger.jpda,\ - org.netbeans.api.java,\ - org.netbeans.lib.nbjavac,\ - org.netbeans.libs.cglib,\ - org.netbeans.libs.javacapi,\ - org.netbeans.libs.javacimpl,\ - org.netbeans.libs.springframework,\ - org.netbeans.modules.ant.browsetask,\ - org.netbeans.modules.ant.debugger,\ - org.netbeans.modules.ant.freeform,\ - org.netbeans.modules.ant.grammar,\ - org.netbeans.modules.ant.kit,\ - org.netbeans.modules.beans,\ - org.netbeans.modules.classfile,\ - org.netbeans.modules.dbschema,\ - org.netbeans.modules.debugger.jpda,\ - org.netbeans.modules.debugger.jpda.ant,\ - org.netbeans.modules.debugger.jpda.kit,\ - org.netbeans.modules.debugger.jpda.projects,\ - org.netbeans.modules.debugger.jpda.ui,\ - org.netbeans.modules.debugger.jpda.visual,\ - org.netbeans.modules.findbugs.installer,\ - org.netbeans.modules.form,\ - org.netbeans.modules.form.binding,\ - org.netbeans.modules.form.j2ee,\ - org.netbeans.modules.form.kit,\ - org.netbeans.modules.form.nb,\ - org.netbeans.modules.form.refactoring,\ - org.netbeans.modules.hibernate,\ - org.netbeans.modules.hibernatelib,\ - org.netbeans.modules.hudson.ant,\ - org.netbeans.modules.hudson.maven,\ - org.netbeans.modules.i18n,\ - 
org.netbeans.modules.i18n.form,\ - org.netbeans.modules.j2ee.core.utilities,\ - org.netbeans.modules.j2ee.eclipselink,\ - org.netbeans.modules.j2ee.eclipselinkmodelgen,\ - org.netbeans.modules.j2ee.jpa.refactoring,\ - org.netbeans.modules.j2ee.jpa.verification,\ - org.netbeans.modules.j2ee.metadata,\ - org.netbeans.modules.j2ee.metadata.model.support,\ - org.netbeans.modules.j2ee.persistence,\ - org.netbeans.modules.j2ee.persistence.kit,\ - org.netbeans.modules.j2ee.persistenceapi,\ - org.netbeans.modules.java.api.common,\ - org.netbeans.modules.java.debug,\ - org.netbeans.modules.java.editor,\ - org.netbeans.modules.java.editor.lib,\ - org.netbeans.modules.java.examples,\ - org.netbeans.modules.java.freeform,\ - org.netbeans.modules.java.guards,\ - org.netbeans.modules.java.helpset,\ - org.netbeans.modules.java.hints,\ - org.netbeans.modules.java.hints.declarative,\ - org.netbeans.modules.java.hints.declarative.test,\ - org.netbeans.modules.java.hints.legacy.spi,\ - org.netbeans.modules.java.hints.test,\ - org.netbeans.modules.java.hints.ui,\ - org.netbeans.modules.java.j2seplatform,\ - org.netbeans.modules.java.j2seproject,\ - org.netbeans.modules.java.kit,\ - org.netbeans.modules.java.lexer,\ - org.netbeans.modules.java.navigation,\ - org.netbeans.modules.java.platform,\ - org.netbeans.modules.java.preprocessorbridge,\ - org.netbeans.modules.java.project,\ - org.netbeans.modules.java.source,\ - org.netbeans.modules.java.source.ant,\ - org.netbeans.modules.java.source.queries,\ - org.netbeans.modules.java.source.queriesimpl,\ - org.netbeans.modules.java.sourceui,\ - org.netbeans.modules.java.testrunner,\ - org.netbeans.modules.javadoc,\ - org.netbeans.modules.javawebstart,\ - org.netbeans.modules.junit,\ - org.netbeans.modules.maven,\ - org.netbeans.modules.maven.checkstyle,\ - org.netbeans.modules.maven.coverage,\ - org.netbeans.modules.maven.embedder,\ - org.netbeans.modules.maven.grammar,\ - org.netbeans.modules.maven.graph,\ - 
org.netbeans.modules.maven.hints,\ - org.netbeans.modules.maven.indexer,\ - org.netbeans.modules.maven.junit,\ - org.netbeans.modules.maven.kit,\ - org.netbeans.modules.maven.model,\ - org.netbeans.modules.maven.osgi,\ - org.netbeans.modules.maven.persistence,\ - org.netbeans.modules.maven.refactoring,\ - org.netbeans.modules.maven.repository,\ - org.netbeans.modules.maven.search,\ - org.netbeans.modules.maven.spring,\ - org.netbeans.modules.projectimport.eclipse.core,\ - org.netbeans.modules.projectimport.eclipse.j2se,\ - org.netbeans.modules.refactoring.java,\ - org.netbeans.modules.spellchecker.bindings.java,\ - org.netbeans.modules.spring.beans,\ - org.netbeans.modules.testng,\ - org.netbeans.modules.testng.ant,\ - org.netbeans.modules.testng.maven,\ - org.netbeans.modules.websvc.jaxws21,\ - org.netbeans.modules.websvc.jaxws21api,\ - org.netbeans.modules.websvc.saas.codegen.java,\ - org.netbeans.modules.xml.jaxb,\ - org.netbeans.modules.xml.tools.java,\ - org.netbeans.spi.java.hints - +branding.token=autopsy +netbeans-plat-version=7.3.1 +suite.dir=${basedir} +nbplatform.active.dir=${suite.dir}/netbeans-plat/${netbeans-plat-version} +harness.dir=${nbplatform.active.dir}/harness +bootstrap.url=http://deadlock.netbeans.org/hudson/job/nbms-and-javadoc/lastStableBuild/artifact/nbbuild/netbeans/harness/tasks.jar +autoupdate.catalog.url=http://dlc.sun.com.edgesuite.net/netbeans/updates/${netbeans-plat-version}/uc/final/distribution/catalog.xml.gz +cluster.path=\ + ${nbplatform.active.dir}/harness:\ + ${nbplatform.active.dir}/java:\ + ${nbplatform.active.dir}/platform +disabled.modules=\ + org.apache.tools.ant.module,\ + org.netbeans.api.debugger.jpda,\ + org.netbeans.api.java,\ + org.netbeans.lib.nbjavac,\ + org.netbeans.libs.cglib,\ + org.netbeans.libs.javacapi,\ + org.netbeans.libs.javacimpl,\ + org.netbeans.libs.springframework,\ + org.netbeans.modules.ant.browsetask,\ + org.netbeans.modules.ant.debugger,\ + org.netbeans.modules.ant.freeform,\ + 
org.netbeans.modules.ant.grammar,\ + org.netbeans.modules.ant.kit,\ + org.netbeans.modules.beans,\ + org.netbeans.modules.classfile,\ + org.netbeans.modules.dbschema,\ + org.netbeans.modules.debugger.jpda,\ + org.netbeans.modules.debugger.jpda.ant,\ + org.netbeans.modules.debugger.jpda.kit,\ + org.netbeans.modules.debugger.jpda.projects,\ + org.netbeans.modules.debugger.jpda.ui,\ + org.netbeans.modules.debugger.jpda.visual,\ + org.netbeans.modules.findbugs.installer,\ + org.netbeans.modules.form,\ + org.netbeans.modules.form.binding,\ + org.netbeans.modules.form.j2ee,\ + org.netbeans.modules.form.kit,\ + org.netbeans.modules.form.nb,\ + org.netbeans.modules.form.refactoring,\ + org.netbeans.modules.hibernate,\ + org.netbeans.modules.hibernatelib,\ + org.netbeans.modules.hudson.ant,\ + org.netbeans.modules.hudson.maven,\ + org.netbeans.modules.i18n,\ + org.netbeans.modules.i18n.form,\ + org.netbeans.modules.j2ee.core.utilities,\ + org.netbeans.modules.j2ee.eclipselink,\ + org.netbeans.modules.j2ee.eclipselinkmodelgen,\ + org.netbeans.modules.j2ee.jpa.refactoring,\ + org.netbeans.modules.j2ee.jpa.verification,\ + org.netbeans.modules.j2ee.metadata,\ + org.netbeans.modules.j2ee.metadata.model.support,\ + org.netbeans.modules.j2ee.persistence,\ + org.netbeans.modules.j2ee.persistence.kit,\ + org.netbeans.modules.j2ee.persistenceapi,\ + org.netbeans.modules.java.api.common,\ + org.netbeans.modules.java.debug,\ + org.netbeans.modules.java.editor,\ + org.netbeans.modules.java.editor.lib,\ + org.netbeans.modules.java.examples,\ + org.netbeans.modules.java.freeform,\ + org.netbeans.modules.java.guards,\ + org.netbeans.modules.java.helpset,\ + org.netbeans.modules.java.hints,\ + org.netbeans.modules.java.hints.declarative,\ + org.netbeans.modules.java.hints.declarative.test,\ + org.netbeans.modules.java.hints.legacy.spi,\ + org.netbeans.modules.java.hints.test,\ + org.netbeans.modules.java.hints.ui,\ + org.netbeans.modules.java.j2seplatform,\ + 
org.netbeans.modules.java.j2seproject,\ + org.netbeans.modules.java.kit,\ + org.netbeans.modules.java.lexer,\ + org.netbeans.modules.java.navigation,\ + org.netbeans.modules.java.platform,\ + org.netbeans.modules.java.preprocessorbridge,\ + org.netbeans.modules.java.project,\ + org.netbeans.modules.java.source,\ + org.netbeans.modules.java.source.ant,\ + org.netbeans.modules.java.source.queries,\ + org.netbeans.modules.java.source.queriesimpl,\ + org.netbeans.modules.java.sourceui,\ + org.netbeans.modules.java.testrunner,\ + org.netbeans.modules.javadoc,\ + org.netbeans.modules.javawebstart,\ + org.netbeans.modules.junit,\ + org.netbeans.modules.maven,\ + org.netbeans.modules.maven.checkstyle,\ + org.netbeans.modules.maven.coverage,\ + org.netbeans.modules.maven.embedder,\ + org.netbeans.modules.maven.grammar,\ + org.netbeans.modules.maven.graph,\ + org.netbeans.modules.maven.hints,\ + org.netbeans.modules.maven.indexer,\ + org.netbeans.modules.maven.junit,\ + org.netbeans.modules.maven.kit,\ + org.netbeans.modules.maven.model,\ + org.netbeans.modules.maven.osgi,\ + org.netbeans.modules.maven.persistence,\ + org.netbeans.modules.maven.refactoring,\ + org.netbeans.modules.maven.repository,\ + org.netbeans.modules.maven.search,\ + org.netbeans.modules.maven.spring,\ + org.netbeans.modules.projectimport.eclipse.core,\ + org.netbeans.modules.projectimport.eclipse.j2se,\ + org.netbeans.modules.refactoring.java,\ + org.netbeans.modules.spellchecker.bindings.java,\ + org.netbeans.modules.spring.beans,\ + org.netbeans.modules.testng,\ + org.netbeans.modules.testng.ant,\ + org.netbeans.modules.testng.maven,\ + org.netbeans.modules.websvc.jaxws21,\ + org.netbeans.modules.websvc.jaxws21api,\ + org.netbeans.modules.websvc.saas.codegen.java,\ + org.netbeans.modules.xml.jaxb,\ + org.netbeans.modules.xml.tools.java,\ + org.netbeans.spi.java.hints + From 9a2da084430e44870597f15065408942f25516c6 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Wed, 23 Oct 2013 11:00:08 -0400 
Subject: [PATCH 085/179] Second step in merge of new_tags_api branch into master --- .../sleuthkit/autopsy/casemodule/services/Services.java | 8 ++++++++ 1 file changed, 8 insertions(+) mode change 100644 => 100755 Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java old mode 100644 new mode 100755 index 10663c173b..069b13ef2e --- a/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java @@ -41,18 +41,26 @@ public class Services implements Closeable { // services private FileManager fileManager; + private TagsManager tagsManager; public Services(SleuthkitCase tskCase) { this.tskCase = tskCase; //create and initialize FileManager as early as possibly in the new/opened Case fileManager = new FileManager(tskCase); services.add(fileManager); + + tagsManager = new TagsManager(tskCase); + services.add(tagsManager); } public FileManager getFileManager() { return fileManager; } + public TagsManager getTagsManager() { + return tagsManager; + } + @Override public void close() throws IOException { // close all services From 75f8dd47e78e4887b44070662f7cb47d03477388 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Wed, 23 Oct 2013 11:04:00 -0400 Subject: [PATCH 086/179] Third step in merge of new_tags_api branch into master --- Core/src/org/sleuthkit/autopsy/datamodel/DirectoryNode.java | 4 ++-- Core/src/org/sleuthkit/autopsy/datamodel/FileNode.java | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/DirectoryNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/DirectoryNode.java index b3da63790c..278c147917 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/DirectoryNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/DirectoryNode.java @@ -21,9 +21,9 @@ package 
org.sleuthkit.autopsy.datamodel; import java.util.ArrayList; import java.util.List; import javax.swing.Action; +import org.sleuthkit.autopsy.actions.AddContentTagAction; import org.sleuthkit.autopsy.directorytree.ExtractAction; import org.sleuthkit.autopsy.directorytree.NewWindowViewAction; -import org.sleuthkit.autopsy.directorytree.TagAbstractFileAction; import org.sleuthkit.autopsy.directorytree.ViewContextAction; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Directory; @@ -76,7 +76,7 @@ public class DirectoryNode extends AbstractFsContentNode { actions.add(null); // creates a menu separator actions.add(ExtractAction.getInstance()); actions.add(null); // creates a menu separator - actions.add(TagAbstractFileAction.getInstance()); + actions.add(AddContentTagAction.getInstance()); return actions.toArray(new Action[0]); } diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/FileNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/FileNode.java index 27686787ef..b3312292cf 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/FileNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/FileNode.java @@ -21,6 +21,7 @@ package org.sleuthkit.autopsy.datamodel; import java.util.ArrayList; import java.util.List; import javax.swing.Action; +import org.sleuthkit.autopsy.actions.AddContentTagAction; import org.sleuthkit.autopsy.directorytree.ExternalViewerAction; import org.sleuthkit.autopsy.directorytree.ExtractAction; import org.sleuthkit.autopsy.directorytree.HashSearchAction; @@ -83,7 +84,7 @@ public class FileNode extends AbstractFsContentNode { actionsList.add(ExtractAction.getInstance()); actionsList.add(new HashSearchAction("Search for files with the same MD5 hash", this)); actionsList.add(null); // creates a menu separator - actionsList.add(TagAbstractFileAction.getInstance()); + actionsList.add(AddContentTagAction.getInstance()); return actionsList.toArray(new Action[0]); } From 9c1743ab5a76866e0808492ba7d7b1db5cc80d49 Mon Sep 17 
00:00:00 2001 From: Richard Cordovano Date: Wed, 23 Oct 2013 15:33:38 -0400 Subject: [PATCH 087/179] Updated for improved lower level tags api, fixed small bug in tag and comment dialog --- .../autopsy/actions/AddTagAction.java | 8 +- .../actions/GetTagNameAndCommentDialog.java | 19 ++- .../autopsy/actions/GetTagNameDialog.java | 36 ++++-- .../casemodule/services/TagsManager.java | 108 +++++++++--------- .../autopsy/datamodel/ContentTagTypeNode.java | 2 +- .../autopsy/datamodel/TagNameNode.java | 3 +- .../sleuthkit/autopsy/datamodel/TagsNode.java | 2 +- .../BlackboardArtifactTagTypeNode.java | 2 +- .../autopsy/report/ReportGenerator.java | 11 +- .../autopsy/report/ReportVisualPanel2.java | 5 +- .../autopsy/report/TableReportModule.java | 4 +- .../sleuthkit/autopsy/timeline/Timeline.java | 6 +- 12 files changed, 112 insertions(+), 94 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/actions/AddTagAction.java b/Core/src/org/sleuthkit/autopsy/actions/AddTagAction.java index b6c8feea06..65f6a5e589 100755 --- a/Core/src/org/sleuthkit/autopsy/actions/AddTagAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/AddTagAction.java @@ -20,7 +20,7 @@ package org.sleuthkit.autopsy.actions; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; -import java.util.ArrayList; +import java.util.List; import java.util.logging.Level; import javax.swing.JMenu; import javax.swing.JMenuItem; @@ -76,9 +76,9 @@ abstract class AddTagAction extends TagAction implements Presenter.Popup { // Get the current set of tag names. 
TagsManager tagsManager = Case.getCurrentCase().getServices().getTagsManager(); - ArrayList tagNames = new ArrayList<>(); + List tagNames = null; try { - tagsManager.getAllTagNames(tagNames); + tagNames = tagsManager.getAllTagNames(); } catch (TskCoreException ex) { Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to get tag names", ex); @@ -91,7 +91,7 @@ abstract class AddTagAction extends TagAction implements Presenter.Popup { // Each tag name in the current set of tags gets its own menu item in // the "Quick Tags" sub-menu. Selecting one of these menu items adds // a tag with the associated tag name. - if (!tagNames.isEmpty()) { + if (null != tagNames && !tagNames.isEmpty()) { for (final TagName tagName : tagNames) { JMenuItem tagNameItem = new JMenuItem(tagName.getDisplayName()); tagNameItem.addActionListener(new ActionListener() { diff --git a/Core/src/org/sleuthkit/autopsy/actions/GetTagNameAndCommentDialog.java b/Core/src/org/sleuthkit/autopsy/actions/GetTagNameAndCommentDialog.java index 34aee2c7df..e953694d90 100644 --- a/Core/src/org/sleuthkit/autopsy/actions/GetTagNameAndCommentDialog.java +++ b/Core/src/org/sleuthkit/autopsy/actions/GetTagNameAndCommentDialog.java @@ -20,8 +20,8 @@ package org.sleuthkit.autopsy.actions; import java.awt.event.ActionEvent; import java.awt.event.KeyEvent; -import java.util.ArrayList; import java.util.HashMap; +import java.util.List; import java.util.logging.Level; import javax.swing.AbstractAction; import javax.swing.ActionMap; @@ -38,7 +38,7 @@ import org.sleuthkit.datamodel.TagName; import org.sleuthkit.datamodel.TskCoreException; public class GetTagNameAndCommentDialog extends JDialog { - private static final String NO_TAG_NAMES_MESSAGE = "No Tags"; // RJCTODO: ?? 
+ private static final String NO_TAG_NAMES_MESSAGE = "No Tags"; private final HashMap tagNames = new HashMap<>(); private TagNameAndComment tagNameAndComment = null; @@ -70,7 +70,6 @@ public class GetTagNameAndCommentDialog extends JDialog { initComponents(); // Set up the dialog to close when Esc is pressed. - // RJCTODO: Could do this for the other dialog, too. String cancelName = "cancel"; InputMap inputMap = getRootPane().getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT); inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), cancelName); @@ -82,17 +81,17 @@ public class GetTagNameAndCommentDialog extends JDialog { } }); - // Populate the combo box with the available tag names. - // Save the tag names to be enable to return the one the user selects. + // Populate the combo box with the available tag names and save the + // tag name DTOs to be enable to return the one the user selects. TagsManager tagsManager = Case.getCurrentCase().getServices().getTagsManager(); - ArrayList currentTagNames = new ArrayList<>(); + List currentTagNames = null; try { - tagsManager.getAllTagNames(currentTagNames); + currentTagNames = tagsManager.getAllTagNames(); } catch (TskCoreException ex) { Logger.getLogger(GetTagNameAndCommentDialog.class.getName()).log(Level.SEVERE, "Failed to get tag names", ex); } - if (currentTagNames.isEmpty()) { + if (null != currentTagNames && currentTagNames.isEmpty()) { tagCombo.addItem(NO_TAG_NAMES_MESSAGE); } else { @@ -222,18 +221,16 @@ public class GetTagNameAndCommentDialog extends JDialog { }//GEN-LAST:event_cancelButtonActionPerformed private void closeDialog(java.awt.event.WindowEvent evt) {//GEN-FIRST:event_closeDialog - // RJCTODO: Is this dead code? 
tagNameAndComment = null; dispose(); }//GEN-LAST:event_closeDialog private void newTagButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_newTagButtonActionPerformed - // RJCTODO: Make suer this works for dups TagName newTagName = GetTagNameDialog.doDialog(); if (newTagName != null) { tagNames.put(newTagName.getDisplayName(), newTagName); tagCombo.addItem(newTagName.getDisplayName()); - tagCombo.setSelectedItem(newTagName); + tagCombo.setSelectedItem(newTagName.getDisplayName()); } }//GEN-LAST:event_newTagButtonActionPerformed diff --git a/Core/src/org/sleuthkit/autopsy/actions/GetTagNameDialog.java b/Core/src/org/sleuthkit/autopsy/actions/GetTagNameDialog.java index 637effab00..2100d83ae2 100644 --- a/Core/src/org/sleuthkit/autopsy/actions/GetTagNameDialog.java +++ b/Core/src/org/sleuthkit/autopsy/actions/GetTagNameDialog.java @@ -18,14 +18,20 @@ */ package org.sleuthkit.autopsy.actions; +import java.awt.event.ActionEvent; import java.awt.event.KeyEvent; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.logging.Level; +import javax.swing.AbstractAction; +import javax.swing.ActionMap; +import javax.swing.InputMap; +import javax.swing.JComponent; import javax.swing.JDialog; import javax.swing.JFrame; import javax.swing.JOptionPane; +import javax.swing.KeyStroke; import javax.swing.table.AbstractTableModel; import org.openide.util.ImageUtilities; import org.openide.windows.WindowManager; @@ -50,19 +56,36 @@ public class GetTagNameDialog extends JDialog { setIconImage(ImageUtilities.loadImage(TAG_ICON_PATH)); initComponents(); + // Set up the dialog to close when Esc is pressed. 
+ String cancelName = "cancel"; + InputMap inputMap = getRootPane().getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT); + inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), cancelName); + ActionMap actionMap = getRootPane().getActionMap(); + actionMap.put(cancelName, new AbstractAction() { + @Override + public void actionPerformed(ActionEvent e) { + dispose(); + } + }); + // Get the current set of tag names and hash them for a speedy lookup in // case the user chooses an existing tag name from the tag names table. TagsManager tagsManager = Case.getCurrentCase().getServices().getTagsManager(); - ArrayList currentTagNames = new ArrayList<>(); + List currentTagNames = null; try { - tagsManager.getAllTagNames(currentTagNames); + currentTagNames = tagsManager.getAllTagNames(); } catch (TskCoreException ex) { Logger.getLogger(GetTagNameDialog.class.getName()).log(Level.SEVERE, "Failed to get tag names", ex); - } - for (TagName name : currentTagNames) { - this.tagNames.put(name.getDisplayName(), name); - } + } + if (null != currentTagNames) { + for (TagName name : currentTagNames) { + this.tagNames.put(name.getDisplayName(), name); + } + } + else { + currentTagNames = new ArrayList<>(); + } // Populate the tag names table. tagsTable.setModel(new TagsTableModel(currentTagNames)); @@ -251,7 +274,6 @@ public class GetTagNameDialog extends JDialog { }//GEN-LAST:event_cancelButtonActionPerformed private void okButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_okButtonActionPerformed - // RJCTODO: Check out this stuff, titles etc. 
String tagDisplayName = tagNameField.getText(); if (tagDisplayName.isEmpty()) { JOptionPane.showMessageDialog(null, "Must supply a tag name to continue.", "Tag Name", JOptionPane.ERROR_MESSAGE); diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java index 16b3ef58d1..3562887438 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java @@ -20,7 +20,6 @@ package org.sleuthkit.autopsy.casemodule.services; import java.io.Closeable; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -43,7 +42,6 @@ import org.sleuthkit.datamodel.TskCoreException; public class TagsManager implements Closeable { private static final String TAGS_SETTINGS_NAME = "Tags"; private static final String TAG_NAMES_SETTING_KEY = "TagNames"; - private static final TagName[] predefinedTagNames = new TagName[]{new TagName("Bookmark", "", TagName.HTML_COLOR.NONE)}; private final SleuthkitCase tskCase; private final HashMap uniqueTagNames = new HashMap<>(); private boolean tagNamesInitialized = false; // @@@ This is part of a work around to be removed when database access on the EDT is correctly synchronized. @@ -69,33 +67,31 @@ public class TagsManager implements Closeable { /** * Gets a list of all tag names currently available for tagging content or * blackboard artifacts. - * @param [out] A list, possibly empty, of TagName data transfer objects (DTOs). + * @return A list, possibly empty, of TagName data transfer objects (DTOs). * @throws TskCoreException */ - public synchronized void getAllTagNames(List tagNames) throws TskCoreException { + public synchronized List getAllTagNames() throws TskCoreException { // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. 
if (!tagNamesInitialized) { getExistingTagNames(); } - tagNames.clear(); - tskCase.getAllTagNames(tagNames); + return tskCase.getAllTagNames(); } /** * Gets a list of all tag names currently used for tagging content or * blackboard artifacts. - * @param [out] A list, possibly empty, of TagName data transfer objects (DTOs). + * @return A list, possibly empty, of TagName data transfer objects (DTOs). * @throws TskCoreException */ - public synchronized void getTagNamesInUse(List tagNames) throws TskCoreException { + public synchronized List getTagNamesInUse() throws TskCoreException { // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. if (!tagNamesInitialized) { getExistingTagNames(); } - tagNames.clear(); - tskCase.getTagNamesInUse(tagNames); + return tskCase.getTagNamesInUse(); } /** @@ -152,8 +148,7 @@ public class TagsManager implements Closeable { } // Add the tag name to the case. - TagName newTagName = new TagName(displayName, description, color); - tskCase.addTagName(newTagName); + TagName newTagName = tskCase.addTagName(displayName, description, color); // Add the tag name to the tags settings. uniqueTagNames.put(newTagName.getDisplayName(), newTagName); @@ -166,10 +161,11 @@ public class TagsManager implements Closeable { * Tags a content object. * @param [in] content The content to tag. * @param [in] tagName The name to use for the tag. + * @return A ContentTag data transfer object (DTO) representing the new tag. * @throws TskCoreException */ - public void addContentTag(Content content, TagName tagName) throws TskCoreException { - addContentTag(content, tagName, "", 0, content.getSize() - 1); + public ContentTag addContentTag(Content content, TagName tagName) throws TskCoreException { + return addContentTag(content, tagName, "", 0, content.getSize() - 1); } /** @@ -177,10 +173,11 @@ public class TagsManager implements Closeable { * @param [in] content The content to tag. 
* @param [in] tagName The name to use for the tag. * @param [in] comment A comment to store with the tag. + * @return A ContentTag data transfer object (DTO) representing the new tag. * @throws TskCoreException */ - public void addContentTag(Content content, TagName tagName, String comment) throws TskCoreException { - addContentTag(content, tagName, comment, 0, content.getSize() - 1); + public ContentTag addContentTag(Content content, TagName tagName, String comment) throws TskCoreException { + return addContentTag(content, tagName, comment, 0, content.getSize() - 1); } /** @@ -190,9 +187,10 @@ public class TagsManager implements Closeable { * @param [in] comment A comment to store with the tag. * @param [in] beginByteOffset Designates the beginning of a tagged section. * @param [in] endByteOffset Designates the end of a tagged section. + * @return A ContentTag data transfer object (DTO) representing the new tag. * @throws IllegalArgumentException, TskCoreException */ - public synchronized void addContentTag(Content content, TagName tagName, String comment, long beginByteOffset, long endByteOffset) throws IllegalArgumentException, TskCoreException { + public synchronized ContentTag addContentTag(Content content, TagName tagName, String comment, long beginByteOffset, long endByteOffset) throws IllegalArgumentException, TskCoreException { // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. if (!tagNamesInitialized) { getExistingTagNames(); @@ -210,7 +208,7 @@ public class TagsManager implements Closeable { throw new IllegalArgumentException("endByteOffset < beginByteOffset"); } - tskCase.addContentTag(new ContentTag(content, tagName, comment, beginByteOffset, endByteOffset)); + return tskCase.addContentTag(content, tagName, comment, beginByteOffset, endByteOffset); } /** @@ -229,16 +227,16 @@ public class TagsManager implements Closeable { /** * Gets all content tags for the current case. 
- * @param [out] tags A list, possibly empty, of content tags. + * @return A list, possibly empty, of content tags. * @throws TskCoreException */ - public void getAllContentTags(List tags) throws TskCoreException { + public List getAllContentTags() throws TskCoreException { // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. if (!tagNamesInitialized) { getExistingTagNames(); } - tskCase.getAllContentTags(tags); + return tskCase.getAllContentTags(); } /** @@ -262,23 +260,24 @@ public class TagsManager implements Closeable { * @return A list, possibly empty, of the content tags with the specified tag name. * @throws TskCoreException */ - public synchronized void getContentTagsByTagName(TagName tagName, List tags) throws TskCoreException { + public synchronized List getContentTagsByTagName(TagName tagName) throws TskCoreException { // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. if (!tagNamesInitialized) { getExistingTagNames(); } - tskCase.getContentTagsByTagName(tagName, tags); + return tskCase.getContentTagsByTagName(tagName); } /** * Tags a blackboard artifact object. * @param [in] artifact The blackboard artifact to tag. * @param [in] tagName The name to use for the tag. + * @return A BlackboardArtifactTag data transfer object (DTO) representing the new tag. * @throws TskCoreException */ - public void addBlackboardArtifactTag(BlackboardArtifact artifact, TagName tagName) throws TskCoreException { - addBlackboardArtifactTag(artifact, tagName, ""); + public BlackboardArtifactTag addBlackboardArtifactTag(BlackboardArtifact artifact, TagName tagName) throws TskCoreException { + return addBlackboardArtifactTag(artifact, tagName, ""); } /** @@ -286,15 +285,16 @@ public class TagsManager implements Closeable { * @param [in] artifact The blackboard artifact to tag. * @param [in] tagName The name to use for the tag. * @param [in] comment A comment to store with the tag. 
+ * @return A BlackboardArtifactTag data transfer object (DTO) representing the new tag. * @throws TskCoreException */ - public synchronized void addBlackboardArtifactTag(BlackboardArtifact artifact, TagName tagName, String comment) throws TskCoreException { + public synchronized BlackboardArtifactTag addBlackboardArtifactTag(BlackboardArtifact artifact, TagName tagName, String comment) throws TskCoreException { // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. if (!tagNamesInitialized) { getExistingTagNames(); } - tskCase.addBlackboardArtifactTag(new BlackboardArtifactTag(artifact, tskCase.getContentById(artifact.getObjectID()), tagName, comment)); + return tskCase.addBlackboardArtifactTag(artifact, tagName, comment); } /** @@ -313,16 +313,16 @@ public class TagsManager implements Closeable { /** * Gets all blackboard artifact tags for the current case. - * @param [out] tags A list, possibly empty, of blackboard artifact tags. + * @return A list, possibly empty, of blackboard artifact tags. * @throws TskCoreException */ - public void getAllBlackboardArtifactTags(List tags) throws TskCoreException { + public List getAllBlackboardArtifactTags() throws TskCoreException { // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. if (!tagNamesInitialized) { getExistingTagNames(); } - tskCase.getAllBlackboardArtifactTags(tags); + return tskCase.getAllBlackboardArtifactTags(); } /** @@ -343,48 +343,38 @@ public class TagsManager implements Closeable { /** * Gets blackboard artifact tags by tag name. * @param [in] tagName The tag name of interest. - * @return A list, possibly empty, of the content tags with the specified tag name. + * @return A list, possibly empty, of the blackboard artifact tags with the specified tag name. 
* @throws TskCoreException */ - public synchronized void getBlackboardArtifactTagsByTagName(TagName tagName, List tags) throws TskCoreException { + public synchronized List getBlackboardArtifactTagsByTagName(TagName tagName) throws TskCoreException { // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. if (!tagNamesInitialized) { getExistingTagNames(); } - tskCase.getBlackboardArtifactTagsByTagName(tagName, tags); + return tskCase.getBlackboardArtifactTagsByTagName(tagName); } /** * Gets blackboard artifact tags for a particular blackboard artifact. * @param [in] artifact The blackboard artifact of interest. - * @param [out] tags A list, possibly empty, of the tags that have been applied to the artifact. + * @return A list, possibly empty, of the tags that have been applied to the artifact. * @throws TskCoreException */ - public synchronized void getBlackboardArtifactTagsByArtifact(BlackboardArtifact artifact, List tags) throws TskCoreException { + public synchronized List getBlackboardArtifactTagsByArtifact(BlackboardArtifact artifact) throws TskCoreException { // @@@ This is a work around to be removed when database access on the EDT is correctly synchronized. 
if (!tagNamesInitialized) { getExistingTagNames(); } - tskCase.getBlackboardArtifactTagsByArtifact(artifact, tags); + return tskCase.getBlackboardArtifactTagsByArtifact(artifact); } @Override public void close() throws IOException { saveTagNamesToTagsSettings(); } - - private void addTagName(TagName tagName, String errorMessage) { - try { - tskCase.addTagName(tagName); - uniqueTagNames.put(tagName.getDisplayName(), tagName); - } - catch(TskCoreException ex) { - Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, errorMessage, ex); - } - } - + private void getExistingTagNames() { getTagNamesFromCurrentCase(); getTagNamesFromTagsSettings(); @@ -395,8 +385,7 @@ public class TagsManager implements Closeable { private void getTagNamesFromCurrentCase() { try { - ArrayList currentTagNames = new ArrayList<>(); - tskCase.getAllTagNames(currentTagNames); + List currentTagNames = tskCase.getAllTagNames(); for (TagName tagName : currentTagNames) { uniqueTagNames.put(tagName.getDisplayName(), tagName); } @@ -416,18 +405,27 @@ public class TagsManager implements Closeable { // at a time to gracefully discard any duplicates or corrupt tuples. 
for (String tagNameTuple : tagNameTuples) { String[] tagNameAttributes = tagNameTuple.split(","); - if (!uniqueTagNames.containsKey(tagNameAttributes[0])) { - TagName tagName = new TagName(tagNameAttributes[0], tagNameAttributes[1], TagName.HTML_COLOR.getColorByName(tagNameAttributes[2])); - addTagName(tagName, "Failed to add " + tagName.getDisplayName() + " tag name from tag settings to the current case"); + if (!uniqueTagNames.containsKey(tagNameAttributes[0])) { + try { + TagName tagName = tskCase.addTagName(tagNameAttributes[0], tagNameAttributes[1], TagName.HTML_COLOR.getColorByName(tagNameAttributes[2])); + uniqueTagNames.put(tagName.getDisplayName(), tagName); + } + catch (TskCoreException ex) { + Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to add saved tag name " + tagNameAttributes[0], ex); + } } } } } private void getPredefinedTagNames() { - for (TagName tagName : predefinedTagNames) { - if (!uniqueTagNames.containsKey(tagName.getDisplayName())) { - addTagName(tagName, "Failed to add predefined " + tagName.getDisplayName() + " tag name to the current case"); + if (!uniqueTagNames.containsKey("Bookmark")) { + try { + TagName tagName = tskCase.addTagName("Bookmark", "", TagName.HTML_COLOR.NONE); + uniqueTagNames.put(tagName.getDisplayName(), tagName); + } + catch (TskCoreException ex) { + Logger.getLogger(TagsManager.class.getName()).log(Level.SEVERE, "Failed to add predefined 'Bookmark' tag name", ex); } } } diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java index ed7f9b655f..37b14bf976 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java @@ -90,7 +90,7 @@ public class ContentTagTypeNode extends DisplayableItemNode { protected boolean createKeys(List keys) { // Use the content tags bearing the specified tag name as the keys. 
try { - Case.getCurrentCase().getServices().getTagsManager().getContentTagsByTagName(tagName, keys); + keys.addAll(Case.getCurrentCase().getServices().getTagsManager().getContentTagsByTagName(tagName)); } catch (TskCoreException ex) { Logger.getLogger(ContentTagTypeNode.ContentTagNodeFactory.class.getName()).log(Level.SEVERE, "Failed to get tag names", ex); diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java index d8d743b92d..74b3f195bf 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java @@ -113,7 +113,8 @@ public class TagNameNode extends DisplayableItemNode { case BLACKBOARD_ARTIFACT_TAG_TYPE_NODE_KEY: return new BlackboardArtifactTagTypeNode(tagName); default: - return null; // RJCTODO: Programming error decide how to handle. + Logger.getLogger(TagNameNode.class.getName()).log(Level.SEVERE, "{0} not a recognized key", key); + return null; } } } diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java index 247ffb841a..635416bc1e 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java @@ -74,7 +74,7 @@ public class TagsNode extends DisplayableItemNode { @Override protected boolean createKeys(List keys) { try { - Case.getCurrentCase().getServices().getTagsManager().getTagNamesInUse(keys); + keys.addAll(Case.getCurrentCase().getServices().getTagsManager().getTagNamesInUse()); } catch (TskCoreException ex) { Logger.getLogger(TagNameNodeFactory.class.getName()).log(Level.SEVERE, "Failed to get tag names", ex); diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java b/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java index e0085e35a7..5cedd82e98 100755 --- 
a/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java @@ -95,7 +95,7 @@ public class BlackboardArtifactTagTypeNode extends DisplayableItemNode { protected boolean createKeys(List keys) { try { // Use the blackboard artifact tags bearing the specified tag name as the keys. - Case.getCurrentCase().getServices().getTagsManager().getBlackboardArtifactTagsByTagName(tagName, keys); + keys.addAll(Case.getCurrentCase().getServices().getTagsManager().getBlackboardArtifactTagsByTagName(tagName)); } catch (TskCoreException ex) { Logger.getLogger(BlackboardArtifactTagTypeNode.BlackboardArtifactTagNodeFactory.class.getName()).log(Level.SEVERE, "Failed to get tag names", ex); diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java b/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java index bf66eb66fe..a31be8d299 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java @@ -461,9 +461,9 @@ public class ReportGenerator { } // Get the content tags. 
- ArrayList tags = new ArrayList<>(); + List tags; try { - Case.getCurrentCase().getServices().getTagsManager().getAllContentTags(tags); + tags = Case.getCurrentCase().getServices().getTagsManager().getAllContentTags(); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "failed to get content tags", ex); @@ -524,9 +524,9 @@ public class ReportGenerator { return; } - ArrayList tags = new ArrayList<>(); + List tags; try { - Case.getCurrentCase().getServices().getTagsManager().getAllBlackboardArtifactTags(tags); + tags = Case.getCurrentCase().getServices().getTagsManager().getAllBlackboardArtifactTags(); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "failed to get blackboard artifact tags", ex); @@ -601,8 +601,7 @@ public class ReportGenerator { List artifacts = new ArrayList<>(); try { for (BlackboardArtifact artifact : skCase.getBlackboardArtifacts(type)) { - ArrayList tags = new ArrayList<>(); - Case.getCurrentCase().getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact, tags); + List tags = Case.getCurrentCase().getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact); HashSet uniqueTagNames = new HashSet<>(); for (BlackboardArtifactTag tag : tags) { uniqueTagNames.add(tag.getName().getDisplayName()); diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.java b/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.java index d63c65f73a..8ccd2b0770 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportVisualPanel2.java @@ -72,12 +72,13 @@ public final class ReportVisualPanel2 extends JPanel { // Initialize the list of Tags private void initTags() { - ArrayList tagNamesInUse = new ArrayList<>(); + List tagNamesInUse; try { - Case.getCurrentCase().getServices().getTagsManager().getTagNamesInUse(tagNamesInUse); + tagNamesInUse = Case.getCurrentCase().getServices().getTagsManager().getTagNamesInUse(); } catch 
(TskCoreException ex) { Logger.getLogger(ReportVisualPanel2.class.getName()).log(Level.SEVERE, "Failed to get tag names", ex); + return; } for(TagName tagName : tagNamesInUse) { diff --git a/Core/src/org/sleuthkit/autopsy/report/TableReportModule.java b/Core/src/org/sleuthkit/autopsy/report/TableReportModule.java index 037b6f16e9..4a7bc9ac50 100644 --- a/Core/src/org/sleuthkit/autopsy/report/TableReportModule.java +++ b/Core/src/org/sleuthkit/autopsy/report/TableReportModule.java @@ -47,11 +47,11 @@ public interface TableReportModule extends ReportModule { /** * Start a new data type for the report. This is how the report will differentiate between - * the start and end of a certain type of data, such as a Blackboard Artifact Type. + * the start and end of a certain type of data, such as a blackboard artifact Type. * It is up to the report how the differentiation is shown. * * @param title String name of the data type - * @param description RJCTODO: fix this header comment + * @param description Description of the data type */ public void startDataType(String title, String description); diff --git a/Timeline/src/org/sleuthkit/autopsy/timeline/Timeline.java b/Timeline/src/org/sleuthkit/autopsy/timeline/Timeline.java index 3468ab6fef..190aa2a7d5 100644 --- a/Timeline/src/org/sleuthkit/autopsy/timeline/Timeline.java +++ b/Timeline/src/org/sleuthkit/autopsy/timeline/Timeline.java @@ -885,10 +885,10 @@ public class Timeline extends CallableSystemAction implements Presenter.Toolbar, } @Override - public DisplayableItemNode.TYPE getDisplayableItemNodeType() { - return DisplayableItemNode.TYPE.CONTENT; + public boolean isLeafTypeNode() { + return false; } - + @Override public T accept(DisplayableItemNodeVisitor v) { return null; From c45e0d2f782b34fe5b30379211e5aa0bde0060b0 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Wed, 23 Oct 2013 16:07:13 -0400 Subject: [PATCH 088/179] Class name adjustments --- .../org/sleuthkit/autopsy/actions/Bundle.properties | 12 
++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties b/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties index 1ae4a0f597..ad0c347ecb 100755 --- a/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties @@ -4,13 +4,13 @@ GetTagNameDialog.okButton.text=OK GetTagNameDialog.preexistingLabel.text=Pre-existing Tags: GetTagNameDialog.newTagPanel.border.title=New Tag GetTagNameDialog.tagNameLabel.text=Tag Name: -GetTagNameAndCommentDialog.tagLabel.text=Tag: +GetTagNameAndCommentDialog.newTagButton.text=New Tag GetTagNameAndCommentDialog.okButton.text=OK -GetTagNameAndCommentDialog.tagCombo.toolTipText=Select tag to use -# To change this template, choose Tools | Templates -# and open the template in the editor. -GetTagNameAndCommentDialog.cancelButton.text=Cancel GetTagNameAndCommentDialog.commentText.toolTipText=Enter an optional tag comment or leave blank GetTagNameAndCommentDialog.commentText.text= GetTagNameAndCommentDialog.commentLabel.text=Comment: -GetTagNameAndCommentDialog.newTagButton.text=New Tag +# To change this template, choose Tools | Templates +# and open the template in the editor. 
+GetTagNameAndCommentDialog.cancelButton.text=Cancel +GetTagNameAndCommentDialog.tagCombo.toolTipText=Select tag to use +GetTagNameAndCommentDialog.tagLabel.text=Tag: From a8263dd7d71ff880bca3df26311e45fafade67f5 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Wed, 23 Oct 2013 16:30:13 -0400 Subject: [PATCH 089/179] Fix merge of platform.properties file --- nbproject/platform.properties | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/nbproject/platform.properties b/nbproject/platform.properties index 08baaff2f8..a9fa87f749 100644 --- a/nbproject/platform.properties +++ b/nbproject/platform.properties @@ -1,4 +1,3 @@ -<<<<<<< HEAD branding.token=autopsy netbeans-plat-version=7.3.1 suite.dir=${basedir} @@ -119,19 +118,3 @@ disabled.modules=\ org.netbeans.modules.xml.tools.java,\ org.netbeans.spi.java.hints -======= -branding.token=autopsy -netbeans-plat-version=7.3.1 -suite.dir=${basedir} -nbplatform.active.dir=${suite.dir}/netbeans-plat/${netbeans-plat-version} -harness.dir=${nbplatform.active.dir}/harness -bootstrap.url=http://deadlock.netbeans.org/hudson/job/nbms-and-javadoc/lastStableBuild/artifact/nbbuild/netbeans/harness/tasks.jar -autoupdate.catalog.url=http://dlc.sun.com.edgesuite.net/netbeans/updates/${netbeans-plat-version}/uc/final/distribution/catalog.xml.gz -cluster.path=\ - ${nbplatform.active.dir}/harness:\ - ${nbplatform.active.dir}/java:\ - ${nbplatform.active.dir}/platform -disabled.modules=\ - org.netbeans.modules.junit - ->>>>>>> new_tags_api From efbb7d0acfbdfae7afa518e40446ef9339b97300 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Fri, 25 Oct 2013 15:12:34 -0400 Subject: [PATCH 090/179] Added FILE_DONE event to Ingest Manager. 
Comments cleanup --- .../autopsy/ingest/IngestManager.java | 71 ++++++++++++++----- .../autopsy/ingest/IngestServices.java | 4 +- 2 files changed, 57 insertions(+), 18 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java index 2c65dee2a6..070388cc46 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java @@ -86,40 +86,52 @@ public class IngestManager { public enum IngestModuleEvent { /** - * Event sent when the ingest module has been started processing. Second - * argument of the property change fired contains module name String and - * third argument is null. + * Event sent when an ingest module has been started. Second + * argument of the property change is a string form of the module name + * and the third argument is null. */ STARTED, + /** - * Event sent when the ingest module has completed processing. Second - * argument of the property change fired contains module name String and - * third argument is null. + * Event sent when an ingest module has completed processing by its own + * means. Second + * argument of the property change is a string form of the module name + * and the third argument is null. * * This event is generally used by listeners to perform a final data * view refresh (listeners need to query all data from the blackboard). - * */ COMPLETED, + /** - * Event sent when the ingest module has stopped processing, and likely + * Event sent when an ingest module has stopped processing, and likely * not all data has been processed. Second argument of the property - * change fired contains module name String and third argument is null. + * change is a string form of the module name and third argument is null. */ STOPPED, + /** - * Event sent when ingest module has new data. Second argument of the + * Event sent when ingest module posts new data to blackboard or somewhere + * else. 
Second argument of the * property change fired contains ModuleDataEvent object and third * argument is null. The object can contain encapsulated new data * created by the module. Listener can also query new data as needed. - * */ DATA, + /** * Event send when content changed, either its attributes changed, or - * new content children have been added + * new content children have been added. I.e. from ZIP files or Carved files */ - CONTENT_CHANGED + CONTENT_CHANGED, + + + /** + * Event sent when a file has finished going through a pipeline of modules. + * Second argument is the object ID. Third argument is null + */ + FILE_DONE, + }; //ui //Initialized by Installer in AWT thread once the Window System is ready @@ -196,11 +208,29 @@ public class IngestManager { static synchronized void fireModuleEvent(String eventType, String moduleName) { pcs.firePropertyChange(eventType, moduleName, null); } + + + /** + * Fire event when file is done with a pipeline run + * @param objId ID of file that is done + */ + static synchronized void fireFileDone(long objId) { + pcs.firePropertyChange(IngestModuleEvent.FILE_DONE.toString(), objId, null); + } + + /** + * Fire event for ModuleDataEvent (when modules post data to blackboard, etc.) + * @param moduleDataEvent + */ static synchronized void fireModuleDataEvent(ModuleDataEvent moduleDataEvent) { pcs.firePropertyChange(IngestModuleEvent.DATA.toString(), moduleDataEvent, null); } + /** + * Fire event for ModuleContentChanged (when modules create new content that needs to be analyzed) + * @param moduleContentEvent + */ static synchronized void fireModuleContentEvent(ModuleContentEvent moduleContentEvent) { pcs.firePropertyChange(IngestModuleEvent.CONTENT_CHANGED.toString(), moduleContentEvent, null); } @@ -282,7 +312,8 @@ public class IngestManager { } /** - * Starts the needed worker threads. + * Starts the File-level Ingest Module pipeline and the Data Source-level Ingest Modules + * for the queued up data sources and files. 
* * if AbstractFile module is still running, do nothing and allow it to * consume queue otherwise start /restart AbstractFile worker @@ -303,8 +334,10 @@ public class IngestManager { ingestMonitor.start(); } + ///////// + // Start the data source-level ingest modules List newThreads = new ArrayList<>(); - //image ingesters + // cycle through each data source content in the queue while (dataSourceScheduler.hasNext()) { if (allInited == false) { @@ -987,10 +1020,14 @@ public class IngestManager { logger.log(Level.SEVERE, "Error: out of memory from module: " + module.getName(), e); stats.addError(module); } + } //end for every module - + //free the internal file resource after done with every module fileToProcess.close(); + + // notify listeners thsi file is done + fireFileDone(fileToProcess.getId()); int newTotalEnqueuedFiles = fileScheduler.getFilesEnqueuedEst(); if (newTotalEnqueuedFiles > totalEnqueuedFiles) { @@ -1076,7 +1113,7 @@ public class IngestManager { } } - /* Thread that adds content/file and module pairs to queues */ + /* Thread that adds content/file and module pairs to queues. Starts pipelines when done. 
*/ private class EnqueueWorker extends SwingWorker { private List modules; diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java index 45ba9cf4fa..93f9edd626 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java @@ -110,7 +110,8 @@ public class IngestServices { /** - * Fire module data event to notify registered module data event listeners + * Fire module data event to notify registered module data event listeners that there + * is new data of a given type from a module * @param moduleDataEvent module data event, encapsulating blackboard artifact data */ public void fireModuleDataEvent(ModuleDataEvent moduleDataEvent) { @@ -120,6 +121,7 @@ public class IngestServices { /** * Fire module content event to notify registered module content event listeners + * that there is new content (from ZIP file contents, carving, etc.) * @param moduleContentEvent module content event, encapsulating content changed */ public void fireModuleContentEvent(ModuleContentEvent moduleContentEvent) { From 2e8e5b1405d7cd2b19ca578c7f54cd943bf9a907 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Fri, 25 Oct 2013 20:49:40 -0400 Subject: [PATCH 091/179] comment updates --- API-CHANGES.txt | 5 +- .../sleuthkit/autopsy/casemodule/Case.java | 65 +++++++++++++++++++ .../autopsy/ingest/IngestManager.java | 4 +- 3 files changed, 70 insertions(+), 4 deletions(-) diff --git a/API-CHANGES.txt b/API-CHANGES.txt index d2082bf901..c07b51f275 100644 --- a/API-CHANGES.txt +++ b/API-CHANGES.txt @@ -2,5 +2,6 @@ Changes to make to API when we are ready to make backward incompatible changes: - HTMLReport has special API for more context on columns and special handling in REportGenerator. Change all reports to the new API. 
- DataContentViewer.isPreferred does not need isSupported to be passed in -- DataContentViewerHex and STrings can have the public setDataView methods removed in favor of the new private ones -Content.getUniquePath() shoudl not thrown TskException. We should deal with it in the method. +- DataContentViewerHex and Strings can have the public setDataView methods removed in favor of the new private ones +- Content.getUniquePath() should not thrown TskException. We should deal with it in the method. +- Make the list of events that Case fires off to be part of an enum to group them together (like IngestManager does). diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/Case.java b/Core/src/org/sleuthkit/autopsy/casemodule/Case.java index 29151bc6ea..08ec802b95 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/Case.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/Case.java @@ -109,6 +109,71 @@ public class Case implements SleuthkitCase.ErrorObserver { // pcs is initialized in CaseListener constructor private static final PropertyChangeSupport pcs = new PropertyChangeSupport(Case.class); + /** + * Events that the case module will fire. Event listeners can get the event + * name by using String returned by toString() method on a specific event. + */ + /* @@@ BC: I added this as a place holder for what I want this to be, but + * this is not the time to change it. We'll start using this at a major release + * version. + */ + private enum CaseModuleEvent_DoNotUse { + /** + * Property name that indicates the name of the current case has changed. + * Fired with the case is renamed, and when the current case is + * opened/closed/changed. The value is a String: the name of the case. The + * empty string ("") is used for no open case. + */ + // @@@ BC: I propose that this is no longer called for case open/close. + CASE_NAME("caseName"), + + /** + * Property name that indicates the number of the current case has changed. + * Fired with the case number is changed. 
The value is an int: the number of + * the case. -1 is used for no case number set. + */ + CASE_NUMBER("caseNumber"), + + /** + * Property name that indicates the examiner of the current case has + * changed. Fired with the case examiner is changed. The value is a String: + * the name of the examiner. The empty string ("") is used for no examiner + * set. + */ + CASE_EXAMINER("caseExaminer"), + + /** + * Property name that indicates a new data source (image, disk or local + * file) has been added to the current case. The new value is the + * newly-added instance of the new data source, and the old value is always + * null. + */ + CASE_ADD_DATA_SOURCE("addDataSource"), + + /** + * Property name that indicates a data source has been removed from the + * current case. The "old value" is the (int) content ID of the data source + * that was removed, the new value is the instance of the data source. + */ + CASE_DEL_DATA_SOURCE("removeDataSource"), + + /** + * Property name that indicates the currently open case has changed. The new + * value is the instance of the opened Case, or null if there is no open + * case. The old value is the instance of the closed Case, or null if there + * was no open case. + */ + CASE_CURRENT_CASE("currentCase"); + + private String name; + CaseModuleEvent_DoNotUse(String name) { + this.name = name; + } + + public String getName() { + return this.name; + } + }; private String name; diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java index 2c65dee2a6..32dd64d1bb 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java @@ -185,9 +185,9 @@ public class IngestManager { } /** - * Add property change listener to listen to ingest events + * Add property change listener to listen to ingest events as defined in IngestModuleEvent. 
* - * @param l PropertyChangeListener to schedule + * @param l PropertyChangeListener to register */ public static synchronized void addPropertyChangeListener(final PropertyChangeListener l) { pcs.addPropertyChangeListener(l); From 170f40b77d45f63f5b23ad4e47845ba319951d15 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Fri, 25 Oct 2013 20:53:08 -0400 Subject: [PATCH 092/179] reorganized logic inside of IE parser and order of modules in RecentActivity. --- .../autopsy/recentactivity/ExtractIE.java | 366 +++++++++--------- .../recentactivity/RAImageIngestModule.java | 21 +- 2 files changed, 183 insertions(+), 204 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java index f7e9de53b6..f4528d8587 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java @@ -73,9 +73,6 @@ public class ExtractIE extends Extract { private String PASCO_LIB_PATH; private String JAVA_PATH; - // List of Pasco result files for this data source - private List pascoResults; - boolean pascoFound = false; final public static String MODULE_VERSION = "1.0"; private static final SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); @@ -96,27 +93,34 @@ public class ExtractIE extends Extract { @Override public void process(PipelineContextpipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { - this.extractAndRunPasco(dataSource, controller); - this.getBookmark(dataSource, controller); - this.getCookie(dataSource, controller); - this.getRecentDocuments(dataSource, controller); - this.getHistory(pascoResults); + this.getBookmark(dataSource, controller); + this.getCookie(dataSource, controller); + this.getRecentDocuments(dataSource, controller); + this.getHistory(dataSource, controller); } - //Favorites section - // This gets 
the favorite info + /** + * Finds the files storing bookmarks and creates artifacts + * @param dataSource + * @param controller + */ private void getBookmark(Content dataSource, IngestDataSourceWorkerController controller) { org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); List favoritesFiles = null; try { favoritesFiles = fileManager.findFiles(dataSource, "%.url", "Favorites"); } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Error fetching 'index.data' files for Internet Explorer history.", ex); + logger.log(Level.WARNING, "Error fetching 'url' files for Internet Explorer bookmarks.", ex); this.addErrorMessage(this.getName() + ": Error getting Internet Explorer Bookmarks."); return; } for (AbstractFile favoritesFile : favoritesFiles) { + if (favoritesFile.getSize() == 0) { + continue; + } + + // @@@ WHY DON"T WE PARSE THIS FILE more intelligently. It's text-based if (controller.isCancelled()) { break; } @@ -145,8 +149,6 @@ public class ExtractIE extends Extract { String domain = Util.extractDomain(url); Collection bbattributes = new ArrayList(); - //TODO revisit usage of deprecated constructor as per TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "RecentActivity", "Last Visited", datetime)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", datetime)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", url)); //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", EscapeUtil.decodeURL(url))); @@ -154,21 +156,22 @@ public class ExtractIE extends Extract { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "Internet Explorer")); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", domain)); 
this.addArtifact(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, favoritesFile, bbattributes); - - services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK)); } + services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK)); } - //Cookies section - // This gets the cookies info - private void getCookie(Content dataSource, IngestDataSourceWorkerController controller) { - + /** + * Finds files that store cookies and adds artifacts for them. + * @param dataSource + * @param controller + */ + private void getCookie(Content dataSource, IngestDataSourceWorkerController controller) { org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); List cookiesFiles = null; try { cookiesFiles = fileManager.findFiles(dataSource, "%.txt", "Cookies"); } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Error fetching 'index.data' files for Internet Explorer history."); + logger.log(Level.WARNING, "Error finding cookie files for IE"); this.addErrorMessage(this.getName() + ": " + "Error getting Internet Explorer cookie files."); return; } @@ -177,10 +180,13 @@ public class ExtractIE extends Extract { if (controller.isCancelled()) { break; } - Content fav = cookiesFile; - byte[] t = new byte[(int) fav.getSize()]; + if (cookiesFile.getSize() == 0) { + continue; + } + + byte[] t = new byte[(int) cookiesFile.getSize()]; try { - final int bytesRead = fav.read(t, 0, fav.getSize()); + final int bytesRead = cookiesFile.read(t, 0, cookiesFile.getSize()); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error reading bytes of Internet Explorer cookie.", ex); this.addErrorMessage(this.getName() + ": Error reading Internet Explorer cookie " + cookiesFile.getName()); @@ -197,25 +203,23 @@ public class ExtractIE extends Extract { String domain = Util.extractDomain(url); Collection bbattributes = new ArrayList(); - 
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", url)); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", EscapeUtil.decodeURL(url))); - //TODO Revisit usage of deprecated Constructor as of TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", "Last Visited", datetime)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", (name != null) ? name : "")); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", datetime)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "RecentActivity", value)); - //TODO Revisit usage of deprecated Constructor as of TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", "Title", (name != null) ? name : "")); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", (name != null) ? name : "")); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "Internet Explorer")); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", url)); + //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", EscapeUtil.decodeURL(url))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", domain)); this.addArtifact(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes); } - services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE)); } - //Recent Documents section - // This gets the recent object info + /** + * Find the documents that Windows stores about recent documents and make artifacts. 
+ * @param dataSource + * @param controller + */ private void getRecentDocuments(Content dataSource, IngestDataSourceWorkerController controller) { org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); @@ -232,12 +236,12 @@ public class ExtractIE extends Extract { if (controller.isCancelled()) { break; } - Content fav = recentFile; - if (fav.getSize() == 0) { + + if (recentFile.getSize() == 0) { continue; } JLNK lnk = null; - JLnkParser lnkParser = new JLnkParser(new ReadContentInputStream(fav), (int) fav.getSize()); + JLnkParser lnkParser = new JLnkParser(new ReadContentInputStream(recentFile), (int) recentFile.getSize()); try { lnk = lnkParser.parse(); } catch (JLnkParserException e) { @@ -258,30 +262,28 @@ public class ExtractIE extends Extract { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", Util.getFileName(path))); long id = Util.findID(dataSource, path); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "RecentActivity", id)); - //TODO Revisit usage of deprecated constructor as per TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", "Date Created", datetime)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", datetime)); this.addArtifact(ARTIFACT_TYPE.TSK_RECENT_OBJECT, recentFile, bbattributes); } - services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_RECENT_OBJECT)); } - - - private void extractAndRunPasco(Content dataSource, IngestDataSourceWorkerController controller) { - pascoResults = new ArrayList(); - + + /** + * Locates index.dat files, runs Pasco on them, and creates artifacts. 
+ * @param dataSource + * @param controller + */ + private void getHistory(Content dataSource, IngestDataSourceWorkerController controller) { logger.log(Level.INFO, "Pasco results path: " + moduleTempResultsDir); + boolean foundHistory = false; final File pascoRoot = InstalledFileLocator.getDefault().locate("pasco2", ExtractIE.class.getPackage().getName(), false); if (pascoRoot == null) { - logger.log(Level.SEVERE, "Pasco2 not found"); - pascoFound = false; + this.addErrorMessage(this.getName() + ": Unable to get IE History: pasco not found"); + logger.log(Level.SEVERE, "Error finding pasco program "); return; - } else { - pascoFound = true; - } - + } + final String pascoHome = pascoRoot.getAbsolutePath(); logger.log(Level.INFO, "Pasco2 home: " + pascoHome); @@ -291,7 +293,6 @@ public class ExtractIE extends Extract { File resultsDir = new File(moduleTempResultsDir); resultsDir.mkdirs(); - // get index.dat files org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); List indexFiles = null; @@ -316,7 +317,6 @@ public class ExtractIE extends Extract { temps = RAImageIngestModule.getRATempPath(currentCase, "IE") + File.separator + indexFileName; File datFile = new File(temps); if (controller.isCancelled()) { - datFile.delete(); break; } try { @@ -333,7 +333,9 @@ public class ExtractIE extends Extract { //At this point pasco2 proccessed the index files. //Now fetch the results, parse them and the delete the files. 
if (bPascProcSuccess) { - pascoResults.add(filename); + parsePascoOutput(indexFile, filename); + foundHistory = true; + //Delete index.dat file since it was succcessfully by Pasco datFile.delete(); } else { @@ -341,22 +343,26 @@ public class ExtractIE extends Extract { this.addErrorMessage(this.getName() + ": Error processing Internet Explorer history."); } } + + if (foundHistory) { + services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY)); + } } - //Simple wrapper to JavaSystemCaller.Exec() to execute pasco2 jar - // TODO: Hardcoded command args/path needs to be removed. Maybe set some constants and set env variables for classpath - // I'm not happy with this code. Can't stand making a system call, is not an acceptable solution but is a hack for now. - private boolean executePasco(String indexFilePath, String filename) { - if (pascoFound == false) { - return false; - } + /** + * Execute pasco on a single file that has been saved to disk. 
+ * @param indexFilePath Path to local index.dat file to analyze + * @param outputFileName Name of file to save output to + * @return false on error + */ + private boolean executePasco(String indexFilePath, String outputFileName) { boolean success = true; Writer writer = null; try { - final String pascoOutFile = moduleTempResultsDir + File.separator + filename; - logger.log(Level.INFO, "Writing pasco results to: " + pascoOutFile); - writer = new FileWriter(pascoOutFile); + final String outputFileFullPath = moduleTempResultsDir + File.separator + outputFileName; + logger.log(Level.INFO, "Writing pasco results to: " + outputFileFullPath); + writer = new FileWriter(outputFileFullPath); execPasco = new ExecUtil(); execPasco.execute(writer, JAVA_PATH, "-cp", PASCO_LIB_PATH, @@ -379,124 +385,117 @@ public class ExtractIE extends Extract { } } } - return success; } - private void getHistory(List filenames) { - // Make sure pasco and the results exist - File rFile = new File(moduleTempResultsDir); - if (pascoFound == false || ! 
rFile.exists()) { + /** + * parse Pasco output and create artifacts + * @param origFile Original index.dat file that was analyzed to get this output + * @param pascoOutputFileName name of pasco output file + */ + private void parsePascoOutput(AbstractFile origFile, String pascoOutputFileName) { + + String fnAbs = moduleTempResultsDir + File.separator + pascoOutputFileName; + + File file = new File(fnAbs); + if (file.exists() == false) { + this.addErrorMessage(this.getName() + ": Pasco output not found: " + file.getName()); + logger.log(Level.WARNING, "Pasco Output not found: " + file.getPath()); return; } - //Give me a list of pasco results in that directory - File[] pascoFiles = rFile.listFiles(); - for (File file : pascoFiles) { - String fileName = file.getName(); - if (!filenames.contains(fileName)) { - logger.log(Level.INFO, "Found a temp Pasco result file not in the list: {0}", fileName); - continue; - } - - // Make sure the file the is not empty or the Scanner will - // throw a "No Line found" Exception - if (file.length() == 0) { - continue; - } - - long artObjId = Long.parseLong(fileName.substring(fileName.indexOf(".") + 1, fileName.lastIndexOf("."))); - Scanner fileScanner; - try { - fileScanner = new Scanner(new FileInputStream(file.toString())); - } catch (FileNotFoundException ex) { - this.addErrorMessage(this.getName() + ": Error parsing IE history entry " + file.getName()); - logger.log(Level.WARNING, "Unable to find the Pasco file at " + file.getPath(), ex); - continue; - } - - while (fileScanner.hasNext()) { - String line = fileScanner.nextLine(); - if (!line.startsWith("URL")) { - continue; - } - - String[] lineBuff = line.split("\\t"); - - if (lineBuff.length < 4) { - logger.log(Level.INFO, "Found unrecognized IE history format."); - continue; - } - - String ddtime = lineBuff[2]; - String actime = lineBuff[3]; - Long ftime = (long) 0; - String user = ""; - String realurl = ""; - String domain = ""; - - /* We've seen two types of lines: - * URL 
http://XYZ.com .... - * URL Visited: Joe@http://XYZ.com .... - */ - if (lineBuff[1].contains("@")) { - String url[] = lineBuff[1].split("@", 2); - user = url[0]; - user = user.replace("Visited:", ""); - user = user.replace(":Host:", ""); - user = user.replaceAll("(:)(.*?)(:)", ""); - user = user.trim(); - realurl = url[1]; - realurl = realurl.replace("Visited:", ""); - realurl = realurl.replaceAll(":(.*?):", ""); - realurl = realurl.replace(":Host:", ""); - realurl = realurl.trim(); - } else { - user = ""; - realurl = lineBuff[1].trim(); - } - - domain = Util.extractDomain(realurl); - - if (!ddtime.isEmpty()) { - ddtime = ddtime.replace("T", " "); - ddtime = ddtime.substring(ddtime.length() - 5); - } - - if (!actime.isEmpty()) { - try { - Long epochtime = dateFormatter.parse(actime).getTime(); - ftime = epochtime.longValue(); - ftime = ftime / 1000; - } catch (ParseException e) { - this.addErrorMessage(this.getName() + ": Error parsing Internet Explorer History entry."); - logger.log(Level.SEVERE, "Error parsing Pasco results.", e); - } - } - - try { - BlackboardArtifact bbart = tskCase.getContentById(artObjId).newArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY); - Collection bbattributes = new ArrayList(); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", realurl)); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", EscapeUtil.decodeURL(realurl))); - - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", ftime)); - - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REFERRER.getTypeID(), "RecentActivity", "")); - - // bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", ddtime)); - - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "Internet Explorer")); - bbattributes.add(new 
BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", domain)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME.getTypeID(), "RecentActivity", user)); - bbart.addAttributes(bbattributes); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error writing Internet Explorer web history artifact to the blackboard.", ex); - } - } - fileScanner.close(); + // Make sure the file the is not empty or the Scanner will + // throw a "No Line found" Exception + if (file.length() == 0) { + return; } - services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY)); + + Scanner fileScanner; + try { + fileScanner = new Scanner(new FileInputStream(file.toString())); + } catch (FileNotFoundException ex) { + this.addErrorMessage(this.getName() + ": Error parsing IE history entry " + file.getName()); + logger.log(Level.WARNING, "Unable to find the Pasco file at " + file.getPath(), ex); + return; + } + + while (fileScanner.hasNext()) { + String line = fileScanner.nextLine(); + if (!line.startsWith("URL")) { + continue; + } + + String[] lineBuff = line.split("\\t"); + + if (lineBuff.length < 4) { + logger.log(Level.INFO, "Found unrecognized IE history format."); + continue; + } + + String ddtime = lineBuff[2]; + String actime = lineBuff[3]; + Long ftime = (long) 0; + String user = ""; + String realurl = ""; + String domain = ""; + + /* We've seen two types of lines: + * URL http://XYZ.com .... + * URL Visited: Joe@http://XYZ.com .... 
+ */ + if (lineBuff[1].contains("@")) { + String url[] = lineBuff[1].split("@", 2); + user = url[0]; + user = user.replace("Visited:", ""); + user = user.replace(":Host:", ""); + user = user.replaceAll("(:)(.*?)(:)", ""); + user = user.trim(); + realurl = url[1]; + realurl = realurl.replace("Visited:", ""); + realurl = realurl.replaceAll(":(.*?):", ""); + realurl = realurl.replace(":Host:", ""); + realurl = realurl.trim(); + } else { + user = ""; + realurl = lineBuff[1].trim(); + } + + domain = Util.extractDomain(realurl); + + if (!ddtime.isEmpty()) { + ddtime = ddtime.replace("T", " "); + ddtime = ddtime.substring(ddtime.length() - 5); + } + + if (!actime.isEmpty()) { + try { + Long epochtime = dateFormatter.parse(actime).getTime(); + ftime = epochtime.longValue(); + ftime = ftime / 1000; + } catch (ParseException e) { + this.addErrorMessage(this.getName() + ": Error parsing Internet Explorer History entry."); + logger.log(Level.SEVERE, "Error parsing Pasco results.", e); + } + } + + try { + BlackboardArtifact bbart = origFile.newArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY); + Collection bbattributes = new ArrayList<>(); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", realurl)); + //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", EscapeUtil.decodeURL(realurl))); + + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", ftime)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REFERRER.getTypeID(), "RecentActivity", "")); + // @@@ NOte that other browser modules are adding NAME in hre for the title + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "Internet Explorer")); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", domain)); + bbattributes.add(new 
BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME.getTypeID(), "RecentActivity", user)); + bbart.addAttributes(bbattributes); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Error writing Internet Explorer web history artifact to the blackboard.", ex); + } + } + fileScanner.close(); } @Override @@ -506,23 +505,6 @@ public class ExtractIE extends Extract { @Override public void complete() { - // Delete all the results when complete - /*for (String file : pascoResults) { - String filePath = moduleTempResultsDir + File.separator + file; - try { - File f = new File(filePath); - if (f.exists() && f.canWrite()) { - f.delete(); - } else { - logger.log(Level.WARNING, "Unable to delete file " + filePath); - } - } catch (SecurityException ex) { - logger.log(Level.WARNING, "Incorrect permission to delete file " + filePath, ex); - } - } - */ - pascoResults.clear(); - logger.info("Internet Explorer extract has completed."); } @Override @@ -545,4 +527,4 @@ public class ExtractIE extends Extract { public boolean hasBackgroundJobsRunning() { return false; } -} +} \ No newline at end of file diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java index 0a63894b26..ae0584fbf0 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java @@ -142,18 +142,15 @@ public final class RAImageIngestModule extends IngestModuleDataSource { logger.log(Level.INFO, "init() {0}", this.toString()); services = IngestServices.getDefault(); - final Extract registry = new ExtractRegistry(); - final Extract iexplore = new ExtractIE(); - final Extract chrome = new Chrome(); - final Extract firefox = new Firefox(); - final Extract SEUQA = new SearchEngineURLQueryAnalyzer(); - - modules.add(chrome); - modules.add(firefox); - modules.add(registry); - 
modules.add(iexplore); - modules.add(SEUQA); - + modules.add(new Chrome()); + modules.add(new Firefox()); + modules.add(new ExtractIE()); + // this needs to run after the web browser modules + modules.add(new SearchEngineURLQueryAnalyzer()); + + // this runs last because it is slowest + modules.add(new ExtractRegistry()); + for (Extract module : modules) { try { module.init(initContext); From c0c307c09982a9015c5bfe2fc077e640bfb40846 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Fri, 25 Oct 2013 20:54:46 -0400 Subject: [PATCH 093/179] removed file --- 0 | Bin 22 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100755 0 diff --git a/0 b/0 deleted file mode 100755 index a88ffddb669134339861eee059758bd637150dd3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 22 dcmWe(kdNS%()pmcFTlBDLvi|_Y42?q7ywUt2oL}O From f9485d3ba2c752ab7f8c9a6dd923e67f3aa2f5f8 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Fri, 25 Oct 2013 23:07:26 -0400 Subject: [PATCH 094/179] Minor cleanup and comments --- .../recentactivity/ExtractRegistry.java | 167 ++++++++++-------- 1 file changed, 89 insertions(+), 78 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java index f047928918..e9edf2759d 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java @@ -111,42 +111,58 @@ public class ExtractRegistry extends Extract { return MODULE_VERSION; } + + /** + * Search for the registry hives on the system. + * @param dataSource Data source to search for hives in. 
+ * @return List of registry hives + */ + private List findRegistryFiles(Content dataSource) { + List allRegistryFiles = new ArrayList<>(); + org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); + + // find the user-specific ntuser-dat files + try { + allRegistryFiles.addAll(fileManager.findFiles(dataSource, "ntuser.dat")); + } + catch (TskCoreException ex) { + logger.log(Level.WARNING, "Error fetching 'ntuser.dat' file."); + } + + // find the system hives' + String[] regFileNames = new String[] {"system", "software", "security", "sam", "default"}; + for (String regFileName : regFileNames) { + try { + allRegistryFiles.addAll(fileManager.findFiles(dataSource, regFileName, "/system32/config")); + } + catch (TskCoreException ex) { + String msg = "Error fetching registry file: " + regFileName; + logger.log(Level.WARNING, msg); + this.addErrorMessage(this.getName() + ": " + msg); + } + } + return allRegistryFiles; + } + /** * Identifies registry files in the database by name, runs regripper on them, and parses the output. 
* * @param dataSource * @param controller */ - private void getRegistryFiles(Content dataSource, IngestDataSourceWorkerController controller) { - org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); - List allRegistryFiles = new ArrayList<>(); - try { - allRegistryFiles.addAll(fileManager.findFiles(dataSource, "ntuser.dat")); - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Error fetching 'ntuser.dat' file."); - } - - // try to find each of the listed registry files whose parent directory - // is like '/system32/config' - String[] regFileNames = new String[] {"system", "software", "security", "sam", "default"}; - for (String regFileName : regFileNames) { - try { - allRegistryFiles.addAll(fileManager.findFiles(dataSource, regFileName, "/system32/config")); - } catch (TskCoreException ex) { - String msg = "Error fetching registry file: " + regFileName; - logger.log(Level.WARNING, msg); - this.addErrorMessage(this.getName() + ": " + msg); - } - } - ExtractUSB extrctr = new ExtractUSB(); + private void analyzeRegistryFiles(Content dataSource, IngestDataSourceWorkerController controller) { + List allRegistryFiles = findRegistryFiles(dataSource); + + // open the log file FileWriter logFile = null; try { logFile = new FileWriter(RAImageIngestModule.getRAOutputPath(currentCase, "reg") + File.separator + "regripper-info.txt"); } catch (IOException ex) { java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); - logFile = null; } + ExtractUSB extrctr = new ExtractUSB(); + int j = 0; for (AbstractFile regFile : allRegistryFiles) { String regFileName = regFile.getName(); @@ -160,27 +176,35 @@ public class ExtractRegistry extends Extract { this.addErrorMessage(this.getName() + ": Error analyzing registry file " + regFileName); continue; } + + if (controller.isCancelled()) { + break; + } try { if (logFile != null) { logFile.write(Integer.toString(j-1) + "\t" + 
regFile.getUniquePath() + "\n"); } - } catch (TskCoreException ex) { - java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); - } catch (IOException ex) { + } + catch (TskCoreException | IOException ex) { java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); } logger.log(Level.INFO, moduleName + "- Now getting registry information from " + regFileNameLocal); RegOutputFiles regOutputFiles = executeRegRip(regFileNameLocal, outputPathBase); + if (controller.isCancelled()) { + break; + } + + // parse the autopsy-specific output if (regOutputFiles.autopsyPlugins.isEmpty() == false) { if (parseAutopsyPluginOutput(regOutputFiles.autopsyPlugins, regFile.getId(), extrctr) == false) { this.addErrorMessage(this.getName() + ": Failed parsing registry file results " + regFileName); - continue; } } + // create a RAW_TOOL artifact for the full output if (regOutputFiles.fullPlugins.isEmpty() == false) { try { BlackboardArtifact art = regFile.newArtifact(ARTIFACT_TYPE.TSK_TOOL_OUTPUT.getTypeID()); @@ -218,6 +242,8 @@ public class ExtractRegistry extends Extract { java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); } } + + // delete the hive regFileNameLocalFile.delete(); } @@ -236,48 +262,55 @@ public class ExtractRegistry extends Extract { } // TODO: Hardcoded command args/path needs to be removed. Maybe set some constants and set env variables for classpath - // I'm not happy with this code. Can't stand making a system call, is not an acceptable solution but is a hack for now. /** * Execute regripper on the given registry. * @param regFilePath Path to local copy of registry * @param outFilePathBase Path to location to save output file to. 
Base name that will be extended on */ private RegOutputFiles executeRegRip(String regFilePath, String outFilePathBase) { - Writer writer = null; - String type = ""; - String fullType = ""; - RegOutputFiles regOutputFiles = new RegOutputFiles(); + String autopsyType = ""; // Type argument for rr for autopsy-specific modules + String fullType = ""; // Type argument for rr for full set of modules if (regFilePath.toLowerCase().contains("system")) { - type = "autopsysystem"; + autopsyType = "autopsysystem"; fullType = "system"; - } else if (regFilePath.toLowerCase().contains("software")) { - type = "autopsysoftware"; + } + else if (regFilePath.toLowerCase().contains("software")) { + autopsyType = "autopsysoftware"; fullType = "software"; - } else if (regFilePath.toLowerCase().contains("ntuser")) { - type = "autopsy"; + } + else if (regFilePath.toLowerCase().contains("ntuser")) { + autopsyType = "autopsy"; fullType = "ntuser"; - } else if (regFilePath.toLowerCase().contains("default")) { + } + else if (regFilePath.toLowerCase().contains("default")) { //type = "1default"; - } else if (regFilePath.toLowerCase().contains("sam")) { + } + else if (regFilePath.toLowerCase().contains("sam")) { fullType = "sam"; - } else if (regFilePath.toLowerCase().contains("security")) { + } + else if (regFilePath.toLowerCase().contains("security")) { fullType = "security"; - } else { + } + else { // @@@ Seems like we should error out or something... 
- type = "1default"; + autopsyType = "1default"; } - if (!type.isEmpty() && rrFound) { + RegOutputFiles regOutputFiles = new RegOutputFiles(); + + // run the autopsy-specific set of modules + if (!autopsyType.isEmpty() && rrFound) { // TODO - add error messages + Writer writer = null; try { regOutputFiles.autopsyPlugins = outFilePathBase + "-autopsy.txt"; logger.log(Level.INFO, "Writing RegRipper results to: " + regOutputFiles.autopsyPlugins); writer = new FileWriter(regOutputFiles.autopsyPlugins); execRR = new ExecUtil(); execRR.execute(writer, RR_PATH, - "-r", regFilePath, "-f", type); + "-r", regFilePath, "-f", autopsyType); } catch (IOException ex) { logger.log(Level.SEVERE, "Unable to RegRipper and process parse some registry files.", ex); this.addErrorMessage(this.getName() + ": Failed to analyze registry file"); @@ -297,7 +330,9 @@ public class ExtractRegistry extends Extract { logger.log(Level.INFO, "Not running Autopsy-only modules on hive"); } + // run the full set of rr modules if (!fullType.isEmpty() && rrFullFound) { + Writer writer = null; try { regOutputFiles.fullPlugins = outFilePathBase + "-full.txt"; logger.log(Level.INFO, "Writing Full RegRipper results to: " + regOutputFiles.fullPlugins); @@ -327,22 +362,16 @@ public class ExtractRegistry extends Extract { } // @@@ VERIFY that we are doing the right thing when we parse multiple NTUSER.DAT - private boolean parseAutopsyPluginOutput(String regRecord, long orgId, ExtractUSB extrctr) { FileInputStream fstream = null; try { - Case currentCase = Case.getCurrentCase(); // get the most updated case SleuthkitCase tempDb = currentCase.getSleuthkitCase(); // Read the file in and create a Document and elements File regfile = new File(regRecord); fstream = new FileInputStream(regfile); - //InputStreamReader fstreamReader = new InputStreamReader(fstream, "UTF-8"); - //BufferedReader input = new BufferedReader(fstreamReader); - //logger.log(Level.INFO, "using encoding " + fstreamReader.getEncoding()); String 
regString = new Scanner(fstream, "UTF-8").useDelimiter("\\Z").next(); - //regfile.delete(); String startdoc = ""; String result = regString.replaceAll("----------------------------------------", ""); result = result.replaceAll("\\n", ""); @@ -386,7 +415,6 @@ public class ExtractRegistry extends Extract { Element artroot = (Element) artroots.item(0); NodeList myartlist = artroot.getChildNodes(); String winver = ""; - String installdate = ""; for (int j = 0; j < myartlist.getLength(); j++) { Node artchild = myartlist.item(j); // If it has attributes, then it is an Element (based off API) @@ -402,7 +430,9 @@ public class ExtractRegistry extends Extract { // bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", context, name)); // bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "RecentActivity", context, value)); // bbart.addAttributes(bbattributes); - } else if ("usb".equals(context)) { + // @@@ BC: Why are we ignoring this... 
+ } + else if ("usb".equals(context)) { try { Long utime = null; utime = Long.parseLong(name); @@ -410,13 +440,8 @@ public class ExtractRegistry extends Extract { utime = Long.valueOf(Tempdate); BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(ARTIFACT_TYPE.TSK_DEVICE_ATTACHED); - //TODO Revisit usage of deprecated constructor as per TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", context, utime)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", utime)); String dev = artnode.getAttribute("dev"); - //TODO Revisit usage of deprecated constructor as per TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), "RecentActivity", context, dev)); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_ID.getTypeID(), "RecentActivity", context, value)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), "RecentActivity", dev)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_ID.getTypeID(), "RecentActivity", value)); if (dev.toLowerCase().contains("vid")) { @@ -430,7 +455,8 @@ public class ExtractRegistry extends Extract { } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error adding device attached artifact to blackboard."); } - } else if ("uninstall".equals(context)) { + } + else if ("uninstall".equals(context)) { Long ftime = null; try { Long epochtime = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy").parse(name).getTime(); @@ -440,11 +466,6 @@ public class ExtractRegistry extends Extract { logger.log(Level.WARNING, "Failed to parse epoch time for installed program artifact."); } - //TODO Revisit usage of deprecated constructor as per TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "RecentActivity", context, time)); - //bbattributes.add(new 
BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", context, value)); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", context, ftime)); - try { if (time != null) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", time)); @@ -456,7 +477,8 @@ public class ExtractRegistry extends Extract { } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error adding installed program artifact to blackboard."); } - } else if ("WinVersion".equals(context)) { + } + else if ("WinVersion".equals(context)) { if (name.contains("ProductName")) { winver = value; @@ -465,7 +487,6 @@ public class ExtractRegistry extends Extract { winver = winver + " " + value; } if (name.contains("InstallDate")) { - installdate = value; Long installtime = null; try { Long epochtime = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy").parse(value).getTime(); @@ -476,9 +497,6 @@ public class ExtractRegistry extends Extract { logger.log(Level.SEVERE, "RegRipper::Conversion on DateTime -> ", e); } try { - //TODO Revisit usage of deprecated constructor as per TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", context, winver)); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", context, installtime)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", winver)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", installtime)); BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(ARTIFACT_TYPE.TSK_INSTALLED_PROG); @@ -487,14 +505,10 @@ public class ExtractRegistry extends Extract { logger.log(Level.SEVERE, "Error adding installed program artifact to blackboard."); } } - } else if ("office".equals(context)) { + } + else if 
("office".equals(context)) { try { BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(ARTIFACT_TYPE.TSK_RECENT_OBJECT); - //TODO Revisit usage of deprecated constructor as per TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "RecentActivity", context, time)); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", context, name)); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "RecentActivity", context, value)); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", context, artnode.getName())); if (time != null) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", time)); } @@ -535,9 +549,8 @@ public class ExtractRegistry extends Extract { } @Override - public void process(PipelineContextpipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { - this.getRegistryFiles(dataSource, controller); + analyzeRegistryFiles(dataSource, controller); } @Override @@ -547,7 +560,6 @@ public class ExtractRegistry extends Extract { @Override public void complete() { - logger.info("Registry Extract has completed."); } @Override @@ -556,7 +568,6 @@ public class ExtractRegistry extends Extract { execRR.stop(); execRR = null; } - } @Override From 08e324a635afec3059b11473bc8e30ee296d600d Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Sat, 26 Oct 2013 20:35:15 -0400 Subject: [PATCH 095/179] more comment updates --- API-CHANGES.txt | 1 + .../recentactivity/ExtractRegistry.java | 40 ++++++------------- 2 files changed, 13 insertions(+), 28 deletions(-) diff --git a/API-CHANGES.txt b/API-CHANGES.txt index c07b51f275..8d461135f5 100644 --- a/API-CHANGES.txt +++ b/API-CHANGES.txt @@ -5,3 +5,4 @@ Changes to make to API when we are ready to make backward incompatible changes: - DataContentViewerHex and Strings 
can have the public setDataView methods removed in favor of the new private ones - Content.getUniquePath() should not thrown TskException. We should deal with it in the method. - Make the list of events that Case fires off to be part of an enum to group them together (like IngestManager does). +- Sub-modules in RecentActivity have a bunch of public/protected variables that do not need to be. (i.e. ExtractRegistry.rrFullFound). diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java index e9edf2759d..8da69f4425 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java @@ -32,7 +32,6 @@ import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.openide.modules.InstalledFileLocator; -import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.ExecUtil; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.PlatformUtil; @@ -40,7 +39,6 @@ import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; import org.sleuthkit.autopsy.ingest.IngestModuleInit; -import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.PipelineContext; import org.sleuthkit.autopsy.recentactivity.ExtractUSB.USBInfo; import org.sleuthkit.datamodel.*; @@ -54,17 +52,18 @@ import org.xml.sax.InputSource; import org.xml.sax.SAXException; /** - * Extracting windows registry data using regripper + * Extract windows registry data using regripper. + * Runs two versions of regripper. 
One is the generally available set of plug-ins + * and the second is a set that were customized for Autopsy to produce a more structured + * output of XML so that we can parse and turn into blackboard artifacts. */ public class ExtractRegistry extends Extract { public Logger logger = Logger.getLogger(this.getClass().getName()); private String RR_PATH; private String RR_FULL_PATH; - boolean rrFound = false; - boolean rrFullFound = false; - private int sysid; - private IngestServices services; + boolean rrFound = false; // true if we found the Autopsy-specific version of regripper + boolean rrFullFound = false; // true if we found the full version of regripper final public static String MODULE_VERSION = "1.0"; private ExecUtil execRR; @@ -130,7 +129,7 @@ public class ExtractRegistry extends Extract { } // find the system hives' - String[] regFileNames = new String[] {"system", "software", "security", "sam", "default"}; + String[] regFileNames = new String[] {"system", "software", "security", "sam"}; for (String regFileName : regFileNames) { try { allRegistryFiles.addAll(fileManager.findFiles(dataSource, regFileName, "/system32/config")); @@ -261,17 +260,17 @@ public class ExtractRegistry extends Extract { public String fullPlugins = ""; } - // TODO: Hardcoded command args/path needs to be removed. Maybe set some constants and set env variables for classpath /** * Execute regripper on the given registry. * @param regFilePath Path to local copy of registry * @param outFilePathBase Path to location to save output file to. 
Base name that will be extended on */ private RegOutputFiles executeRegRip(String regFilePath, String outFilePathBase) { - String autopsyType = ""; // Type argument for rr for autopsy-specific modules String fullType = ""; // Type argument for rr for full set of modules + RegOutputFiles regOutputFiles = new RegOutputFiles(); + if (regFilePath.toLowerCase().contains("system")) { autopsyType = "autopsysystem"; fullType = "system"; @@ -283,10 +282,7 @@ public class ExtractRegistry extends Extract { else if (regFilePath.toLowerCase().contains("ntuser")) { autopsyType = "autopsy"; fullType = "ntuser"; - } - else if (regFilePath.toLowerCase().contains("default")) { - //type = "1default"; - } + } else if (regFilePath.toLowerCase().contains("sam")) { fullType = "sam"; } @@ -294,11 +290,8 @@ public class ExtractRegistry extends Extract { fullType = "security"; } else { - // @@@ Seems like we should error out or something... - autopsyType = "1default"; + return regOutputFiles; } - - RegOutputFiles regOutputFiles = new RegOutputFiles(); // run the autopsy-specific set of modules if (!autopsyType.isEmpty() && rrFound) { @@ -326,8 +319,6 @@ public class ExtractRegistry extends Extract { } } } - } else { - logger.log(Level.INFO, "Not running Autopsy-only modules on hive"); } // run the full set of rr modules @@ -355,9 +346,8 @@ public class ExtractRegistry extends Extract { } } } - } else { - logger.log(Level.INFO, "Not running original RR modules on hive"); } + return regOutputFiles; } @@ -519,13 +509,8 @@ public class ExtractRegistry extends Extract { } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error adding recent object artifact to blackboard."); } - - } else { - //BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(sysid); - //bbart.addAttributes(bbattributes); } } - } } return true; @@ -555,7 +540,6 @@ public class ExtractRegistry extends Extract { @Override public void init(IngestModuleInit initContext) { - services = 
IngestServices.getDefault(); } @Override From 6767cc42abbe762792b76f469b841ff4705b25b9 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Sat, 26 Oct 2013 21:19:19 -0400 Subject: [PATCH 096/179] renamed autopsy rr plugin for ntuser to be more consistent --- RecentActivity/release/rr/plugins/{autopsy => autopsyntuser} | 0 .../org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename RecentActivity/release/rr/plugins/{autopsy => autopsyntuser} (100%) diff --git a/RecentActivity/release/rr/plugins/autopsy b/RecentActivity/release/rr/plugins/autopsyntuser similarity index 100% rename from RecentActivity/release/rr/plugins/autopsy rename to RecentActivity/release/rr/plugins/autopsyntuser diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java index 8da69f4425..840850238a 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java @@ -280,7 +280,7 @@ public class ExtractRegistry extends Extract { fullType = "software"; } else if (regFilePath.toLowerCase().contains("ntuser")) { - autopsyType = "autopsy"; + autopsyType = "autopsyntuser"; fullType = "ntuser"; } else if (regFilePath.toLowerCase().contains("sam")) { From 2d80d2ee2113031929a8977ac3ffa24b6bea5c09 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Sat, 26 Oct 2013 23:37:48 -0400 Subject: [PATCH 097/179] normalized timestamps in registry analysis, updated reports --- .../autopsy/report/ReportGenerator.java | 3 +- RecentActivity/release/rr/plugins/arunmru.pl | 4 +- .../release/rr/plugins/autopsylogin.pl | 4 +- .../release/rr/plugins/autopsyrecentdocs.pl | 4 +- .../release/rr/plugins/autopsyshellfolders.pl | 4 +- .../release/rr/plugins/autopsyuninstall.pl | 8 +-- .../release/rr/plugins/autopsyusb.pl | 10 +-- 
.../release/rr/plugins/autopsywinver.pl | 4 +- .../release/rr/plugins/officedocs.pl | 4 +- .../release/rr/plugins/officedocs2010.pl | 2 +- .../recentactivity/ExtractRegistry.java | 66 +++++++++---------- 11 files changed, 57 insertions(+), 56 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java b/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java index 889ca07328..56f4f8780f 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java @@ -842,7 +842,7 @@ public class ReportGenerator { columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Destination", "Source URL", "Date Accessed", "Program", "Source File"})); break; case TSK_RECENT_OBJECT: - columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Path", "Source File"})); + columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Path", "Date/Time", "Source File"})); break; case TSK_INSTALLED_PROG: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Program Name", "Install Date/Time", "Source File"})); @@ -1030,6 +1030,7 @@ public class ReportGenerator { case TSK_RECENT_OBJECT: List recent = new ArrayList<>(); recent.add(attributes.get(ATTRIBUTE_TYPE.TSK_PATH.getTypeID())); + recent.add(attributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); recent.add(getFileUniquePath(artifactData.getObjectID())); return recent; case TSK_INSTALLED_PROG: diff --git a/RecentActivity/release/rr/plugins/arunmru.pl b/RecentActivity/release/rr/plugins/arunmru.pl index 8edea6e515..504700f145 100644 --- a/RecentActivity/release/rr/plugins/arunmru.pl +++ b/RecentActivity/release/rr/plugins/arunmru.pl @@ -47,7 +47,7 @@ sub pluginmain { my @vals = $key->get_list_of_values(); ::rptMsg(""); - ::rptMsg(""); + ::rptMsg("".gmtime($key->get_timestamp()).""); ::rptMsg(""); my %runvals; my $mru; @@ -75,4 +75,4 @@ sub pluginmain { } -1; \ No newline at end of file +1; diff --git 
a/RecentActivity/release/rr/plugins/autopsylogin.pl b/RecentActivity/release/rr/plugins/autopsylogin.pl index 5f83827176..ab0365817e 100644 --- a/RecentActivity/release/rr/plugins/autopsylogin.pl +++ b/RecentActivity/release/rr/plugins/autopsylogin.pl @@ -48,7 +48,7 @@ sub pluginmain { #::rptMsg("Logon User Name"); #::rptMsg($key_path); ::rptMsg(""); - ::rptMsg(""); + ::rptMsg("".gmtime($key->get_timestamp()).""); foreach my $v (@vals) { if ($v->get_name() eq $logon_name) { ::rptMsg(" ".$v->get_data() .""); @@ -67,4 +67,4 @@ sub pluginmain { } } -1; \ No newline at end of file +1; diff --git a/RecentActivity/release/rr/plugins/autopsyrecentdocs.pl b/RecentActivity/release/rr/plugins/autopsyrecentdocs.pl index 538555ef8d..776126175b 100644 --- a/RecentActivity/release/rr/plugins/autopsyrecentdocs.pl +++ b/RecentActivity/release/rr/plugins/autopsyrecentdocs.pl @@ -49,7 +49,7 @@ sub pluginmain { #::rptMsg("RecentDocs"); #::rptMsg("**All values printed in MRUList\\MRUListEx order."); #::rptMsg($key_path); - ::rptMsg(""); + ::rptMsg("".gmtime($key->get_timestamp()).""); # Get RecentDocs values my %rdvals = getRDValues($key); if (%rdvals) { @@ -158,4 +158,4 @@ sub getRDValues { } } -1; \ No newline at end of file +1; diff --git a/RecentActivity/release/rr/plugins/autopsyshellfolders.pl b/RecentActivity/release/rr/plugins/autopsyshellfolders.pl index de3115f9dd..d625820ec5 100644 --- a/RecentActivity/release/rr/plugins/autopsyshellfolders.pl +++ b/RecentActivity/release/rr/plugins/autopsyshellfolders.pl @@ -48,7 +48,7 @@ sub pluginmain { my $key; if ($key = $root_key->get_subkey($key_path)) { ::rptMsg(""); - ::rptMsg(""); + ::rptMsg("".gmtime($key->get_timestamp()).""); my @vals = $key->get_list_of_values(); ::rptMsg(""); @@ -69,4 +69,4 @@ sub pluginmain { #::logMsg($key_path." 
not found."); } } -1; \ No newline at end of file +1; diff --git a/RecentActivity/release/rr/plugins/autopsyuninstall.pl b/RecentActivity/release/rr/plugins/autopsyuninstall.pl index 30fc0dcd74..d3f114dc5e 100644 --- a/RecentActivity/release/rr/plugins/autopsyuninstall.pl +++ b/RecentActivity/release/rr/plugins/autopsyuninstall.pl @@ -51,7 +51,7 @@ sub pluginmain { #::rptMsg($key_path); #::rptMsg(""); ::rptMsg(""); - ::rptMsg(""); + ::rptMsg("".gmtime($key->get_timestamp()).""); ::rptMsg(""); my %uninst; my @subkeys = $key->get_list_of_subkeys(); @@ -73,9 +73,9 @@ sub pluginmain { push(@{$uninst{$lastwrite}},$display); } foreach my $t (reverse sort {$a <=> $b} keys %uninst) { - #::rptMsg(""); + #::rptMsg(""); foreach my $item (@{$uninst{$t}}) { - ::rptMsg("" .$item.""); + ::rptMsg("" .$item.""); } #::rptMsg(""); } @@ -89,4 +89,4 @@ sub pluginmain { } ::rptMsg(""); } -1; \ No newline at end of file +1; diff --git a/RecentActivity/release/rr/plugins/autopsyusb.pl b/RecentActivity/release/rr/plugins/autopsyusb.pl index 9f5b97fdbd..3c6b788c09 100644 --- a/RecentActivity/release/rr/plugins/autopsyusb.pl +++ b/RecentActivity/release/rr/plugins/autopsyusb.pl @@ -59,7 +59,7 @@ sub pluginmain { my $key_path = $ccs."\\Enum\\USB"; my $key; if ($key = $root_key->get_subkey($key_path)) { - ::rptMsg(""); + ::rptMsg(""); my @subkeys = $key->get_list_of_subkeys(); if (scalar(@subkeys) > 0) { @@ -69,8 +69,8 @@ sub pluginmain { if (scalar(@sk) > 0) { foreach my $k (@sk) { my $serial = $k->get_name(); - my $sn_lw = $k->get_timestamp(); - my $str = $comp_name.",".$dev_class.",".$serial.",".$sn_lw; + my $mtime = $k->get_timestamp(); + my $str = $comp_name.",".$dev_class.",".$serial.",".$mtime; my $loc; eval { @@ -94,7 +94,7 @@ sub pluginmain { }; - ::rptMsg("" . $serial . ""); + ::rptMsg("" . $serial . ""); } } } @@ -110,4 +110,4 @@ sub pluginmain { #::logMsg($key_path." 
not found."); } } -1; \ No newline at end of file +1; diff --git a/RecentActivity/release/rr/plugins/autopsywinver.pl b/RecentActivity/release/rr/plugins/autopsywinver.pl index 73cb5a3017..758dc45b5c 100644 --- a/RecentActivity/release/rr/plugins/autopsywinver.pl +++ b/RecentActivity/release/rr/plugins/autopsywinver.pl @@ -32,7 +32,7 @@ sub pluginmain { my $reg = Parse::Win32Registry->new($hive); my $root_key = $reg->get_root_key; ::rptMsg(""); - ::rptMsg(""); + ::rptMsg(""); ::rptMsg(""); my $key_path = "Microsoft\\Windows NT\\CurrentVersion"; my $key; @@ -106,4 +106,4 @@ sub pluginmain { } ::rptMsg(""); } -1; \ No newline at end of file +1; diff --git a/RecentActivity/release/rr/plugins/officedocs.pl b/RecentActivity/release/rr/plugins/officedocs.pl index ad9495c407..c7ee407a7f 100644 --- a/RecentActivity/release/rr/plugins/officedocs.pl +++ b/RecentActivity/release/rr/plugins/officedocs.pl @@ -56,8 +56,8 @@ sub pluginmain { #::rptMsg("MSOffice version ".$version." located."); my $key_path = "Software\\Microsoft\\Office\\".$version; my $of_key = $root_key->get_subkey($key_path); + ::rptMsg(" ".gmtime($of_key->get_timestamp()).""); ::rptMsg(""); - ::rptMsg(""); if ($of_key) { # Attempt to retrieve Word docs my @funcs = ("Open","Save As","File Save"); @@ -148,4 +148,4 @@ sub pluginmain { ::rptMsg(""); } -1; \ No newline at end of file +1; diff --git a/RecentActivity/release/rr/plugins/officedocs2010.pl b/RecentActivity/release/rr/plugins/officedocs2010.pl index 632751196c..2783dc01f6 100644 --- a/RecentActivity/release/rr/plugins/officedocs2010.pl +++ b/RecentActivity/release/rr/plugins/officedocs2010.pl @@ -218,4 +218,4 @@ sub pluginmain { } } -1; \ No newline at end of file +1; diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java index 840850238a..61b38ce261 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java 
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java @@ -144,7 +144,7 @@ public class ExtractRegistry extends Extract { } /** - * Identifies registry files in the database by name, runs regripper on them, and parses the output. + * Identifies registry files in the database by mtimeItem, runs regripper on them, and parses the output. * * @param dataSource * @param controller @@ -263,7 +263,7 @@ public class ExtractRegistry extends Extract { /** * Execute regripper on the given registry. * @param regFilePath Path to local copy of registry - * @param outFilePathBase Path to location to save output file to. Base name that will be extended on + * @param outFilePathBase Path to location to save output file to. Base mtimeItem that will be extended on */ private RegOutputFiles executeRegRip(String regFilePath, String outFilePathBase) { String autopsyType = ""; // Type argument for rr for autopsy-specific modules @@ -379,18 +379,19 @@ public class ExtractRegistry extends Extract { int len = children.getLength(); for (int i = 0; i < len; i++) { Element tempnode = (Element) children.item(i); - String context = tempnode.getNodeName(); + + String dataType = tempnode.getNodeName(); - NodeList timenodes = tempnode.getElementsByTagName("time"); - Long time = null; + NodeList timenodes = tempnode.getElementsByTagName("mtime"); + Long mtime = null; if (timenodes.getLength() > 0) { Element timenode = (Element) timenodes.item(0); String etime = timenode.getTextContent(); try { Long epochtime = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy").parse(etime).getTime(); - time = epochtime.longValue(); - String Tempdate = time.toString(); - time = Long.valueOf(Tempdate) / 1000; + mtime = epochtime.longValue(); + String Tempdate = mtime.toString(); + mtime = Long.valueOf(Tempdate) / 1000; } catch (ParseException ex) { logger.log(Level.WARNING, "Failed to parse epoch time when parsing the registry."); } @@ -410,27 +411,25 @@ public class ExtractRegistry extends 
Extract { // If it has attributes, then it is an Element (based off API) if (artchild.hasAttributes()) { Element artnode = (Element) artchild; - String name = artnode.getAttribute("name"); + String value = artnode.getTextContent().trim(); Collection bbattributes = new ArrayList(); - if ("recentdocs".equals(context)) { + if ("recentdocs".equals(dataType)) { // BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(ARTIFACT_TYPE.TSK_RECENT_OBJECT); - // bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "RecentActivity", context, time)); - // bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", context, name)); - // bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "RecentActivity", context, value)); + // bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "RecentActivity", dataType, mtime)); + // bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", dataType, mtimeItem)); + // bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "RecentActivity", dataType, value)); // bbart.addAttributes(bbattributes); // @@@ BC: Why are we ignoring this... 
} - else if ("usb".equals(context)) { - try { - Long utime = null; - utime = Long.parseLong(name); - String Tempdate = utime.toString(); - utime = Long.valueOf(Tempdate); + else if ("usb".equals(dataType)) { + try { + Long usbMtime = Long.parseLong(artnode.getAttribute("mtime")); + usbMtime = Long.valueOf(usbMtime.toString()); BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(ARTIFACT_TYPE.TSK_DEVICE_ATTACHED); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", utime)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", usbMtime)); String dev = artnode.getAttribute("dev"); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), "RecentActivity", dev)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_ID.getTypeID(), "RecentActivity", value)); @@ -446,29 +445,27 @@ public class ExtractRegistry extends Extract { logger.log(Level.SEVERE, "Error adding device attached artifact to blackboard."); } } - else if ("uninstall".equals(context)) { - Long ftime = null; + else if ("uninstall".equals(dataType)) { + Long itemMtime = null; try { - Long epochtime = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy").parse(name).getTime(); - ftime = epochtime.longValue(); - ftime = ftime / 1000; + Long epochtime = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy").parse(artnode.getAttribute("mtime")).getTime(); + itemMtime = epochtime.longValue(); + itemMtime = itemMtime / 1000; } catch (ParseException e) { logger.log(Level.WARNING, "Failed to parse epoch time for installed program artifact."); } try { - if (time != null) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", time)); - } bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", value)); - bbattributes.add(new 
BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", ftime)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", itemMtime)); BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(ARTIFACT_TYPE.TSK_INSTALLED_PROG); bbart.addAttributes(bbattributes); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error adding installed program artifact to blackboard."); } } - else if ("WinVersion".equals(context)) { + else if ("WinVersion".equals(dataType)) { + String name = artnode.getAttribute("name"); if (name.contains("ProductName")) { winver = value; @@ -496,11 +493,14 @@ public class ExtractRegistry extends Extract { } } } - else if ("office".equals(context)) { + else if ("office".equals(dataType)) { + String name = artnode.getAttribute("name"); + try { BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(ARTIFACT_TYPE.TSK_RECENT_OBJECT); - if (time != null) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", time)); + // @@@ BC: Consider removing this after some more testing. 
It looks like an Mtime associated with the root key and not the individual item + if (mtime != null) { + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", mtime)); } bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", name)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "RecentActivity", value)); From 77c2adce7b7eb9a5769fd80ec3cd55389520a0c8 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Sat, 26 Oct 2013 23:38:42 -0400 Subject: [PATCH 098/179] added pl for newline normalization for rr plugins --- .gitattributes | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitattributes b/.gitattributes index ea6aa14ebc..70e247e540 100644 --- a/.gitattributes +++ b/.gitattributes @@ -11,3 +11,4 @@ Doxyfile text *.py text diff=python +*.pl text From 8834123cb473475663ebb304a123118a75219c65 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Sun, 27 Oct 2013 00:21:31 -0400 Subject: [PATCH 099/179] changed order of attributes for web artifacts, marked 2 unused classes as deprecated --- API-CHANGES.txt | 1 + .../recentactivity/BrowserActivityType.java | 3 ++- .../autopsy/recentactivity/BrowserType.java | 3 ++- .../autopsy/recentactivity/Chrome.java | 11 +++------- .../autopsy/recentactivity/ExtractIE.java | 19 +++++++----------- .../recentactivity/ExtractRegistry.java | 9 +++++---- .../autopsy/recentactivity/Firefox.java | 20 +++++++------------ 7 files changed, 27 insertions(+), 39 deletions(-) diff --git a/API-CHANGES.txt b/API-CHANGES.txt index 8d461135f5..1c268170a4 100644 --- a/API-CHANGES.txt +++ b/API-CHANGES.txt @@ -6,3 +6,4 @@ Changes to make to API when we are ready to make backward incompatible changes: - Content.getUniquePath() should not thrown TskException. We should deal with it in the method. - Make the list of events that Case fires off to be part of an enum to group them together (like IngestManager does). 
- Sub-modules in RecentActivity have a bunch of public/protected variables that do not need to be. (i.e. ExtractRegistry.rrFullFound). +- Delete BrowserType enum and BrowserActivityType in RecentActivity. diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BrowserActivityType.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BrowserActivityType.java index cd94f12c5b..a54977273a 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BrowserActivityType.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BrowserActivityType.java @@ -24,8 +24,9 @@ import java.util.Map; /** * - * @author arivera + * No one seems to be using this */ +@Deprecated public enum BrowserActivityType { Cookies(0), Url(1), diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BrowserType.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BrowserType.java index 48c8e303bc..ebdf41f48d 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BrowserType.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BrowserType.java @@ -23,8 +23,9 @@ import java.util.Map; /** * - * @author arivera + * No one is using this. 
It should go away */ +@Deprecated public enum BrowserType { IE(0), //Internet Explorer FF(1), //Firefox diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java index 3412977112..d788c85a65 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java @@ -265,10 +265,9 @@ public class Chrome extends Extract { Collection bbattributes = new ArrayList(); //TODO Revisit usage of deprecated constructor as per TSK-583 //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "Recent Activity", "Last Visited", (date / 10000000))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "Recent Activity", (date / 10000000))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "Recent Activity", url)); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "Recent Activity", EscapeUtil.decodeURL(url))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "Recent Activity", name)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "Recent Activity", (date / 10000000))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "Recent Activity", "Chrome")); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "Recent Activity", domain)); bbart.addAttributes(bbattributes); @@ -325,15 +324,11 @@ public class Chrome extends Extract { logger.log(Level.INFO, moduleName + "- Now getting cookies from " + temps + " with " + tempList.size() + "artifacts identified."); for (HashMap result : tempList) { Collection bbattributes = new ArrayList(); - //TODO Revisit usage of deprecated constructor as per TSK-583 - //bbattributes.add(new 
BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "Recent Activity", "Title", ((result.get("name").toString() != null) ? result.get("name").toString() : ""))); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "Recent Activity", "Last Visited", ((Long.valueOf(result.get("last_access_utc").toString())) / 10000000))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "Recent Activity", ((result.get("name").toString() != null) ? result.get("name").toString() : ""))); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "Recent Activity", ((result.get("host_key").toString() != null) ? result.get("host_key").toString() : ""))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "Recent Activity", ((Long.valueOf(result.get("last_access_utc").toString())) / 10000000))); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "Recent Activity", ((result.get("name").toString() != null) ? result.get("name").toString() : ""))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "Recent Activity", ((result.get("value").toString() != null) ? result.get("value").toString() : ""))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "Recent Activity", "Chrome")); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "Recent Activity", ((result.get("host_key").toString() != null) ? result.get("host_key").toString() : ""))); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "Recent Activity", ((result.get("host_key").toString() != null) ? 
EscapeUtil.decodeURL(result.get("host_key").toString()) : ""))); String domain = result.get("host_key").toString(); domain = domain.replaceFirst("^\\.+(?!$)", ""); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "Recent Activity", domain)); diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java index f4528d8587..eef5bb92cd 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java @@ -149,10 +149,9 @@ public class ExtractIE extends Extract { String domain = Util.extractDomain(url); Collection bbattributes = new ArrayList(); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", datetime)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", url)); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", EscapeUtil.decodeURL(url))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", name)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", datetime)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "Internet Explorer")); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", domain)); this.addArtifact(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, favoritesFile, bbattributes); @@ -203,12 +202,11 @@ public class ExtractIE extends Extract { String domain = Util.extractDomain(url); Collection bbattributes = new ArrayList(); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", (name != null) ? 
name : "")); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", url)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", datetime)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", (name != null) ? name : "")); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "RecentActivity", value)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "Internet Explorer")); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", url)); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", EscapeUtil.decodeURL(url))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", domain)); this.addArtifact(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes); } @@ -254,15 +252,12 @@ public class ExtractIE extends Extract { } continue; } - String path = lnk.getBestPath(); - Long datetime = recentFile.getCrtime(); - + Collection bbattributes = new ArrayList(); + String path = lnk.getBestPath(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), "RecentActivity", path)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", Util.getFileName(path))); - long id = Util.findID(dataSource, path); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "RecentActivity", id)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", datetime)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "RecentActivity", Util.findID(dataSource, path))); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", 
recentFile.getCrtime())); this.addArtifact(ARTIFACT_TYPE.TSK_RECENT_OBJECT, recentFile, bbattributes); } services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_RECENT_OBJECT)); diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java index 61b38ce261..8b0c89ade8 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java @@ -430,16 +430,17 @@ public class ExtractRegistry extends Extract { BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(ARTIFACT_TYPE.TSK_DEVICE_ATTACHED); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", usbMtime)); - String dev = artnode.getAttribute("dev"); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), "RecentActivity", dev)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_ID.getTypeID(), "RecentActivity", value)); + String dev = artnode.getAttribute("dev"); + String model = dev; if (dev.toLowerCase().contains("vid")) { USBInfo info = extrctr.get(dev); if(info.getVendor()!=null) bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE.getTypeID(), "RecentActivity", info.getVendor())); if(info.getProduct() != null) - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), "RecentActivity", info.getProduct())); + model = info.getProduct(); } + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), "RecentActivity", model)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_ID.getTypeID(), "RecentActivity", value)); bbart.addAttributes(bbattributes); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error adding device attached artifact to 
blackboard."); diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java index b4231bac76..9da8013c0e 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java @@ -181,9 +181,8 @@ public class Firefox extends Extract { Collection bbattributes = new ArrayList(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("url").toString() != null) ? result.get("url").toString() : ""))); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", ((result.get("url").toString() != null) ? EscapeUtil.decodeURL(result.get("url").toString()) : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("dateAdded").toString())))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", ((result.get("title").toString() != null) ? result.get("title").toString() : ""))); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("dateAdded").toString())))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", (Util.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "")))); this.addArtifact(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, bookmarkFile, bbattributes); @@ -246,19 +245,14 @@ public class Firefox extends Extract { Collection bbattributes = new ArrayList(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("host").toString() != null) ? 
result.get("host").toString() : ""))); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", ((result.get("host").toString() != null) ? EscapeUtil.decodeURL(result.get("host").toString()) : ""))); - //TODO Revisit usage of deprecated constructor as per TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", "Title", ((result.get("name").toString() != null) ? result.get("name").toString() : ""))); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", "Last Visited", (Long.valueOf(result.get("lastAccessed").toString())))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", ((result.get("name").toString() != null) ? result.get("name").toString() : ""))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", (Long.valueOf(result.get("lastAccessed").toString())))); - if (checkColumn == true) { - //TODO Revisit usage of deprecated constructor as per TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", "Created", (Long.valueOf(result.get("creationTime").toString())))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", (Long.valueOf(result.get("creationTime").toString())))); - } - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", ((result.get("name").toString() != null) ? result.get("name").toString() : ""))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "RecentActivity", ((result.get("value").toString() != null) ? 
result.get("value").toString() : ""))); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); + + if (checkColumn == true) { + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("creationTime").toString())))); + } String domain = Util.extractDomain(result.get("host").toString()); domain = domain.replaceFirst("^\\.+(?!$)", ""); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", domain)); From 5e8517feb9f9b5f390c969c1190370a7e330b218 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Mon, 28 Oct 2013 18:00:14 -0400 Subject: [PATCH 100/179] Fixed downloads for newest chrome and firefox versions --- .../autopsy/recentactivity/Chrome.java | 6 ++ .../autopsy/recentactivity/Firefox.java | 73 ++++++++++++++++++- 2 files changed, 78 insertions(+), 1 deletion(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java index d788c85a65..ea5a20b9a8 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java @@ -62,6 +62,7 @@ public class Chrome extends Extract { private static final String cookieQuery = "select name, value, host_key, expires_utc,last_access_utc, creation_utc from cookies"; private static final String bookmarkQuery = "SELECT starred.title, urls.url, starred.date_added, starred.date_modified, urls.typed_count,urls._last_visit_time FROM starred INNER JOIN urls ON urls.id = starred.url_id"; private static final String downloadQuery = "select full_path, url, start_time, received_bytes from downloads"; + private static final String downloadQueryVersion30 = "SELECT current_path as full_path, url, start_time, received_bytes FROM downloads, downloads_url_chains WHERE 
downloads.id=downloads_url_chains.id"; private static final String loginQuery = "select origin_url, username_value, signon_realm from logins"; private final Logger logger = Logger.getLogger(this.getClass().getName()); public int ChromeCount = 0; @@ -380,6 +381,11 @@ public class Chrome extends Extract { } List> tempList = this.dbConnect(temps, downloadQuery); + + if (tempList.isEmpty()) { + tempList = this.dbConnect(temps, downloadQueryVersion30); + } + logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + "artifacts identified."); for (HashMap result : tempList) { Collection bbattributes = new ArrayList(); diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java index 9da8013c0e..bf0f009bda 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java @@ -56,7 +56,8 @@ public class Firefox extends Extract { private static final String cookieQuery = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed,(creationTime/1000000) as creationTime FROM moz_cookies"; private static final String cookieQueryV3 = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed FROM moz_cookies"; private static final String bookmarkQuery = "SELECT fk, moz_bookmarks.title, url, (moz_bookmarks.dateAdded/1000000) as dateAdded FROM moz_bookmarks INNER JOIN moz_places ON moz_bookmarks.fk=moz_places.id"; - private static final String downloadQuery = "SELECT target, source,(startTime/1000000) as startTime, maxBytes FROM moz_downloads"; + private static final String downloadQuery = "SELECT target, source,(startTime/1000000) as startTime, maxBytes FROM moz_downloads"; + private static final String downloadQueryVersion24 = "SELECT url, content as target, (lastModified/1000000) as lastModified FROM moz_places, moz_annos WHERE 
moz_places.id = moz_annos.place_id AND moz_annos.anno_attribute_id = 3"; public int FireFoxCount = 0; final public static String MODULE_VERSION = "1.0"; @@ -282,6 +283,11 @@ public class Firefox extends Extract { this.addErrorMessage(this.getName() + ": " + msg); return; } + + if (downloadsFiles.isEmpty()) { + getDownloadVersion24(dataSource, controller); + return; + } int j = 0; for (AbstractFile downloadsFile : downloadsFiles) { @@ -305,6 +311,10 @@ public class Firefox extends Extract { } List> tempList = this.dbConnect(temps, downloadQuery); + if (tempList.isEmpty()) { + getDownloadVersion24(dataSource, controller); + return; + } logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + "artifacts identified."); for (HashMap result : tempList) { @@ -363,4 +373,65 @@ public class Firefox extends Extract { public boolean hasBackgroundJobsRunning() { return false; } + + private void getDownloadVersion24(Content dataSource, IngestDataSourceWorkerController controller) { + FileManager fileManager = currentCase.getServices().getFileManager(); + List downloadsFiles = null; + try { + downloadsFiles = fileManager.findFiles(dataSource, "places.sqlite", "Firefox"); + } catch (TskCoreException ex) { + String msg = "Error fetching 'places' files for Firefox."; + logger.log(Level.WARNING, msg); + this.addErrorMessage(this.getName() + ": " + msg); + return; + } + + int j = 0; + for (AbstractFile downloadsFile : downloadsFiles) { + if (downloadsFile.getSize() == 0) { + continue; + } + String fileName = downloadsFile.getName(); + String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + "-downloads" + j + ".db"; + int errors = 0; + try { + ContentUtils.writeToFile(downloadsFile, new File(temps)); + } catch (IOException ex) { + logger.log(Level.SEVERE, "Error writing the sqlite db for firefox download artifacts.{0}", ex); + this.addErrorMessage(this.getName() + ": Error while trying to 
analyze file:" + fileName); + continue; + } + File dbFile = new File(temps); + if (controller.isCancelled()) { + dbFile.delete(); + break; + } + + List> tempList = this.dbConnect(temps, downloadQueryVersion24); + logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + "artifacts identified."); + for (HashMap result : tempList) { + + Collection bbattributes = new ArrayList(); + + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("url").toString() != null) ? result.get("url").toString() : ""))); + //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", ((result.get("source").toString() != null) ? EscapeUtil.decodeURL(result.get("source").toString()) : ""))); + //TODO Revisit usage of deprecated constructor as per TSK-583 + //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "RecentActivity", "Last Visited", (Long.valueOf(result.get("startTime").toString())))); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", Long.valueOf(result.get("lastModified").toString()))); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), "RecentActivity", ((result.get("target").toString() != null) ? result.get("target").toString().replaceAll("file:///", "") : ""))); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", (Util.extractDomain((result.get("url").toString() != null) ? 
result.get("url").toString() : "")))); + this.addArtifact(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadsFile, bbattributes); + + } + if (errors > 0) { + this.addErrorMessage(this.getName() + ": Error parsing " + errors + " Firefox web download artifacts."); + } + j++; + dbFile.delete(); + break; + } + + services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD)); + } } From 7e967e2568749d0bb8c1377843196b51e71fb4da Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Tue, 29 Oct 2013 10:54:56 -0400 Subject: [PATCH 101/179] Added ingest messages summarizing browser data found on the image. --- .../autopsy/recentactivity/Chrome.java | 25 +++++- .../autopsy/recentactivity/Extract.java | 8 +- .../autopsy/recentactivity/ExtractIE.java | 11 ++- .../autopsy/recentactivity/Firefox.java | 80 +++++++++---------- .../recentactivity/RAImageIngestModule.java | 35 ++++++-- 5 files changed, 107 insertions(+), 52 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java index ea5a20b9a8..5f80fb7e1e 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java @@ -82,6 +82,7 @@ public class Chrome extends Extract { @Override public void process(PipelineContextpipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { + historyFound = true; this.getHistory(dataSource, controller); this.getBookmark(dataSource, controller); this.getCookie(dataSource, controller); @@ -104,6 +105,7 @@ public class Chrome extends Extract { String msg = "Error when trying to get Chrome history files."; logger.log(Level.SEVERE, msg, ex); this.addErrorMessage(this.getName() + ": " + msg); + historyFound = false; return; } @@ -117,7 +119,10 @@ public class Chrome extends Extract { // log a message if we don't have any allocated history 
files if (allocatedHistoryFiles.isEmpty()) { - logger.log(Level.INFO, "Could not find any allocated Chrome history files."); + String msg = "Could not find any allocated Chrome history files."; + logger.log(Level.INFO, msg); + addErrorMessage(getName() + ": " + msg); + historyFound = false; return; } @@ -380,9 +385,11 @@ public class Chrome extends Extract { break; } - List> tempList = this.dbConnect(temps, downloadQuery); + List> tempList = null; - if (tempList.isEmpty()) { + if (isChromePreVersion30(temps)) { + tempList = this.dbConnect(temps, downloadQuery); + } else { tempList = this.dbConnect(temps, downloadQueryVersion30); } @@ -495,4 +502,16 @@ public class Chrome extends Extract { public boolean hasBackgroundJobsRunning() { return false; } + + private boolean isChromePreVersion30(String temps) { + String query = "PRAGMA table_info(downloads)"; + List> columns = this.dbConnect(temps, query); + for (HashMap col : columns) { + if (col.get("name").equals("url")) { + return true; + } + } + + return false; + } } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java index b3a5a5fe2b..374b7767fb 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java @@ -40,10 +40,11 @@ abstract public class Extract extends IngestModuleDataSource{ public final Logger logger = Logger.getLogger(this.getClass().getName()); protected final ArrayList errorMessages = new ArrayList<>(); protected String moduleName = ""; + protected boolean historyFound = false; //hide public constructor to prevent from instantiation by ingest module loader Extract() { - + historyFound = true; } /** @@ -103,6 +104,7 @@ abstract public class Extract extends IngestModuleDataSource{ tempdbconnect.closeConnection(); } catch (SQLException ex) { logger.log(Level.SEVERE, "Error while trying to read into a sqlite db." 
+ connectionString, ex); + errorMessages.add(getName() + ": Failed to query database."); return Collections.>emptyList(); } return list; @@ -142,4 +144,8 @@ abstract public class Extract extends IngestModuleDataSource{ public String getName() { return moduleName; } + + public boolean foundHistory() { + return historyFound; + } } \ No newline at end of file diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java index eef5bb92cd..b816b600bf 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java @@ -93,6 +93,7 @@ public class ExtractIE extends Extract { @Override public void process(PipelineContextpipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { + historyFound = true; this.getBookmark(dataSource, controller); this.getCookie(dataSource, controller); this.getRecentDocuments(dataSource, controller); @@ -299,6 +300,14 @@ public class ExtractIE extends Extract { return; } + if (indexFiles.isEmpty()) { + String msg = "No InternetExplorer history files found."; + logger.log(Level.INFO, msg); + addErrorMessage(getName() + ": " + msg); + historyFound = false; + return; + } + String temps; String indexFileName; for (AbstractFile indexFile : indexFiles) { @@ -522,4 +531,4 @@ public class ExtractIE extends Extract { public boolean hasBackgroundJobsRunning() { return false; } -} \ No newline at end of file +} diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java index bf0f009bda..24f1fb6ce9 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java @@ -74,7 +74,8 @@ public class Firefox extends Extract { } @Override - public void 
process(PipelineContextpipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { + public void process(PipelineContext pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { + historyFound = true; this.getHistory(dataSource, controller); this.getBookmark(dataSource, controller); this.getDownload(dataSource, controller); @@ -94,6 +95,16 @@ public class Firefox extends Extract { String msg = "Error fetching internet history files for Firefox."; logger.log(Level.WARNING, msg); this.addErrorMessage(this.getName() + ": " + msg); + historyFound = false; + return; + } + + if (historyFiles.isEmpty()) { + String msg = "No FireFox history files found."; + logger.log(Level.INFO, msg); + addErrorMessage(getName() + ": " + msg); + historyFound = false; + return; } int j = 0; @@ -266,29 +277,8 @@ public class Firefox extends Extract { services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE)); } - /** - * Queries for downloads files and adds artifacts - * @param dataSource - * @param controller - */ - private void getDownload(Content dataSource, IngestDataSourceWorkerController controller) { - - FileManager fileManager = currentCase.getServices().getFileManager(); - List downloadsFiles = null; - try { - downloadsFiles = fileManager.findFiles(dataSource, "downloads.sqlite", "Firefox"); - } catch (TskCoreException ex) { - String msg = "Error fetching 'downloads' files for Firefox."; - logger.log(Level.WARNING, msg); - this.addErrorMessage(this.getName() + ": " + msg); - return; - } - - if (downloadsFiles.isEmpty()) { - getDownloadVersion24(dataSource, controller); - return; - } - + + private void getDownloadPreVersion24(Content dataSource, IngestDataSourceWorkerController controller, List downloadsFiles) { int j = 0; for (AbstractFile downloadsFile : downloadsFiles) { if (downloadsFile.getSize() == 0) { @@ -311,10 +301,6 @@ public class Firefox extends Extract { } 
List> tempList = this.dbConnect(temps, downloadQuery); - if (tempList.isEmpty()) { - getDownloadVersion24(dataSource, controller); - return; - } logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + "artifacts identified."); for (HashMap result : tempList) { @@ -350,6 +336,28 @@ public class Firefox extends Extract { services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD)); } + /** + * Queries for downloads files and adds artifacts + * @param dataSource + * @param controller + */ + private void getDownload(Content dataSource, IngestDataSourceWorkerController controller) { + FileManager fileManager = currentCase.getServices().getFileManager(); + List downloadsFiles = null; + List placesFiles = null; + try { + downloadsFiles = fileManager.findFiles(dataSource, "downloads.sqlite", "Firefox"); + placesFiles = fileManager.findFiles(dataSource, "places.sqlite", "Firefox"); + } catch (TskCoreException ex) { + String msg = "Error fetching 'downloads' files for Firefox."; + logger.log(Level.WARNING, msg); + this.addErrorMessage(this.getName() + ": " + msg); + return; + } + + getDownloadPreVersion24(dataSource, controller, downloadsFiles); + getDownloadVersion24(dataSource, controller, placesFiles); + } @Override public void init(IngestModuleInit initContext) { @@ -374,18 +382,7 @@ public class Firefox extends Extract { return false; } - private void getDownloadVersion24(Content dataSource, IngestDataSourceWorkerController controller) { - FileManager fileManager = currentCase.getServices().getFileManager(); - List downloadsFiles = null; - try { - downloadsFiles = fileManager.findFiles(dataSource, "places.sqlite", "Firefox"); - } catch (TskCoreException ex) { - String msg = "Error fetching 'places' files for Firefox."; - logger.log(Level.WARNING, msg); - this.addErrorMessage(this.getName() + ": " + msg); - return; - } - + private void 
getDownloadVersion24(Content dataSource, IngestDataSourceWorkerController controller, List downloadsFiles) { int j = 0; for (AbstractFile downloadsFile : downloadsFiles) { if (downloadsFile.getSize() == 0) { @@ -406,8 +403,9 @@ public class Firefox extends Extract { dbFile.delete(); break; } - + List> tempList = this.dbConnect(temps, downloadQueryVersion24); + logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + "artifacts identified."); for (HashMap result : tempList) { diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java index ae0584fbf0..1e81f63496 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java @@ -23,8 +23,8 @@ package org.sleuthkit.autopsy.recentactivity; import java.io.File; -import java.nio.file.Path; import java.util.ArrayList; +import java.util.List; import java.util.logging.Level; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; @@ -49,6 +49,7 @@ public final class RAImageIngestModule extends IngestModuleDataSource { private static int messageId = 0; private StringBuilder subCompleted = new StringBuilder(); private ArrayList modules; + private List browserModules; final public static String MODULE_VERSION = "1.0"; //public constructor is required @@ -106,6 +107,17 @@ public final class RAImageIngestModule extends IngestModuleDataSource { } final IngestMessage msg = IngestMessage.createMessage(++messageId, msgLevel, this, "Finished " + dataSource.getName()+ " - " + errorMsgSubject, errorMessage.toString()); services.postMessage(msg); + + StringBuilder historyMsg = new StringBuilder(); + historyMsg.append("

Browser Data on ").append(dataSource.getName()).append(":

    \n"); + for (Extract module : browserModules) { + historyMsg.append("
  • ").append(module.getName()); + historyMsg.append(": ").append((module.foundHistory()) ? " Found." : " Not Found."); + historyMsg.append("
  • "); + } + historyMsg.append("
"); + final IngestMessage inboxMsg = IngestMessage.createMessage(++messageId, MessageType.INFO, this, dataSource.getName() + " - Browser Results", historyMsg.toString()); + services.postMessage(inboxMsg); } @Override @@ -139,18 +151,29 @@ public final class RAImageIngestModule extends IngestModuleDataSource { @Override public void init(IngestModuleInit initContext) { modules = new ArrayList<>(); + browserModules = new ArrayList(); logger.log(Level.INFO, "init() {0}", this.toString()); services = IngestServices.getDefault(); - modules.add(new Chrome()); - modules.add(new Firefox()); - modules.add(new ExtractIE()); + final Extract registry = new ExtractRegistry(); + final Extract iexplore = new ExtractIE(); + final Extract chrome = new Chrome(); + final Extract firefox = new Firefox(); + final Extract SEUQA = new SearchEngineURLQueryAnalyzer(); + + modules.add(chrome); + modules.add(firefox); + modules.add(iexplore); // this needs to run after the web browser modules - modules.add(new SearchEngineURLQueryAnalyzer()); + modules.add(SEUQA); // this runs last because it is slowest - modules.add(new ExtractRegistry()); + modules.add(registry); + browserModules.add(chrome); + browserModules.add(firefox); + browserModules.add(iexplore); + for (Extract module : modules) { try { module.init(initContext); From ffd28bf89921cdb3d231e018c34ac8b41264304b Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Tue, 29 Oct 2013 13:03:53 -0400 Subject: [PATCH 102/179] Throw exception from SevenZip init if it fails --- .../org/sleuthkit/autopsy/sevenzip/SevenZipIngestModule.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/SevenZip/src/org/sleuthkit/autopsy/sevenzip/SevenZipIngestModule.java b/SevenZip/src/org/sleuthkit/autopsy/sevenzip/SevenZipIngestModule.java index bbec25d70a..c3b594c07e 100644 --- a/SevenZip/src/org/sleuthkit/autopsy/sevenzip/SevenZipIngestModule.java +++ b/SevenZip/src/org/sleuthkit/autopsy/sevenzip/SevenZipIngestModule.java @@ -134,7 +134,7 
@@ public final class SevenZipIngestModule extends IngestModuleAbstractFile { String details = "Error initializing output dir: " + unpackDirPath + ": " + e.getMessage(); //MessageNotifyUtil.Notify.error(msg, details); services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); - return; + throw e; } } @@ -148,7 +148,7 @@ public final class SevenZipIngestModule extends IngestModuleAbstractFile { String details = "Could not initialize 7-ZIP library: " + e.getMessage(); //MessageNotifyUtil.Notify.error(msg, details); services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); - return; + throw new RuntimeException(e); } archiveDepthCountTree = new ArchiveDepthCountTree(); From 952b05c909584d40c1c43d7e9a8812e8609e0721 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Tue, 29 Oct 2013 13:06:39 -0400 Subject: [PATCH 103/179] Update number of matches after all nodes have been loaded. --- .../sleuthkit/autopsy/corecomponents/DataResultPanel.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java index 6835a1e435..47a67b7b2c 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java @@ -625,6 +625,12 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C if (load && containsReal(delta)) { load = false; setupTabs(nme.getNode()); + SwingUtilities.invokeLater(new Runnable() { + @Override + public void run() { + numberMatchLabel.setText(Integer.toString(rootNode.getChildren().getNodesCount())); + } + }); } } From b18cf9a02fcae4dd708559e55658f8141969a46c Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Tue, 29 Oct 2013 13:35:53 -0400 Subject: [PATCH 104/179] Fixed UI hang and updated number of hits column name. 
--- .../sleuthkit/autopsy/datamodel/KeywordHits.java | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/KeywordHits.java b/Core/src/org/sleuthkit/autopsy/datamodel/KeywordHits.java index e11fe6be2c..9e8674a0b2 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/KeywordHits.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/KeywordHits.java @@ -20,7 +20,7 @@ package org.sleuthkit.autopsy.datamodel; import java.sql.ResultSet; import java.sql.SQLException; -import java.sql.Statement; +import java.util.ArrayList; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; @@ -28,7 +28,6 @@ import java.util.Map; import java.util.Set; import java.util.logging.Level; import org.sleuthkit.autopsy.coreutils.Logger; -import org.openide.nodes.AbstractNode; import org.openide.nodes.ChildFactory; import org.openide.nodes.Children; import org.openide.nodes.Node; @@ -242,7 +241,7 @@ public class KeywordHits implements AutopsyVisitableItem { ss = Sheet.createPropertiesSet(); s.put(ss); } - + ss.put(new NodeProperty("List Name", "List Name", "no description", @@ -323,15 +322,15 @@ public class KeywordHits implements AutopsyVisitableItem { ss = Sheet.createPropertiesSet(); s.put(ss); } - + ss.put(new NodeProperty("List Name", "List Name", "no description", name)); - ss.put(new NodeProperty("Number of Hits", - "Number of Hits", + ss.put(new NodeProperty("Files with Hits", + "Files with Hits", "no description", children.size())); @@ -350,14 +349,16 @@ public class KeywordHits implements AutopsyVisitableItem { @Override protected boolean createKeys(List list) { + List tempList = new ArrayList<>(); for (long l : children) { try { //TODO: bulk artifact gettings - list.add(skCase.getBlackboardArtifact(l)); + tempList.add(skCase.getBlackboardArtifact(l)); } catch (TskException ex) { logger.log(Level.WARNING, "TSK Exception occurred", ex); } } + list.addAll(tempList); return true; } From 
5ad40dcbaf48b0222df10d1552310cc4301c692b Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Tue, 29 Oct 2013 13:56:13 -0400 Subject: [PATCH 105/179] Reset content artifact viewer on resetComponent --- .../corecomponents/DataContentViewerArtifact.java | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerArtifact.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerArtifact.java index 919d6a85be..8a3fbdae88 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerArtifact.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerArtifact.java @@ -257,19 +257,15 @@ public class DataContentViewerArtifact extends javax.swing.JPanel implements Dat outputViewPane.setText(""); prevPageButton.setEnabled(false); nextPageButton.setEnabled(false); + currentNode = null; } @Override public void setNode(Node selectedNode) { - // @@@ Remove this when the redundant setNode() calls problem is fixed. if (currentNode == selectedNode) { return; } currentNode = selectedNode; - - // @@@ resetComponent() is currently a no-op due to the redundant setNode() calls problem. - // For now, do the reset here. Remove this when the redundant setNode() calls problem is fixed. - resetComponents(); // Make sure there is a node. Null might be passed to reset the viewer. if (selectedNode == null) { @@ -308,8 +304,7 @@ public class DataContentViewerArtifact extends javax.swing.JPanel implements Dat @Override public void resetComponent() { - // @@@ Restore this when the redundant setNode() calls problem is fixed. - // resetComponents(); + resetComponents(); } @Override From 12dc09a081ba243e25b0c339a0a037209a104d1f Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Tue, 29 Oct 2013 15:45:24 -0400 Subject: [PATCH 106/179] Sort artifact types alphabetically in artifact selection dialog. 
--- .../autopsy/report/ArtifactSelectionDialog.java | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.java b/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.java index 24ed36f302..dd4e8fc542 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.java +++ b/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.java @@ -24,6 +24,8 @@ import java.awt.Toolkit; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; import java.util.EnumMap; import java.util.List; import java.util.Map; @@ -71,6 +73,13 @@ public class ArtifactSelectionDialog extends javax.swing.JDialog { artifacts.removeAll(doNotReport); + Collections.sort(artifacts, new Comparator() { + @Override + public int compare(ARTIFACT_TYPE o1, ARTIFACT_TYPE o2) { + return o1.getDisplayName().compareTo(o2.getDisplayName()); + } + }); + artifactStates = new EnumMap(BlackboardArtifact.ARTIFACT_TYPE.class); for (BlackboardArtifact.ARTIFACT_TYPE type : artifacts) { artifactStates.put(type, Boolean.TRUE); From 2dce63fef5b28b644e78d9884c62cdf45a75c418 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Tue, 29 Oct 2013 21:50:37 -0400 Subject: [PATCH 107/179] Updated based on 64-bit options --- BUILDING.txt | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/BUILDING.txt b/BUILDING.txt index 7cba4cdc1e..5577925b21 100644 --- a/BUILDING.txt +++ b/BUILDING.txt @@ -11,8 +11,7 @@ correct C libraries. STEPS: 1) Get Java Setup -1a) Download and install 32-bit version of JDK version 1.7 (32-bit is currently -needed even if you have a 64-bit system). +1a) Download and install JDK version 1.7. You can now use 32-bit or 64-bit, but special work is needed to get The Sleuth Kit to compile as 64-bit. So, 32-bit is easier. 
Autopsy has been used and tested with Oracle JavaSE and the included JavaFX support (http://www.oracle.com/technetwork/java/javase/downloads/index.html). @@ -26,7 +25,8 @@ Note: Netbeans IDE is not required to build and run Autopsy, but it is a recommended IDE to use for development of Autopsy modules. 1d) (optional) If you are going to package Autopsy, then you'll also -need to set JRE_HOME to the root JRE directory. +need to set JRE_HOME_32 to the root 32-bit JRE directory and/or JRE_HOME_64 +to the root 64-bit JRE directory. 1e) (optional) For some Autopsy features to be functional, you need to add java executable to the system PATH. @@ -37,6 +37,9 @@ need to set JRE_HOME to the root JRE directory. later). All you need is the dll file. Note that you will get a launching error if you use libewf 1. - http://sourceforge.net/projects/libewf/ +If you want to build the 64-bit version of The Sleuth Kit, download +our 64-bit version of libewf: +- https://github.com/sleuthkit/libewf_64bit 2b) Set LIBEWF_HOME environment variable to root directory of LIBEWF @@ -97,13 +100,13 @@ BACKGROUND: Here are some notes to shed some light on what is going on during the build process. -- NetBeans uses ant to build Autopsy. The build target locates TSK -(and LIBEWF) based on the environment variables and copies the -needed JAR and library files into the DataModel module in the Autopsy -project (see build-unix.xml and build-windows.xml in the root -directory for details). If you want to use the debug version of -the TSK dll, then edit the copy line in the build-windows.xml file -to copy from the Debug folder. +- The Sleuth Kit Java datamodel JAR file has native libraries +that are copied into it. + +- NetBeans uses ant to build Autopsy. The build target copies the +TSK datamodel JAR file into the project. If you want to use the +debug version of the TSK dll, then there is a different ant target +in TSK to copy the debug versions of the dlls. 
- On a Windows system, the ant target copies all needed libraries to the autopsy folder. On a Unix system, the ant taget copies only From 64a231a2e35d5108f1cd70f02f29513fc668b1b4 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Wed, 30 Oct 2013 00:49:42 -0400 Subject: [PATCH 108/179] file names are found by regexp keyword search now, fixed error reporting in keyword search, comment updates, etc. --- .../release/solr/solr/conf/schema.xml | 8 +- .../autopsy/keywordsearch/Ingester.java | 3 + .../KeywordSearchIngestModule.java | 105 +++++++++++------- .../keywordsearch/TermComponentQuery.java | 1 + NEWS.txt | 9 ++ 5 files changed, 83 insertions(+), 43 deletions(-) diff --git a/KeywordSearch/release/solr/solr/conf/schema.xml b/KeywordSearch/release/solr/solr/conf/schema.xml index 203820992f..6a28005313 100644 --- a/KeywordSearch/release/solr/solr/conf/schema.xml +++ b/KeywordSearch/release/solr/solr/conf/schema.xml @@ -510,12 +510,13 @@ - + + @@ -555,7 +556,7 @@ - + + + + - - - - Keyword Search - - - - -

Keyword Search

-

- Autopsy ships a keyword search module, which provides the ingest capability - and also supports a manual text search mode. -

-

The keyword search ingest module extracts text from the files on the image being ingested and adds them to the index that can then be searched.

-

- Autopsy tries its best to extract maximum amount of text from the files being indexed. - First, the indexing will try to extract text from supported file formats, such as pure text file format, MS Office Documents, PDF files, Email files, and many others. - If the file is not supported by the standard text extractor, Autopsy will fallback to string extraction algorithm. - String extraction on unknown file formats or arbitrary binary files can often still extract a good amount of text from the file, often good enough to provide additional clues. - However, string extraction will not be able to extract text strings from binary files that have been encrypted. -

-

- Autopsy ships with some built-in lists that define regular expressions and enable user to search for Phone Numbers, IP addresses, URLs and E-mail addresses. - However, enabling some of these very general lists can produce a very large number of hits, many of them can be false-positives. -

-

- Once files are in the index, they can be searched quickly for specific keywords, regular expressions, - or using keyword search lists that can contain a mixture of keywords and regular expressions. - Search queries can be executed automatically by the ingest during the ingest run, or at the end of the ingest, depending on the current settings and the time it takes to ingest the image. -

-

Search queries can also be executed manually by the user at any time, as long as there are some files already indexed and ready to be searched.

-

- Keyword search module will save the search results regardless whether the search is performed by the ingest process, or manually by the user. - The saved results are available in the Directory Tree in the left hand side panel. -

-

- To see keyword search results in real-time while ingest is running, add keyword lists using the - Keyword Search Configuration Dialog - and select the "Use during ingest" check box. - You can select "Enable sending messages to inbox during ingest" per list, if the hits on that list should be reported in the Inbox, which is recommended for very specific searches. -

-

- See (Ingest) - for more information on ingest in general. -

-

- Once there are files in the index, the Keyword Search Bar - will be available for use to manually search at any time. -

- - - + + + + + Keyword Search + + + + +

Keyword Search

+

+ Autopsy ships a keyword search module, which provides the ingest capability + and also supports a manual text search mode. +

+

The keyword search ingest module extracts text from the files on the image being ingested and adds them to the index that can then be searched.

+

+ Autopsy tries its best to extract maximum amount of text from the files being indexed. + First, the indexing will try to extract text from supported file formats, such as pure text file format, MS Office Documents, PDF files, Email files, and many others. + If the file is not supported by the standard text extractor, Autopsy will fallback to string extraction algorithm. + String extraction on unknown file formats or arbitrary binary files can often still extract a good amount of text from the file, often good enough to provide additional clues. + However, string extraction will not be able to extract text strings from binary files that have been encrypted. +

+

+ Autopsy ships with some built-in lists that define regular expressions and enable user to search for Phone Numbers, IP addresses, URLs and E-mail addresses. + However, enabling some of these very general lists can produce a very large number of hits, many of them can be false-positives. +

+

+ Once files are in the index, they can be searched quickly for specific keywords, regular expressions, + or using keyword search lists that can contain a mixture of keywords and regular expressions. + Search queries can be executed automatically by the ingest during the ingest run, or at the end of the ingest, depending on the current settings and the time it takes to ingest the image. +

+

Search queries can also be executed manually by the user at any time, as long as there are some files already indexed and ready to be searched.

+

+ Keyword search module will save the search results regardless whether the search is performed by the ingest process, or manually by the user. + The saved results are available in the Directory Tree in the left hand side panel. +

+

+ To see keyword search results in real-time while ingest is running, add keyword lists using the + Keyword Search Configuration Dialog + and select the "Use during ingest" check box. + You can select "Send messages to inbox during ingest" per list, if the hits on that list should be reported in the Inbox, which is recommended for very specific searches. +

+

+ See (Ingest) + for more information on ingest in general. +

+

+ Once there are files in the index, the Keyword Search Bar + will be available for use to manually search at any time. +

+ + + diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java index c405e92fda..849aae992e 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java @@ -150,12 +150,9 @@ public class Chrome extends Extract { Collection bbattributes = new ArrayList(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "Recent Activity", ((result.get("url").toString() != null) ? result.get("url").toString() : ""))); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "Recent Activity", ((result.get("url").toString() != null) ? EscapeUtil.decodeURL(result.get("url").toString()) : ""))); - //TODO Revisit usage of deprecated constructor per TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "Recent Activity", "Last Visited", ((Long.valueOf(result.get("last_visit_time").toString())) / 10000000))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "Recent Activity", ((Long.valueOf(result.get("last_visit_time").toString())) / 10000000))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REFERRER.getTypeID(), "Recent Activity", ((result.get("from_visit").toString() != null) ? result.get("from_visit").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "Recent Activity", ((result.get("title").toString() != null) ? result.get("title").toString() : ""))); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID(), "Recent Activity", ((result.get("title").toString() != null) ? 
result.get("title").toString() : ""))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "Recent Activity", "Chrome")); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "Recent Activity", (Util.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "")))); this.addArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY, historyFile, bbattributes); @@ -276,8 +273,8 @@ public class Chrome extends Extract { //TODO Revisit usage of deprecated constructor as per TSK-583 //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "Recent Activity", "Last Visited", (date / 10000000))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "Recent Activity", url)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "Recent Activity", name)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "Recent Activity", (date / 10000000))); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID(), "Recent Activity", name)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), "Recent Activity", (date / 10000000))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "Recent Activity", "Chrome")); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "Recent Activity", domain)); bbart.addAttributes(bbattributes); diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java index dc3920ab8e..980c48aba4 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java @@ -157,8 +157,8 @@ public class ExtractIE extends Extract { Collection bbattributes = new ArrayList(); 
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", url)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", name)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", datetime)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID(), "RecentActivity", name)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), "RecentActivity", datetime)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "Internet Explorer")); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", domain)); this.addArtifact(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, favoritesFile, bbattributes); @@ -507,7 +507,7 @@ public class ExtractIE extends Extract { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", ftime)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REFERRER.getTypeID(), "RecentActivity", "")); - // @@@ NOte that other browser modules are adding NAME in hre for the title + // @@@ NOte that other browser modules are adding TITLE in hre for the title bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "Internet Explorer")); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", domain)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME.getTypeID(), "RecentActivity", user)); diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java index b558cc9a26..4a207ac8b6 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java @@ 
-132,11 +132,9 @@ public class Firefox extends Extract { Collection bbattributes = new ArrayList(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("url").toString() != null) ? result.get("url").toString() : ""))); //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", ((result.get("url").toString() != null) ? EscapeUtil.decodeURL(result.get("url").toString()) : ""))); - //TODO Revisit usage of deprecated constructor as per TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "RecentActivity", "Last Visited", (Long.valueOf(result.get("visit_date").toString())))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("visit_date").toString())))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REFERRER.getTypeID(), "RecentActivity", ((result.get("ref").toString() != null) ? result.get("ref").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", ((result.get("title").toString() != null) ? result.get("title").toString() : ""))); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID(), "RecentActivity", ((result.get("title").toString() != null) ? result.get("title").toString() : ""))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", (Util.extractDomain((result.get("url").toString() != null) ? 
result.get("url").toString() : "")))); this.addArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY, historyFile, bbattributes); @@ -199,8 +197,8 @@ public class Firefox extends Extract { Collection bbattributes = new ArrayList(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("url").toString() != null) ? result.get("url").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", ((result.get("title").toString() != null) ? result.get("title").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("dateAdded").toString())))); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID(), "RecentActivity", ((result.get("title").toString() != null) ? result.get("title").toString() : ""))); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("dateAdded").toString())))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", (Util.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "")))); this.addArtifact(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, bookmarkFile, bbattributes); @@ -355,8 +353,6 @@ public class Firefox extends Extract { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("source").toString() != null) ? result.get("source").toString() : ""))); //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", ((result.get("source").toString() != null) ? 
EscapeUtil.decodeURL(result.get("source").toString()) : ""))); - //TODO Revisit usage of deprecated constructor as per TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "RecentActivity", "Last Visited", (Long.valueOf(result.get("startTime").toString())))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("startTime").toString())))); try { From 7ea6b661ad7ae487dbff46b1973ac15fbde0baed Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 15 Nov 2013 10:48:59 -0500 Subject: [PATCH 163/179] HashDb: Corrected enabling of Known radio buttons and provided for refresh of simple config after advanced config --- .../HashDbCreateDatabaseDialog.form | 1 - .../HashDbCreateDatabaseDialog.java | 1 - .../hashdatabase/HashDbIngestModule.java | 25 +++++++++++++------ .../hashdatabase/HashDbSimpleConfigPanel.java | 6 ++--- 4 files changed, 20 insertions(+), 13 deletions(-) diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbCreateDatabaseDialog.form b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbCreateDatabaseDialog.form index f788dcff2d..56cbcf2e03 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbCreateDatabaseDialog.form +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbCreateDatabaseDialog.form @@ -153,7 +153,6 @@ - diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbCreateDatabaseDialog.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbCreateDatabaseDialog.java index 3203a3ccd3..cadbced05d 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbCreateDatabaseDialog.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbCreateDatabaseDialog.java @@ -132,7 +132,6 @@ final class HashDbCreateDatabaseDialog extends javax.swing.JDialog { buttonGroup1.add(knownRadioButton); 
org.openide.awt.Mnemonics.setLocalizedText(knownRadioButton, org.openide.util.NbBundle.getMessage(HashDbCreateDatabaseDialog.class, "HashDbCreateDatabaseDialog.knownRadioButton.text")); // NOI18N - knownRadioButton.setEnabled(false); knownRadioButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { knownRadioButtonActionPerformed(evt); diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java index 6c83416fb3..b1a6a96d67 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java @@ -49,7 +49,8 @@ public class HashDbIngestModule extends IngestModuleAbstractFile { public final static String MODULE_DESCRIPTION = "Identifies known and notables files using supplied hash databases, such as a standard NSRL database."; final public static String MODULE_VERSION = Version.getVersion(); private static final Logger logger = Logger.getLogger(HashDbIngestModule.class.getName()); - private HashDbConfigPanel panel; + private HashDbSimpleConfigPanel simpleConfigPanel; + private HashDbConfigPanel advancedConfigPanel; private IngestServices services; private SleuthkitCase skCase; private static int messageId = 0; @@ -93,7 +94,11 @@ public class HashDbIngestModule extends IngestModuleAbstractFile { @Override public javax.swing.JPanel getSimpleConfiguration(String context) { - return new HashDbSimpleConfigPanel(); + if (simpleConfigPanel == null) { + simpleConfigPanel = new HashDbSimpleConfigPanel(); + } + + return simpleConfigPanel; } @Override @@ -108,18 +113,22 @@ public class HashDbIngestModule extends IngestModuleAbstractFile { @Override public javax.swing.JPanel getAdvancedConfiguration(String context) { - if (panel == null) { - panel = new HashDbConfigPanel(); + if (advancedConfigPanel == null) 
{ + advancedConfigPanel = new HashDbConfigPanel(); } - panel.load(); - return panel; + advancedConfigPanel.load(); + return advancedConfigPanel; } @Override public void saveAdvancedConfiguration() { - if (panel != null) { - panel.store(); + if (advancedConfigPanel != null) { + advancedConfigPanel.store(); + } + + if (simpleConfigPanel != null) { + simpleConfigPanel.refreshComponents(); } } diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbSimpleConfigPanel.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbSimpleConfigPanel.java index f9545bac6d..bd8388fc62 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbSimpleConfigPanel.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbSimpleConfigPanel.java @@ -79,9 +79,10 @@ public class HashDbSimpleConfigPanel extends javax.swing.JPanel { } } - private void refreshComponents() { - refreshAlwaysCalcHashesComponents(); + public void refreshComponents() { + knownTableModel.refresh(); knownBadTableModel.refresh(); + refreshAlwaysCalcHashesComponents(); } private void refreshAlwaysCalcHashesComponents() { @@ -164,7 +165,6 @@ public class HashDbSimpleConfigPanel extends javax.swing.JPanel { else { JOptionPane.showMessageDialog(HashDbSimpleConfigPanel.this, "Hash databases must be indexed before they can be used for ingest"); } - refreshComponents(); } } From 9f7cb9098fde4ba1f917a70963dcf346b11fca3a Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 15 Nov 2013 11:27:37 -0500 Subject: [PATCH 164/179] Changed icons for tags --- .../sleuthkit/autopsy/datamodel/BlackboardArtifactTagNode.java | 2 +- Core/src/org/sleuthkit/autopsy/datamodel/ContentTagNode.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactTagNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactTagNode.java index bb45253a5e..3503776324 100755 --- 
a/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactTagNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactTagNode.java @@ -37,7 +37,7 @@ import org.sleuthkit.datamodel.TskCoreException; * either content or blackboard artifact tag nodes. */ public class BlackboardArtifactTagNode extends DisplayableItemNode { - private static final String ICON_PATH = "org/sleuthkit/autopsy/images/tag-folder-blue-icon-16.png"; + private static final String ICON_PATH = "org/sleuthkit/autopsy/images/green-tag-icon-16.png"; private final BlackboardArtifactTag tag; public BlackboardArtifactTagNode(BlackboardArtifactTag tag) { diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagNode.java index 3817bb3c9a..352b040888 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagNode.java @@ -37,7 +37,7 @@ import org.sleuthkit.datamodel.TskCoreException; * type, then by tag name. 
*/ public class ContentTagNode extends DisplayableItemNode { - private static final String ICON_PATH = "org/sleuthkit/autopsy/images/tag-folder-blue-icon-16.png"; + private static final String ICON_PATH = "org/sleuthkit/autopsy/images/blue-tag-icon-16.png"; private final ContentTag tag; public ContentTagNode(ContentTag tag) { From b7c12a154e8f6132eacc4a3403b1e972a93e5aaa Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 15 Nov 2013 13:50:12 -0500 Subject: [PATCH 165/179] Updated Tags sub-tree nodes to populate path area of DataResultTopComponent --- .../org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java | 3 ++- Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java | 3 ++- Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java | 3 ++- .../autopsy/directorytree/BlackboardArtifactTagTypeNode.java | 3 ++- 4 files changed, 8 insertions(+), 4 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java index 37b14bf976..41dad8ac5b 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/ContentTagTypeNode.java @@ -24,6 +24,7 @@ import org.openide.nodes.ChildFactory; import org.openide.nodes.Children; import org.openide.nodes.Node; import org.openide.nodes.Sheet; +import org.openide.util.lookup.Lookups; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.ContentTag; @@ -40,7 +41,7 @@ public class ContentTagTypeNode extends DisplayableItemNode { private static final String ICON_PATH = "org/sleuthkit/autopsy/images/tag-folder-blue-icon-16.png"; public ContentTagTypeNode(TagName tagName) { - super(Children.create(new ContentTagNodeFactory(tagName), true)); + super(Children.create(new ContentTagNodeFactory(tagName), true), Lookups.singleton(tagName.getDisplayName() + " " + DISPLAY_NAME)); long tagsCount = 0; try { diff --git 
a/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java index 74b3f195bf..38d140b039 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/TagNameNode.java @@ -24,6 +24,7 @@ import org.openide.nodes.ChildFactory; import org.openide.nodes.Children; import org.openide.nodes.Node; import org.openide.nodes.Sheet; +import org.openide.util.lookup.Lookups; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.directorytree.BlackboardArtifactTagTypeNode; @@ -41,7 +42,7 @@ public class TagNameNode extends DisplayableItemNode { private final TagName tagName; public TagNameNode(TagName tagName) { - super(Children.create(new TagTypeNodeFactory(tagName), true)); + super(Children.create(new TagTypeNodeFactory(tagName), true), Lookups.singleton(tagName.getDisplayName() + " Tags")); this.tagName = tagName; long tagsCount = 0; diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java index 635416bc1e..37669308bd 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/TagsNode.java @@ -24,6 +24,7 @@ import org.openide.nodes.ChildFactory; import org.openide.nodes.Children; import org.openide.nodes.Node; import org.openide.nodes.Sheet; +import org.openide.util.lookup.Lookups; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.TagName; @@ -40,7 +41,7 @@ public class TagsNode extends DisplayableItemNode { private static final String ICON_PATH = "org/sleuthkit/autopsy/images/tag-folder-blue-icon-16.png"; public TagsNode() { - super(Children.create(new TagNameNodeFactory(), true)); + super(Children.create(new TagNameNodeFactory(), true), Lookups.singleton(DISPLAY_NAME)); super.setName(DISPLAY_NAME); 
super.setDisplayName(DISPLAY_NAME); this.setIconBaseWithExtension(ICON_PATH); diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java b/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java index 5cedd82e98..067b68a49c 100755 --- a/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/BlackboardArtifactTagTypeNode.java @@ -24,6 +24,7 @@ import org.openide.nodes.ChildFactory; import org.openide.nodes.Children; import org.openide.nodes.Node; import org.openide.nodes.Sheet; +import org.openide.util.lookup.Lookups; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.datamodel.BlackboardArtifactTagNode; @@ -45,7 +46,7 @@ public class BlackboardArtifactTagTypeNode extends DisplayableItemNode { private static final String ICON_PATH = "org/sleuthkit/autopsy/images/tag-folder-blue-icon-16.png"; public BlackboardArtifactTagTypeNode(TagName tagName) { - super(Children.create(new BlackboardArtifactTagNodeFactory(tagName), true)); + super(Children.create(new BlackboardArtifactTagNodeFactory(tagName), true), Lookups.singleton(tagName.getDisplayName() + " " + DISPLAY_NAME)); long tagsCount = 0; try { From d17d60c331dace326d425e9f01d2ac1ade7b8166 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 15 Nov 2013 13:56:08 -0500 Subject: [PATCH 166/179] Deleted out of date and unused XSD --- .../autopsy/hashdatabase/HashsetsSchema.xsd | 52 ------------------- 1 file changed, 52 deletions(-) delete mode 100644 HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashsetsSchema.xsd diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashsetsSchema.xsd b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashsetsSchema.xsd deleted file mode 100644 index c278bbd32b..0000000000 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashsetsSchema.xsd +++ 
/dev/null @@ -1,52 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - From 606f1ae4d5c8b1246b5505f1d96e1bafcce448d3 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 15 Nov 2013 15:13:01 -0500 Subject: [PATCH 167/179] Removed default case name comment when adding to a hash db --- .../autopsy/hashdatabase/AddContentToHashDbAction.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/AddContentToHashDbAction.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/AddContentToHashDbAction.java index 9070858c6b..62a5ddd969 100755 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/AddContentToHashDbAction.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/AddContentToHashDbAction.java @@ -147,7 +147,7 @@ public class AddContentToHashDbAction extends AbstractAction implements Presente String md5Hash = file.getMd5Hash(); if (null != md5Hash) { try { - hashSet.add(file, Case.getCurrentCase().getName()); + hashSet.add(file); } catch (TskCoreException ex) { Logger.getLogger(AddContentToHashDbAction.class.getName()).log(Level.SEVERE, "Error adding to hash database", ex); From 630f165c297f63d8419e53a45f1cd6d374400fad Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 15 Nov 2013 17:28:17 -0500 Subject: [PATCH 168/179] Remove adding hashes of virtual directories, other small fixes --- .../casemodule/services/TagsManager.java | 24 ++++++++++--------- .../coreutils/ContextMenuExtensionPoint.java | 1 + .../datamodel/VirtualDirectoryNode.java | 2 -- .../ExplorerNodeActionVisitor.java | 1 - .../AddContentToHashDbAction.java | 4 ++-- 5 files changed, 16 insertions(+), 16 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java index 4f125ac5fe..67788f8300 100755 --- 
a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java @@ -165,7 +165,7 @@ public class TagsManager implements Closeable { * @throws TskCoreException */ public ContentTag addContentTag(Content content, TagName tagName) throws TskCoreException { - return addContentTag(content, tagName, "", 0, content.getSize() - 1); + return addContentTag(content, tagName, "", -1, -1); } /** @@ -177,7 +177,7 @@ public class TagsManager implements Closeable { * @throws TskCoreException */ public ContentTag addContentTag(Content content, TagName tagName, String comment) throws TskCoreException { - return addContentTag(content, tagName, comment, 0, content.getSize() - 1); + return addContentTag(content, tagName, comment, -1, -1); } /** @@ -196,16 +196,18 @@ public class TagsManager implements Closeable { getExistingTagNames(); } - if (beginByteOffset < 0 || beginByteOffset > content.getSize() - 1) { - throw new IllegalArgumentException("beginByteOffset = " + beginByteOffset + " out of content size range (0 - " + (content.getSize() - 1) + ")"); - } + if (beginByteOffset >= 0 && endByteOffset >= 1) { + if (beginByteOffset > content.getSize() - 1) { + throw new IllegalArgumentException("beginByteOffset = " + beginByteOffset + " out of content size range (0 - " + (content.getSize() - 1) + ")"); + } - if (endByteOffset < 0 || endByteOffset > content.getSize() - 1) { - throw new IllegalArgumentException("endByteOffset = " + endByteOffset + " out of content size range (0 - " + (content.getSize() - 1) + ")"); - } - - if (endByteOffset < beginByteOffset) { - throw new IllegalArgumentException("endByteOffset < beginByteOffset"); + if (endByteOffset > content.getSize() - 1) { + throw new IllegalArgumentException("endByteOffset = " + endByteOffset + " out of content size range (0 - " + (content.getSize() - 1) + ")"); + } + + if (endByteOffset < beginByteOffset) { + throw new 
IllegalArgumentException("endByteOffset < beginByteOffset"); + } } return tskCase.addContentTag(content, tagName, comment, beginByteOffset, endByteOffset); diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/ContextMenuExtensionPoint.java b/Core/src/org/sleuthkit/autopsy/coreutils/ContextMenuExtensionPoint.java index 868a2bafd2..10cb36ac33 100755 --- a/Core/src/org/sleuthkit/autopsy/coreutils/ContextMenuExtensionPoint.java +++ b/Core/src/org/sleuthkit/autopsy/coreutils/ContextMenuExtensionPoint.java @@ -42,6 +42,7 @@ public class ContextMenuExtensionPoint { if (!providerActions.isEmpty()) { actions.add(null); // Separator to set off this provider's actions. actions.addAll(provider.getActions()); + actions.add(null); // Separator to set off this provider's actions. } } return actions; diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/VirtualDirectoryNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/VirtualDirectoryNode.java index 6a9f5c7645..d1a66fd25c 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/VirtualDirectoryNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/VirtualDirectoryNode.java @@ -28,7 +28,6 @@ import org.sleuthkit.autopsy.coreutils.ContextMenuExtensionPoint; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.directorytree.ExtractAction; import org.sleuthkit.autopsy.directorytree.NewWindowViewAction; -import org.sleuthkit.autopsy.actions.AddContentTagAction; import org.sleuthkit.datamodel.VirtualDirectory; import org.sleuthkit.datamodel.TskData; @@ -82,7 +81,6 @@ public class VirtualDirectoryNode extends AbstractAbstractFileNode visit(final VirtualDirectory d) { List actions = new ArrayList<>(); actions.add(ExtractAction.getInstance()); - actions.add(AddContentTagAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); return actions; } diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/AddContentToHashDbAction.java 
b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/AddContentToHashDbAction.java index 3387128e28..bef2c3e889 100755 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/AddContentToHashDbAction.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/AddContentToHashDbAction.java @@ -104,9 +104,9 @@ public class AddContentToHashDbAction extends AbstractAction implements Presente // Get the current set of updateable hash databases and add each // one as a menu item. - List hashDatabases = HashDbManager.getInstance().getKnownBadHashSets(); + List hashDatabases = HashDbManager.getInstance().getUpdateableHashSets(); if (!hashDatabases.isEmpty()) { - for (final HashDb database : HashDbManager.getInstance().getUpdateableHashSets()) { + for (final HashDb database : hashDatabases) { JMenuItem databaseItem = add(database.getHashSetName()); databaseItem.addActionListener(new ActionListener() { @Override From 4d0788dc222b2356630a5d6516e95002abe58146 Mon Sep 17 00:00:00 2001 From: "Samuel H. Kenyon" Date: Fri, 15 Nov 2013 22:50:16 -0500 Subject: [PATCH 169/179] Add In Hashset column (no data yet). 
--- .../autopsy/datamodel/AbstractAbstractFileNode.java | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/AbstractAbstractFileNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/AbstractAbstractFileNode.java index eed0c43ea3..940f5b25ee 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/AbstractAbstractFileNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/AbstractAbstractFileNode.java @@ -146,6 +146,12 @@ public abstract class AbstractAbstractFileNode extends A return "Known"; } }, + HASHSETS { + @Override + public String toString() { + return "In Hashsets"; + } + }, MD5HASH { @Override public String toString() { @@ -188,6 +194,7 @@ public abstract class AbstractAbstractFileNode extends A map.put(AbstractFilePropertyType.TYPE_DIR.toString(), content.getDirType().getLabel()); map.put(AbstractFilePropertyType.TYPE_META.toString(), content.getMetaType().toString()); map.put(AbstractFilePropertyType.KNOWN.toString(), content.getKnown().getName()); + map.put(AbstractFilePropertyType.HASHSETS.toString(), ""); map.put(AbstractFilePropertyType.MD5HASH.toString(), content.getMd5Hash() == null ? "" : content.getMd5Hash()); } From e585d612e797c2032e491df98da4f689efd64849 Mon Sep 17 00:00:00 2001 From: "Samuel H. Kenyon" Date: Fri, 15 Nov 2013 23:30:12 -0500 Subject: [PATCH 170/179] Populate the In Hashsets column (empty string if there were no hits for that file). 
--- .../datamodel/AbstractAbstractFileNode.java | 2 +- .../autopsy/datamodel/HashsetHits.java | 37 +++++++++++++++++++ 2 files changed, 38 insertions(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/AbstractAbstractFileNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/AbstractAbstractFileNode.java index 940f5b25ee..eb9001575c 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/AbstractAbstractFileNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/AbstractAbstractFileNode.java @@ -194,7 +194,7 @@ public abstract class AbstractAbstractFileNode extends A map.put(AbstractFilePropertyType.TYPE_DIR.toString(), content.getDirType().getLabel()); map.put(AbstractFilePropertyType.TYPE_META.toString(), content.getMetaType().toString()); map.put(AbstractFilePropertyType.KNOWN.toString(), content.getKnown().getName()); - map.put(AbstractFilePropertyType.HASHSETS.toString(), ""); + map.put(AbstractFilePropertyType.HASHSETS.toString(), HashsetHits.getList(content.getSleuthkitCase(), content.getId())); map.put(AbstractFilePropertyType.MD5HASH.toString(), content.getMd5Hash() == null ? 
"" : content.getMd5Hash()); } diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/HashsetHits.java b/Core/src/org/sleuthkit/autopsy/datamodel/HashsetHits.java index ca15f6d2e7..8ed1bcc750 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/HashsetHits.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/HashsetHits.java @@ -94,6 +94,43 @@ public class HashsetHits implements AutopsyVisitableItem { } } + static public String getList(SleuthkitCase skCase, long objId) { + ResultSet rs = null; + String strList = ""; + + try { + int setNameId = BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(); + int artId = BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID(); + String query = "SELECT value_text,blackboard_attributes.artifact_id,attribute_type_id " + + "FROM blackboard_attributes,blackboard_artifacts WHERE " + + "attribute_type_id=" + setNameId + + " AND blackboard_attributes.artifact_id=blackboard_artifacts.artifact_id" + + " AND blackboard_artifacts.artifact_type_id=" + artId + + " AND blackboard_artifacts.obj_id=" + objId; + rs = skCase.runQuery(query); + int i = 0; + while (rs.next()) { + if (i++ > 0) { + strList += ", "; + } + strList += rs.getString("value_text"); + } + + } catch (SQLException ex) { + logger.log(Level.WARNING, "SQL Exception occurred: ", ex); + } + finally { + if (rs != null) { + try { + skCase.closeRunQuery(rs); + } catch (SQLException ex) { + logger.log(Level.WARNING, "Error closing result set after getting hashset hits", ex); + } + } + } + return strList; + } + @Override public T accept(AutopsyItemVisitor v) { return v.visit(this); From bed25bad9c8539a4cb1239a653a5e2d59a7765bd Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Sun, 17 Nov 2013 21:45:17 -0500 Subject: [PATCH 171/179] updated line endings --- thunderbirdparser/manifest.mf | 14 +- .../nbproject/project.properties | 12 +- update_versions.py | 1878 ++++++++--------- 3 files changed, 952 insertions(+), 952 deletions(-) diff --git a/thunderbirdparser/manifest.mf 
b/thunderbirdparser/manifest.mf index c16a2f4c01..fc34c0e90a 100644 --- a/thunderbirdparser/manifest.mf +++ b/thunderbirdparser/manifest.mf @@ -1,7 +1,7 @@ -Manifest-Version: 1.0 -AutoUpdate-Show-In-Client: true -OpenIDE-Module: org.sleuthkit.autopsy.thunderbirdparser/3 -OpenIDE-Module-Implementation-Version: 9 -OpenIDE-Module-Layer: org/sleuthkit/autopsy/thunderbirdparser/layer.xml -OpenIDE-Module-Localizing-Bundle: org/sleuthkit/autopsy/thunderbirdparser/Bundle.properties - +Manifest-Version: 1.0 +AutoUpdate-Show-In-Client: true +OpenIDE-Module: org.sleuthkit.autopsy.thunderbirdparser/3 +OpenIDE-Module-Implementation-Version: 9 +OpenIDE-Module-Layer: org/sleuthkit/autopsy/thunderbirdparser/layer.xml +OpenIDE-Module-Localizing-Bundle: org/sleuthkit/autopsy/thunderbirdparser/Bundle.properties + diff --git a/thunderbirdparser/nbproject/project.properties b/thunderbirdparser/nbproject/project.properties index 6a243df466..0735c621fa 100644 --- a/thunderbirdparser/nbproject/project.properties +++ b/thunderbirdparser/nbproject/project.properties @@ -1,6 +1,6 @@ -javac.source=1.7 -javac.compilerargs=-Xlint -Xlint:-serial -license.file=../LICENSE-2.0.txt -nbm.homepage=http://www.sleuthkit.org/autopsy/ -nbm.needs.restart=true -spec.version.base=1.2 +javac.source=1.7 +javac.compilerargs=-Xlint -Xlint:-serial +license.file=../LICENSE-2.0.txt +nbm.homepage=http://www.sleuthkit.org/autopsy/ +nbm.needs.restart=true +spec.version.base=1.2 diff --git a/update_versions.py b/update_versions.py index 2883021c9f..fa228d0cca 100644 --- a/update_versions.py +++ b/update_versions.py @@ -1,939 +1,939 @@ -# -# Autopsy Forensic Browser -# -# Copyright 2012-2013 Basis Technology Corp. -# Contact: carrier sleuthkit org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -####################### -# This script exists to help us determine update the library -# versions appropriately. See this page for version details. -# -# http://wiki.sleuthkit.org/index.php?title=Autopsy_3_Module_Versions -# -# The basic idea is that this script uses javadoc/jdiff to -# compare the current state of the source code to the last -# tag and identifies if APIs were removed, added, etc. -# -# When run from the Autopsy build script, this script will: -# - Clone Autopsy and checkout to the previous release tag -# as found in the NEWS.txt file -# - Auto-discover all modules and packages -# - Run jdiff, comparing the current and previous modules -# - Use jdiff's output to determine if each module -# a) has no changes -# b) has backwards compatible changes -# c) has backwards incompatible changes -# - Based off it's compatibility, updates each module's -# a) Major version -# b) Specification version -# c) Implementation version -# - Updates the dependencies on each module depending on the -# updated version numbers -# -# Optionally, when run from the command line, one can provide the -# desired tag to compare the current version to, the directory for -# the current version of Autopsy, and whether to automatically -# update the version numbers and dependencies. 
-# ------------------------------------------------------------ - -import errno -import os -import shutil -import stat -import subprocess -import sys -import traceback -from os import remove, close -from shutil import move -from tempfile import mkstemp -from xml.dom.minidom import parse, parseString - -# Jdiff return codes. Described in more detail further on -NO_CHANGES = 100 -COMPATIBLE = 101 -NON_COMPATIBLE = 102 -ERROR = 1 - -# An Autopsy module object -class Module: - # Initialize it with a name, return code, and version numbers - def __init__(self, name=None, ret=None, versions=None): - self.name = name - self.ret = ret - self.versions = versions - # As a string, the module should be it's name - def __str__(self): - return self.name - def __repr__(self): - return self.name - # When compared to another module, the two are equal if the names are the same - def __cmp__(self, other): - if isinstance(other, Module): - if self.name == other.name: - return 0 - elif self.name < other.name: - return -1 - else: - return 1 - return 1 - def __eq__(self, other): - if isinstance(other, Module): - if self.name == other.name: - return True - return False - def set_name(self, name): - self.name = name - def set_ret(self, ret): - self.ret = ret - def set_versions(self, versions): - self.versions = versions - def spec(self): - return self.versions[0] - def impl(self): - return self.versions[1] - def release(self): - return self.versions[2] - -# Representation of the Specification version number -class Spec: - # Initialize specification number, where num is a string like x.y - def __init__(self, num): - self.third = None - spec_nums = num.split(".") - if len(spec_nums) == 3: - final = spec_nums[2] - self.third = int(final) - - l, r = spec_nums[0], spec_nums[1] - - self.left = int(l) - self.right = int(r) - - def __str__(self): - return self.get() - def __cmp__(self, other): - if isinstance(other, Spec): - if self.left == other.left: - if self.right == other.right: - return 0 - 
if self.right < other.right: - return -1 - return 1 - if self.left < other.left: - return -1 - return 1 - elif isinstance(other, str): - l, r = other.split(".") - if self.left == int(l): - if self.right == int(r): - return 0 - if self.right < int(r): - return -1 - return 1 - if self.left < int(l): - return -1 - return 1 - return -1 - - def overflow(self): - return str(self.left + 1) + ".0" - def increment(self): - return str(self.left) + "." + str(self.right + 1) - def get(self): - spec_str = str(self.left) + "." + str(self.right) - if self.third is not None: - spec_str += "." + str(self.final) - return spec_str - def set(self, num): - if isinstance(num, str): - l, r = num.split(".") - self.left = int(l) - self.right = int(r) - elif isinstance(num, Spec): - self.left = num.left - self.right = num.right - return self - -# ================================ # -# Core Functions # -# ================================ # - -# Given a list of modules and the names for each version, compare -# the generated jdiff XML for each module and output the jdiff -# JavaDocs. -# -# modules: the list of all modules both versions have in common -# apiname_tag: the api name of the previous version, most likely the tag -# apiname_cur: the api name of the current version, most likely "Current" -# -# returns the exit code from the modified jdiff.jar -# return code 1 = error in jdiff -# return code 100 = no changes -# return code 101 = compatible changes -# return code 102 = incompatible changes -def compare_xml(module, apiname_tag, apiname_cur): - global docdir - make_dir(docdir) - null_file = fix_path(os.path.abspath("./thirdparty/jdiff/v-custom/lib/Null.java")) - jdiff = fix_path(os.path.abspath("./thirdparty/jdiff/v-custom/jdiff.jar")) - oldapi = fix_path("build/jdiff-xml/" + apiname_tag + "-" + module.name) - newapi = fix_path("build/jdiff-xml/" + apiname_cur + "-" + module.name) - docs = fix_path(docdir + "/" + module.name) - # Comments are strange. 
They look for a file with additional user comments in a - # directory like docs/user_comments_for_xyz. The problem being that xyz is the - # path to the new/old api. So xyz turns into multiple directories for us. - # i.e. user_comments_for_build/jdiff-xml/[tag name]-[module name]_to_build/jdiff-xml - comments = fix_path(docs + "/user_comments_for_build") - jdiff_com = fix_path(comments + "/jdiff-xml") - tag_comments = fix_path(jdiff_com + "/" + apiname_tag + "-" + module.name + "_to_build") - jdiff_tag_com = fix_path(tag_comments + "/jdiff-xml") - - if not os.path.exists(jdiff): - print("JDIFF doesn't exist.") - - make_dir(docs) - make_dir(comments) - make_dir(jdiff_com) - make_dir(tag_comments) - make_dir(jdiff_tag_com) - make_dir("jdiff-logs") - log = open("jdiff-logs/COMPARE-" + module.name + ".log", "w") - cmd = ["javadoc", - "-doclet", "jdiff.JDiff", - "-docletpath", jdiff, - "-d", docs, - "-oldapi", oldapi, - "-newapi", newapi, - "-script", - null_file] - jdiff = subprocess.Popen(cmd, stdout=log, stderr=log) - jdiff.wait() - log.close() - code = jdiff.returncode - print("Compared XML for " + module.name) - if code == NO_CHANGES: - print(" No API changes") - elif code == COMPATIBLE: - print(" API Changes are backwards compatible") - elif code == NON_COMPATIBLE: - print(" API Changes are not backwards compatible") - else: - print(" *Error in XML, most likely an empty module") - sys.stdout.flush() - return code - -# Generate the jdiff xml for the given module -# path: path to the autopsy source -# module: Module object -# name: api name for jdiff -def gen_xml(path, modules, name): - for module in modules: - # If its the regression test, the source is in the "test" dir - if module.name == "Testing": - src = os.path.join(path, module.name, "test", "qa-functional", "src") - else: - src = os.path.join(path, module.name, "src") - # xerces = os.path.abspath("./lib/xerces.jar") - xml_out = fix_path(os.path.abspath("./build/jdiff-xml/" + name + "-" + module.name)) - 
jdiff = fix_path(os.path.abspath("./thirdparty/jdiff/v-custom/jdiff.jar")) - make_dir("build/jdiff-xml") - make_dir("jdiff-logs") - log = open("jdiff-logs/GEN_XML-" + name + "-" + module.name + ".log", "w") - cmd = ["javadoc", - "-doclet", "jdiff.JDiff", - "-docletpath", jdiff, # ;" + xerces, <-- previous problems required this - "-apiname", xml_out, # leaving it in just in case it's needed once again - "-sourcepath", fix_path(src)] - cmd = cmd + get_packages(src) - jdiff = subprocess.Popen(cmd, stdout=log, stderr=log) - jdiff.wait() - log.close() - print("Generated XML for " + name + " " + module.name) - sys.stdout.flush() - -# Find all the modules in the given path -def find_modules(path): - modules = [] - # Step into each folder in the given path and - # see if it has manifest.mf - if so, it's a module - for dir in os.listdir(path): - directory = os.path.join(path, dir) - if os.path.isdir(directory): - for file in os.listdir(directory): - if file == "manifest.mf": - modules.append(Module(dir, None, None)) - return modules - -# Detects the differences between the source and tag modules -def module_diff(source_modules, tag_modules): - added_modules = [x for x in source_modules if x not in tag_modules] - removed_modules = [x for x in tag_modules if x not in source_modules] - similar_modules = [x for x in source_modules if x in tag_modules] - - added_modules = (added_modules if added_modules else []) - removed_modules = (removed_modules if removed_modules else []) - similar_modules = (similar_modules if similar_modules else []) - return similar_modules, added_modules, removed_modules - -# Reads the previous tag from NEWS.txt -def get_tag(sourcepath): - news = open(sourcepath + "/NEWS.txt", "r") - second_instance = False - for line in news: - if "----------------" in line: - if second_instance: - ver = line.split("VERSION ")[1] - ver = ver.split(" -")[0] - return ("autopsy-" + ver).strip() - else: - second_instance = True - continue - news.close() - - -# 
========================================== # -# Dependency Functions # -# ========================================== # - -# Write a new XML file, copying all the lines from projectxml -# and replacing the specification version for the code-name-base base -# with the supplied specification version spec -def set_dep_spec(projectxml, base, spec): - print(" Updating Specification version..") - orig = open(projectxml, "r") - f, abs_path = mkstemp() - new_file = open(abs_path, "w") - found_base = False - spacing = " " - sopen = "" - sclose = "\n" - for line in orig: - if base in line: - found_base = True - if found_base and sopen in line: - update = spacing + sopen + str(spec) + sclose - new_file.write(update) - else: - new_file.write(line) - new_file.close() - close(f) - orig.close() - remove(projectxml) - move(abs_path, projectxml) - -# Write a new XML file, copying all the lines from projectxml -# and replacing the release version for the code-name-base base -# with the supplied release version -def set_dep_release(projectxml, base, release): - print(" Updating Release version..") - orig = open(projectxml, "r") - f, abs_path = mkstemp() - new_file = open(abs_path, "w") - found_base = False - spacing = " " - ropen = "" - rclose = "\n" - for line in orig: - if base in line: - found_base = True - if found_base and ropen in line: - update = spacing + ropen + str(release) + rclose - new_file.write(update) - else: - new_file.write(line) - new_file.close() - close(f) - orig.close() - remove(projectxml) - move(abs_path, projectxml) - -# Return the dependency versions in the XML dependency node -def get_dep_versions(dep): - run_dependency = dep.getElementsByTagName("run-dependency")[0] - release_version = run_dependency.getElementsByTagName("release-version") - if release_version: - release_version = getTagText(release_version[0].childNodes) - specification_version = run_dependency.getElementsByTagName("specification-version") - if specification_version: - 
specification_version = getTagText(specification_version[0].childNodes) - return int(release_version), Spec(specification_version) - -# Given a code-name-base, see if it corresponds with any of our modules -def get_module_from_base(modules, code_name_base): - for module in modules: - if "org.sleuthkit.autopsy." + module.name.lower() == code_name_base: - return module - return None # If it didn't match one of our modules - -# Check the text between two XML tags -def getTagText(nodelist): - for node in nodelist: - if node.nodeType == node.TEXT_NODE: - return node.data - -# Check the projectxml for a dependency on any module in modules -def check_for_dependencies(projectxml, modules): - dom = parse(projectxml) - dep_list = dom.getElementsByTagName("dependency") - for dep in dep_list: - code_name_base = dep.getElementsByTagName("code-name-base")[0] - code_name_base = getTagText(code_name_base.childNodes) - module = get_module_from_base(modules, code_name_base) - if module: - print(" Found dependency on " + module.name) - release, spec = get_dep_versions(dep) - if release != module.release() and module.release() is not None: - set_dep_release(projectxml, code_name_base, module.release()) - else: print(" Release version is correct") - if spec != module.spec() and module.spec() is not None: - set_dep_spec(projectxml, code_name_base, module.spec()) - else: print(" Specification version is correct") - -# Given the module and the source directory, return -# the paths to the manifest and project properties files -def get_dependency_file(module, source): - projectxml = os.path.join(source, module.name, "nbproject", "project.xml") - if os.path.isfile(projectxml): - return projectxml - -# Verify/Update the dependencies for each module, basing the dependency -# version number off the versions in each module -def update_dependencies(modules, source): - for module in modules: - print("Checking the dependencies for " + module.name + "...") - projectxml = get_dependency_file(module, 
source) - if projectxml == None: - print(" Error finding project xml file") - else: - other = [x for x in modules] - check_for_dependencies(projectxml, other) - sys.stdout.flush() - -# ======================================== # -# Versioning Functions # -# ======================================== # - -# Return the specification version in the given project.properties/manifest.mf file -def get_specification(project, manifest): - try: - # Try to find it in the project file - # it will be there if impl version is set to append automatically - f = open(project, 'r') - for line in f: - if "spec.version.base" in line: - return Spec(line.split("=")[1].strip()) - f.close() - # If not found there, try the manifest file - f = open(manifest, 'r') - for line in f: - if "OpenIDE-Module-Specification-Version:" in line: - return Spec(line.split(": ")[1].strip()) - except Exception as e: - print("Error parsing Specification version for") - print(project) - print(e) - -# Set the specification version in the given project properties file -# but if it can't be found there, set it in the manifest file -def set_specification(project, manifest, num): - try: - # First try the project file - f = open(project, 'r') - for line in f: - if "spec.version.base" in line: - f.close() - replace(project, line, "spec.version.base=" + str(num) + "\n") - return - f.close() - # If it's not there, try the manifest file - f = open(manifest, 'r') - for line in f: - if "OpenIDE-Module-Specification-Version:" in line: - f.close() - replace(manifest, line, "OpenIDE-Module-Specification-Version: " + str(num) + "\n") - return - # Otherwise we're out of luck - print(" Error finding the Specification version to update") - print(" " + manifest) - f.close() - except: - print(" Error incrementing Specification version for") - print(" " + project) - -# Return the implementation version in the given manifest.mf file -def get_implementation(manifest): - try: - f = open(manifest, 'r') - for line in f: - if 
"OpenIDE-Module-Implementation-Version" in line: - return int(line.split(": ")[1].strip()) - f.close() - except: - print("Error parsing Implementation version for") - print(manifest) - -# Set the implementation version in the given manifest file -def set_implementation(manifest, num): - try: - f = open(manifest, 'r') - for line in f: - if "OpenIDE-Module-Implementation-Version" in line: - f.close() - replace(manifest, line, "OpenIDE-Module-Implementation-Version: " + str(num) + "\n") - return - # If it isn't there, add it - f.close() - write_implementation(manifest, num) - except: - print(" Error incrementing Implementation version for") - print(" " + manifest) - -# Rewrite the manifest file to include the implementation version -def write_implementation(manifest, num): - f = open(manifest, "r") - contents = f.read() - contents = contents[:-2] + "OpenIDE-Module-Implementation-Version: " + str(num) + "\n\n" - f.close() - f = open(manifest, "w") - f.write(contents) - f.close() - -# Return the release version in the given manifest.mf file -def get_release(manifest): - try: - f = open(manifest, 'r') - for line in f: - if "OpenIDE-Module:" in line: - return int(line.split("/")[1].strip()) - f.close() - except: - #print("Error parsing Release version for") - #print(manifest) - return 0 - -# Set the release version in the given manifest file -def set_release(manifest, num): - try: - f = open(manifest, 'r') - for line in f: - if "OpenIDE-Module:" in line: - f.close() - index = line.index('/') - len(line) + 1 - newline = line[:index] + str(num) - replace(manifest, line, newline + "\n") - return - print(" Error finding the release version to update") - print(" " + manifest) - f.close() - except: - print(" Error incrementing release version for") - print(" " + manifest) - -# Given the module and the source directory, return -# the paths to the manifest and project properties files -def get_version_files(module, source): - manifest = os.path.join(source, module.name, 
"manifest.mf") - project = os.path.join(source, module.name, "nbproject", "project.properties") - if os.path.isfile(manifest) and os.path.isfile(project): - return manifest, project - -# Returns a the current version numbers for the module in source -def get_versions(module, source): - manifest, project = get_version_files(module, source) - if manifest == None or project == None: - print(" Error finding manifeset and project properties files") - return - spec = get_specification(project, manifest) - impl = get_implementation(manifest) - release = get_release(manifest) - return [spec, impl, release] - -# Update the version numbers for every module in modules -def update_versions(modules, source): - for module in modules: - versions = module.versions - manifest, project = get_version_files(module, source) - print("Updating " + module.name + "...") - if manifest == None or project == None: - print(" Error finding manifeset and project properties files") - return - if module.ret == COMPATIBLE: - versions = [versions[0].set(versions[0].increment()), versions[1] + 1, versions[2]] - set_specification(project, manifest, versions[0]) - set_implementation(manifest, versions[1]) - module.set_versions(versions) - elif module.ret == NON_COMPATIBLE: - versions = [versions[0].set(versions[0].overflow()), versions[1] + 1, versions[2] + 1] - set_specification(project, manifest, versions[0]) - set_implementation(manifest, versions[1]) - set_release(manifest, versions[2]) - module.set_versions(versions) - elif module.ret == NO_CHANGES: - versions = [versions[0], versions[1] + 1, versions[2]] - set_implementation(manifest, versions[1]) - module.set_versions(versions) - elif module.ret == None: - versions = [Spec("1.0"), 1, 1] - set_specification(project, manifest, versions[0]) - set_implementation(manifest, versions[1]) - set_release(manifest, versions[2]) - module.set_versions(versions) - sys.stdout.flush() - -# Given a list of the added modules, remove the modules -# which have the 
correct 'new module default' version number -def remove_correct_added(modules): - correct = [x for x in modules] - for module in modules: - if module.spec() == "1.0" or module.spec() == "0.0": - if module.impl() == 1: - if module.release() == 1 or module.release() == 0: - correct.remove(module) - return correct - -# ==================================== # -# Helper Functions # -# ==================================== # - -# Replace pattern with subst in given file -def replace(file, pattern, subst): - #Create temp file - fh, abs_path = mkstemp() - new_file = open(abs_path,'w') - old_file = open(file) - for line in old_file: - new_file.write(line.replace(pattern, subst)) - #close temp file - new_file.close() - close(fh) - old_file.close() - #Remove original file - remove(file) - #Move new file - move(abs_path, file) - -# Given a list of modules print the version numbers that need changing -def print_version_updates(modules): - f = open("gen_version.txt", "a") - for module in modules: - versions = module.versions - if module.ret == COMPATIBLE: - output = (module.name + ":\n") - output += ("\tSpecification:\t" + str(versions[0]) + "\t->\t" + str(versions[0].increment()) + "\n") - output += ("\tImplementation:\t" + str(versions[1]) + "\t->\t" + str(versions[1] + 1) + "\n") - output += ("\tRelease:\tNo Change.\n") - output += ("\n") - print(output) - sys.stdout.flush() - f.write(output) - elif module.ret == NON_COMPATIBLE: - output = (module.name + ":\n") - output += ("\tSpecification:\t" + str(versions[0]) + "\t->\t" + str(versions[0].overflow()) + "\n") - output += ("\tImplementation:\t" + str(versions[1]) + "\t->\t" + str(versions[1] + 1) + "\n") - output += ("\tRelease:\t" + str(versions[2]) + "\t->\t" + str(versions[2] + 1) + "\n") - output += ("\n") - print(output) - sys.stdout.flush() - f.write(output) - elif module.ret == ERROR: - output = (module.name + ":\n") - output += ("\t*Unable to detect necessary changes\n") - output += ("\tSpecification:\t" + 
str(versions[0]) + "\n") - output += ("\tImplementation:\t" + str(versions[1]) + "\n") - output += ("\tRelease:\t\t" + str(versions[2]) + "\n") - output += ("\n") - print(output) - f.write(output) - sys.stdout.flush() - elif module.ret == NO_CHANGES: - output = (module.name + ":\n") - if versions[1] is None: - output += ("\tImplementation: None\n") - else: - output += ("\tImplementation:\t" + str(versions[1]) + "\t->\t" + str(versions[1] + 1) + "\n") - output += ("\n") - print(output) - sys.stdout.flush() - f.write(output) - elif module.ret is None: - output = ("Added " + module.name + ":\n") - if module.spec() != "1.0" and module.spec() != "0.0": - output += ("\tSpecification:\t" + str(module.spec()) + "\t->\t" + "1.0\n") - output += ("\n") - if module.impl() != 1: - output += ("\tImplementation:\t" + str(module.impl()) + "\t->\t" + "1\n") - output += ("\n") - if module.release() != 1 and module.release() != 0: - output += ("Release:\t\t" + str(module.release()) + "\t->\t" + "1\n") - output += ("\n") - print(output) - sys.stdout.flush() - f.write(output) - sys.stdout.flush() - f.close() - -# Changes cygwin paths to Windows -def fix_path(path): - if "cygdrive" in path: - new_path = path[11:] - return "C:/" + new_path - else: - return path - -# Print a 'title' -def printt(title): - print("\n" + title) - lines = "" - for letter in title: - lines += "-" - print(lines) - sys.stdout.flush() - -# Get a list of package names in the given path -# The path is expected to be of the form {base}/module/src -# -# NOTE: We currently only check for packages of the form -# org.sleuthkit.autopsy.x -# If we add other namespaces for commercial modules we will -# have to add a check here -def get_packages(path): - packages = [] - package_path = os.path.join(path, "org", "sleuthkit", "autopsy") - for folder in os.listdir(package_path): - package_string = "org.sleuthkit.autopsy." 
- packages.append(package_string + folder) - return packages - -# Create the given directory, if it doesn't already exist -def make_dir(dir): - try: - if not os.path.isdir(dir): - os.mkdir(dir) - if os.path.isdir(dir): - return True - return False - except: - print("Exception thrown when creating directory") - return False - -# Delete the given directory, and make sure it is deleted -def del_dir(dir): - try: - if os.path.isdir(dir): - shutil.rmtree(dir, ignore_errors=False, onerror=handleRemoveReadonly) - if os.path.isdir(dir): - return False - else: - return True - return True - except: - print("Exception thrown when deleting directory") - traceback.print_exc() - return False - -# Handle any permisson errors thrown by shutil.rmtree -def handleRemoveReadonly(func, path, exc): - excvalue = exc[1] - if func in (os.rmdir, os.remove) and excvalue.errno == errno.EACCES: - os.chmod(path, stat.S_IRWXU| stat.S_IRWXG| stat.S_IRWXO) # 0777 - func(path) - else: - raise - -# Run git clone and git checkout for the tag -def do_git(tag, tag_dir): - try: - printt("Cloning Autopsy tag " + tag + " into dir " + tag_dir + " (this could take a while)...") - subprocess.call(["git", "clone", "https://github.com/sleuthkit/autopsy.git", tag_dir], - stdout=subprocess.PIPE) - printt("Checking out tag " + tag + "...") - subprocess.call(["git", "checkout", tag], - stdout=subprocess.PIPE, - cwd=tag_dir) - return True - except Exception as ex: - print("Error cloning and checking out Autopsy: ", sys.exc_info()[0]) - print(str(ex)) - print("The terminal you are using most likely does not recognize git commands.") - return False - -# Get the flags from argv -def args(): - try: - sys.argv.pop(0) - while sys.argv: - arg = sys.argv.pop(0) - if arg == "-h" or arg == "--help": - return 1 - elif arg == "-t" or arg == "--tag": - global tag - tag = sys.argv.pop(0) - elif arg == "-s" or arg == "--source": - global source - source = sys.argv.pop(0) - elif arg == "-d" or arg == "--dir": - global docdir - 
docdir = sys.argv.pop(0) - elif arg == "-a" or arg == "--auto": - global dry - dry = False - else: - raise Exception() - except: - pass - -# Print script run info -def printinfo(): - global tag - global source - global docdir - global dry - printt("Release script information:") - if source is None: - source = fix_path(os.path.abspath(".")) - print("Using source directory:\n " + source) - if tag is None: - tag = get_tag(source) - print("Checking out to tag:\n " + tag) - if docdir is None: - docdir = fix_path(os.path.abspath("./jdiff-javadocs")) - print("Generating jdiff JavaDocs in:\n " + docdir) - if dry is True: - print("Dry run: will not auto-update version numbers") - sys.stdout.flush() - -# Print the script's usage/help -def usage(): - return \ - """ - USAGE: - Compares the API of the current Autopsy source code with a previous - tagged version. By default, it will detect the previous tag from - the NEWS file and will not update the versions in the source code. - - OPTIONAL FLAGS: - -t --tag Specify a previous tag to compare to. - Otherwise the NEWS file will be used. - - -d --dir The output directory for the jdiff JavaDocs. If no - directory is given, the default is jdiff-javadocs/{module}. - - -s --source The directory containing Autopsy's source code. - - -a --auto Automatically update version numbers (not dry). - - -h --help Prints this usage. 
- """ - -# ==================================== # -# Main Functionality # -# ==================================== # - -# Where the magic happens -def main(): - global tag; global source; global docdir; global dry - tag = None; source = None; docdir = None; dry = True - - ret = args() - if ret: - print(usage()) - return 0 - printinfo() - - # ----------------------------------------------- - # 1) Clone Autopsy, checkout to given tag/commit - # 2) Get the modules in the clone and the source - # 3) Generate the xml comparison - # ----------------------------------------------- - if not del_dir("./build/" + tag): - print("\n\n=========================================") - print(" Failed to delete previous Autopsy clone.") - print(" Unable to continue...") - print("=========================================") - return 1 - tag_dir = os.path.abspath("./build/" + tag) - if not do_git(tag, tag_dir): - return 1 - sys.stdout.flush() - - tag_modules = find_modules(tag_dir) - source_modules = find_modules(source) - - printt("Generating jdiff XML reports...") - apiname_tag = tag - apiname_cur = "current" - gen_xml(tag_dir, tag_modules, apiname_tag) - gen_xml(source, source_modules, apiname_cur) - - printt("Deleting cloned Autopsy directory...") - print("Clone successfully deleted" if del_dir(tag_dir) else "Failed to delete clone") - sys.stdout.flush() - - # ----------------------------------------------------- - # 1) Seperate modules into added, similar, and removed - # 2) Compare XML for each module - # ----------------------------------------------------- - printt("Comparing modules found...") - similar_modules, added_modules, removed_modules = module_diff(source_modules, tag_modules) - if added_modules or removed_modules: - for m in added_modules: - print("+ Added " + m.name) - sys.stdout.flush() - for m in removed_modules: - print("- Removed " + m.name) - sys.stdout.flush() - else: - print("No added or removed modules") - sys.stdout.flush() - - printt("Comparing jdiff 
outputs...") - for module in similar_modules: - module.set_ret(compare_xml(module, apiname_tag, apiname_cur)) - print("Refer to the jdiff-javadocs folder for more details") - - # ------------------------------------------------------------ - # 1) Do versioning - # 2) Auto-update version numbers in files and the_modules list - # 3) Auto-update dependencies - # ------------------------------------------------------------ - printt("Auto-detecting version numbers and changes...") - for module in added_modules: - module.set_versions(get_versions(module, source)) - for module in similar_modules: - module.set_versions(get_versions(module, source)) - - added_modules = remove_correct_added(added_modules) - the_modules = similar_modules + added_modules - print_version_updates(the_modules) - - if not dry: - printt("Auto-updating version numbers...") - update_versions(the_modules, source) - print("All auto-updates complete") - - printt("Detecting and auto-updating dependencies...") - update_dependencies(the_modules, source) - - printt("Deleting jdiff XML...") - xml_dir = os.path.abspath("./build/jdiff-xml") - print("XML successfully deleted" if del_dir(xml_dir) else "Failed to delete XML") - - print("\n--- Script completed successfully ---") - return 0 - -# Start off the script -if __name__ == "__main__": - sys.exit(main()) +# +# Autopsy Forensic Browser +# +# Copyright 2012-2013 Basis Technology Corp. +# Contact: carrier sleuthkit org +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + + +####################### +# This script exists to help us determine update the library +# versions appropriately. See this page for version details. +# +# http://wiki.sleuthkit.org/index.php?title=Autopsy_3_Module_Versions +# +# The basic idea is that this script uses javadoc/jdiff to +# compare the current state of the source code to the last +# tag and identifies if APIs were removed, added, etc. +# +# When run from the Autopsy build script, this script will: +# - Clone Autopsy and checkout to the previous release tag +# as found in the NEWS.txt file +# - Auto-discover all modules and packages +# - Run jdiff, comparing the current and previous modules +# - Use jdiff's output to determine if each module +# a) has no changes +# b) has backwards compatible changes +# c) has backwards incompatible changes +# - Based off it's compatibility, updates each module's +# a) Major version +# b) Specification version +# c) Implementation version +# - Updates the dependencies on each module depending on the +# updated version numbers +# +# Optionally, when run from the command line, one can provide the +# desired tag to compare the current version to, the directory for +# the current version of Autopsy, and whether to automatically +# update the version numbers and dependencies. +# ------------------------------------------------------------ + +import errno +import os +import shutil +import stat +import subprocess +import sys +import traceback +from os import remove, close +from shutil import move +from tempfile import mkstemp +from xml.dom.minidom import parse, parseString + +# Jdiff return codes. 
Described in more detail further on +NO_CHANGES = 100 +COMPATIBLE = 101 +NON_COMPATIBLE = 102 +ERROR = 1 + +# An Autopsy module object +class Module: + # Initialize it with a name, return code, and version numbers + def __init__(self, name=None, ret=None, versions=None): + self.name = name + self.ret = ret + self.versions = versions + # As a string, the module should be it's name + def __str__(self): + return self.name + def __repr__(self): + return self.name + # When compared to another module, the two are equal if the names are the same + def __cmp__(self, other): + if isinstance(other, Module): + if self.name == other.name: + return 0 + elif self.name < other.name: + return -1 + else: + return 1 + return 1 + def __eq__(self, other): + if isinstance(other, Module): + if self.name == other.name: + return True + return False + def set_name(self, name): + self.name = name + def set_ret(self, ret): + self.ret = ret + def set_versions(self, versions): + self.versions = versions + def spec(self): + return self.versions[0] + def impl(self): + return self.versions[1] + def release(self): + return self.versions[2] + +# Representation of the Specification version number +class Spec: + # Initialize specification number, where num is a string like x.y + def __init__(self, num): + self.third = None + spec_nums = num.split(".") + if len(spec_nums) == 3: + final = spec_nums[2] + self.third = int(final) + + l, r = spec_nums[0], spec_nums[1] + + self.left = int(l) + self.right = int(r) + + def __str__(self): + return self.get() + def __cmp__(self, other): + if isinstance(other, Spec): + if self.left == other.left: + if self.right == other.right: + return 0 + if self.right < other.right: + return -1 + return 1 + if self.left < other.left: + return -1 + return 1 + elif isinstance(other, str): + l, r = other.split(".") + if self.left == int(l): + if self.right == int(r): + return 0 + if self.right < int(r): + return -1 + return 1 + if self.left < int(l): + return -1 + return 1 + 
return -1 + + def overflow(self): + return str(self.left + 1) + ".0" + def increment(self): + return str(self.left) + "." + str(self.right + 1) + def get(self): + spec_str = str(self.left) + "." + str(self.right) + if self.third is not None: + spec_str += "." + str(self.final) + return spec_str + def set(self, num): + if isinstance(num, str): + l, r = num.split(".") + self.left = int(l) + self.right = int(r) + elif isinstance(num, Spec): + self.left = num.left + self.right = num.right + return self + +# ================================ # +# Core Functions # +# ================================ # + +# Given a list of modules and the names for each version, compare +# the generated jdiff XML for each module and output the jdiff +# JavaDocs. +# +# modules: the list of all modules both versions have in common +# apiname_tag: the api name of the previous version, most likely the tag +# apiname_cur: the api name of the current version, most likely "Current" +# +# returns the exit code from the modified jdiff.jar +# return code 1 = error in jdiff +# return code 100 = no changes +# return code 101 = compatible changes +# return code 102 = incompatible changes +def compare_xml(module, apiname_tag, apiname_cur): + global docdir + make_dir(docdir) + null_file = fix_path(os.path.abspath("./thirdparty/jdiff/v-custom/lib/Null.java")) + jdiff = fix_path(os.path.abspath("./thirdparty/jdiff/v-custom/jdiff.jar")) + oldapi = fix_path("build/jdiff-xml/" + apiname_tag + "-" + module.name) + newapi = fix_path("build/jdiff-xml/" + apiname_cur + "-" + module.name) + docs = fix_path(docdir + "/" + module.name) + # Comments are strange. They look for a file with additional user comments in a + # directory like docs/user_comments_for_xyz. The problem being that xyz is the + # path to the new/old api. So xyz turns into multiple directories for us. + # i.e. 
user_comments_for_build/jdiff-xml/[tag name]-[module name]_to_build/jdiff-xml + comments = fix_path(docs + "/user_comments_for_build") + jdiff_com = fix_path(comments + "/jdiff-xml") + tag_comments = fix_path(jdiff_com + "/" + apiname_tag + "-" + module.name + "_to_build") + jdiff_tag_com = fix_path(tag_comments + "/jdiff-xml") + + if not os.path.exists(jdiff): + print("JDIFF doesn't exist.") + + make_dir(docs) + make_dir(comments) + make_dir(jdiff_com) + make_dir(tag_comments) + make_dir(jdiff_tag_com) + make_dir("jdiff-logs") + log = open("jdiff-logs/COMPARE-" + module.name + ".log", "w") + cmd = ["javadoc", + "-doclet", "jdiff.JDiff", + "-docletpath", jdiff, + "-d", docs, + "-oldapi", oldapi, + "-newapi", newapi, + "-script", + null_file] + jdiff = subprocess.Popen(cmd, stdout=log, stderr=log) + jdiff.wait() + log.close() + code = jdiff.returncode + print("Compared XML for " + module.name) + if code == NO_CHANGES: + print(" No API changes") + elif code == COMPATIBLE: + print(" API Changes are backwards compatible") + elif code == NON_COMPATIBLE: + print(" API Changes are not backwards compatible") + else: + print(" *Error in XML, most likely an empty module") + sys.stdout.flush() + return code + +# Generate the jdiff xml for the given module +# path: path to the autopsy source +# module: Module object +# name: api name for jdiff +def gen_xml(path, modules, name): + for module in modules: + # If its the regression test, the source is in the "test" dir + if module.name == "Testing": + src = os.path.join(path, module.name, "test", "qa-functional", "src") + else: + src = os.path.join(path, module.name, "src") + # xerces = os.path.abspath("./lib/xerces.jar") + xml_out = fix_path(os.path.abspath("./build/jdiff-xml/" + name + "-" + module.name)) + jdiff = fix_path(os.path.abspath("./thirdparty/jdiff/v-custom/jdiff.jar")) + make_dir("build/jdiff-xml") + make_dir("jdiff-logs") + log = open("jdiff-logs/GEN_XML-" + name + "-" + module.name + ".log", "w") + cmd = 
["javadoc", + "-doclet", "jdiff.JDiff", + "-docletpath", jdiff, # ;" + xerces, <-- previous problems required this + "-apiname", xml_out, # leaving it in just in case it's needed once again + "-sourcepath", fix_path(src)] + cmd = cmd + get_packages(src) + jdiff = subprocess.Popen(cmd, stdout=log, stderr=log) + jdiff.wait() + log.close() + print("Generated XML for " + name + " " + module.name) + sys.stdout.flush() + +# Find all the modules in the given path +def find_modules(path): + modules = [] + # Step into each folder in the given path and + # see if it has manifest.mf - if so, it's a module + for dir in os.listdir(path): + directory = os.path.join(path, dir) + if os.path.isdir(directory): + for file in os.listdir(directory): + if file == "manifest.mf": + modules.append(Module(dir, None, None)) + return modules + +# Detects the differences between the source and tag modules +def module_diff(source_modules, tag_modules): + added_modules = [x for x in source_modules if x not in tag_modules] + removed_modules = [x for x in tag_modules if x not in source_modules] + similar_modules = [x for x in source_modules if x in tag_modules] + + added_modules = (added_modules if added_modules else []) + removed_modules = (removed_modules if removed_modules else []) + similar_modules = (similar_modules if similar_modules else []) + return similar_modules, added_modules, removed_modules + +# Reads the previous tag from NEWS.txt +def get_tag(sourcepath): + news = open(sourcepath + "/NEWS.txt", "r") + second_instance = False + for line in news: + if "----------------" in line: + if second_instance: + ver = line.split("VERSION ")[1] + ver = ver.split(" -")[0] + return ("autopsy-" + ver).strip() + else: + second_instance = True + continue + news.close() + + +# ========================================== # +# Dependency Functions # +# ========================================== # + +# Write a new XML file, copying all the lines from projectxml +# and replacing the specification version 
for the code-name-base base +# with the supplied specification version spec +def set_dep_spec(projectxml, base, spec): + print(" Updating Specification version..") + orig = open(projectxml, "r") + f, abs_path = mkstemp() + new_file = open(abs_path, "w") + found_base = False + spacing = " " + sopen = "" + sclose = "\n" + for line in orig: + if base in line: + found_base = True + if found_base and sopen in line: + update = spacing + sopen + str(spec) + sclose + new_file.write(update) + else: + new_file.write(line) + new_file.close() + close(f) + orig.close() + remove(projectxml) + move(abs_path, projectxml) + +# Write a new XML file, copying all the lines from projectxml +# and replacing the release version for the code-name-base base +# with the supplied release version +def set_dep_release(projectxml, base, release): + print(" Updating Release version..") + orig = open(projectxml, "r") + f, abs_path = mkstemp() + new_file = open(abs_path, "w") + found_base = False + spacing = " " + ropen = "" + rclose = "\n" + for line in orig: + if base in line: + found_base = True + if found_base and ropen in line: + update = spacing + ropen + str(release) + rclose + new_file.write(update) + else: + new_file.write(line) + new_file.close() + close(f) + orig.close() + remove(projectxml) + move(abs_path, projectxml) + +# Return the dependency versions in the XML dependency node +def get_dep_versions(dep): + run_dependency = dep.getElementsByTagName("run-dependency")[0] + release_version = run_dependency.getElementsByTagName("release-version") + if release_version: + release_version = getTagText(release_version[0].childNodes) + specification_version = run_dependency.getElementsByTagName("specification-version") + if specification_version: + specification_version = getTagText(specification_version[0].childNodes) + return int(release_version), Spec(specification_version) + +# Given a code-name-base, see if it corresponds with any of our modules +def get_module_from_base(modules, 
code_name_base): + for module in modules: + if "org.sleuthkit.autopsy." + module.name.lower() == code_name_base: + return module + return None # If it didn't match one of our modules + +# Check the text between two XML tags +def getTagText(nodelist): + for node in nodelist: + if node.nodeType == node.TEXT_NODE: + return node.data + +# Check the projectxml for a dependency on any module in modules +def check_for_dependencies(projectxml, modules): + dom = parse(projectxml) + dep_list = dom.getElementsByTagName("dependency") + for dep in dep_list: + code_name_base = dep.getElementsByTagName("code-name-base")[0] + code_name_base = getTagText(code_name_base.childNodes) + module = get_module_from_base(modules, code_name_base) + if module: + print(" Found dependency on " + module.name) + release, spec = get_dep_versions(dep) + if release != module.release() and module.release() is not None: + set_dep_release(projectxml, code_name_base, module.release()) + else: print(" Release version is correct") + if spec != module.spec() and module.spec() is not None: + set_dep_spec(projectxml, code_name_base, module.spec()) + else: print(" Specification version is correct") + +# Given the module and the source directory, return +# the paths to the manifest and project properties files +def get_dependency_file(module, source): + projectxml = os.path.join(source, module.name, "nbproject", "project.xml") + if os.path.isfile(projectxml): + return projectxml + +# Verify/Update the dependencies for each module, basing the dependency +# version number off the versions in each module +def update_dependencies(modules, source): + for module in modules: + print("Checking the dependencies for " + module.name + "...") + projectxml = get_dependency_file(module, source) + if projectxml == None: + print(" Error finding project xml file") + else: + other = [x for x in modules] + check_for_dependencies(projectxml, other) + sys.stdout.flush() + +# ======================================== # +# Versioning 
Functions # +# ======================================== # + +# Return the specification version in the given project.properties/manifest.mf file +def get_specification(project, manifest): + try: + # Try to find it in the project file + # it will be there if impl version is set to append automatically + f = open(project, 'r') + for line in f: + if "spec.version.base" in line: + return Spec(line.split("=")[1].strip()) + f.close() + # If not found there, try the manifest file + f = open(manifest, 'r') + for line in f: + if "OpenIDE-Module-Specification-Version:" in line: + return Spec(line.split(": ")[1].strip()) + except Exception as e: + print("Error parsing Specification version for") + print(project) + print(e) + +# Set the specification version in the given project properties file +# but if it can't be found there, set it in the manifest file +def set_specification(project, manifest, num): + try: + # First try the project file + f = open(project, 'r') + for line in f: + if "spec.version.base" in line: + f.close() + replace(project, line, "spec.version.base=" + str(num) + "\n") + return + f.close() + # If it's not there, try the manifest file + f = open(manifest, 'r') + for line in f: + if "OpenIDE-Module-Specification-Version:" in line: + f.close() + replace(manifest, line, "OpenIDE-Module-Specification-Version: " + str(num) + "\n") + return + # Otherwise we're out of luck + print(" Error finding the Specification version to update") + print(" " + manifest) + f.close() + except: + print(" Error incrementing Specification version for") + print(" " + project) + +# Return the implementation version in the given manifest.mf file +def get_implementation(manifest): + try: + f = open(manifest, 'r') + for line in f: + if "OpenIDE-Module-Implementation-Version" in line: + return int(line.split(": ")[1].strip()) + f.close() + except: + print("Error parsing Implementation version for") + print(manifest) + +# Set the implementation version in the given manifest file +def 
set_implementation(manifest, num): + try: + f = open(manifest, 'r') + for line in f: + if "OpenIDE-Module-Implementation-Version" in line: + f.close() + replace(manifest, line, "OpenIDE-Module-Implementation-Version: " + str(num) + "\n") + return + # If it isn't there, add it + f.close() + write_implementation(manifest, num) + except: + print(" Error incrementing Implementation version for") + print(" " + manifest) + +# Rewrite the manifest file to include the implementation version +def write_implementation(manifest, num): + f = open(manifest, "r") + contents = f.read() + contents = contents[:-2] + "OpenIDE-Module-Implementation-Version: " + str(num) + "\n\n" + f.close() + f = open(manifest, "w") + f.write(contents) + f.close() + +# Return the release version in the given manifest.mf file +def get_release(manifest): + try: + f = open(manifest, 'r') + for line in f: + if "OpenIDE-Module:" in line: + return int(line.split("/")[1].strip()) + f.close() + except: + #print("Error parsing Release version for") + #print(manifest) + return 0 + +# Set the release version in the given manifest file +def set_release(manifest, num): + try: + f = open(manifest, 'r') + for line in f: + if "OpenIDE-Module:" in line: + f.close() + index = line.index('/') - len(line) + 1 + newline = line[:index] + str(num) + replace(manifest, line, newline + "\n") + return + print(" Error finding the release version to update") + print(" " + manifest) + f.close() + except: + print(" Error incrementing release version for") + print(" " + manifest) + +# Given the module and the source directory, return +# the paths to the manifest and project properties files +def get_version_files(module, source): + manifest = os.path.join(source, module.name, "manifest.mf") + project = os.path.join(source, module.name, "nbproject", "project.properties") + if os.path.isfile(manifest) and os.path.isfile(project): + return manifest, project + +# Returns a the current version numbers for the module in source +def 
get_versions(module, source): + manifest, project = get_version_files(module, source) + if manifest == None or project == None: + print(" Error finding manifeset and project properties files") + return + spec = get_specification(project, manifest) + impl = get_implementation(manifest) + release = get_release(manifest) + return [spec, impl, release] + +# Update the version numbers for every module in modules +def update_versions(modules, source): + for module in modules: + versions = module.versions + manifest, project = get_version_files(module, source) + print("Updating " + module.name + "...") + if manifest == None or project == None: + print(" Error finding manifeset and project properties files") + return + if module.ret == COMPATIBLE: + versions = [versions[0].set(versions[0].increment()), versions[1] + 1, versions[2]] + set_specification(project, manifest, versions[0]) + set_implementation(manifest, versions[1]) + module.set_versions(versions) + elif module.ret == NON_COMPATIBLE: + versions = [versions[0].set(versions[0].overflow()), versions[1] + 1, versions[2] + 1] + set_specification(project, manifest, versions[0]) + set_implementation(manifest, versions[1]) + set_release(manifest, versions[2]) + module.set_versions(versions) + elif module.ret == NO_CHANGES: + versions = [versions[0], versions[1] + 1, versions[2]] + set_implementation(manifest, versions[1]) + module.set_versions(versions) + elif module.ret == None: + versions = [Spec("1.0"), 1, 1] + set_specification(project, manifest, versions[0]) + set_implementation(manifest, versions[1]) + set_release(manifest, versions[2]) + module.set_versions(versions) + sys.stdout.flush() + +# Given a list of the added modules, remove the modules +# which have the correct 'new module default' version number +def remove_correct_added(modules): + correct = [x for x in modules] + for module in modules: + if module.spec() == "1.0" or module.spec() == "0.0": + if module.impl() == 1: + if module.release() == 1 or 
module.release() == 0: + correct.remove(module) + return correct + +# ==================================== # +# Helper Functions # +# ==================================== # + +# Replace pattern with subst in given file +def replace(file, pattern, subst): + #Create temp file + fh, abs_path = mkstemp() + new_file = open(abs_path,'w') + old_file = open(file) + for line in old_file: + new_file.write(line.replace(pattern, subst)) + #close temp file + new_file.close() + close(fh) + old_file.close() + #Remove original file + remove(file) + #Move new file + move(abs_path, file) + +# Given a list of modules print the version numbers that need changing +def print_version_updates(modules): + f = open("gen_version.txt", "a") + for module in modules: + versions = module.versions + if module.ret == COMPATIBLE: + output = (module.name + ":\n") + output += ("\tSpecification:\t" + str(versions[0]) + "\t->\t" + str(versions[0].increment()) + "\n") + output += ("\tImplementation:\t" + str(versions[1]) + "\t->\t" + str(versions[1] + 1) + "\n") + output += ("\tRelease:\tNo Change.\n") + output += ("\n") + print(output) + sys.stdout.flush() + f.write(output) + elif module.ret == NON_COMPATIBLE: + output = (module.name + ":\n") + output += ("\tSpecification:\t" + str(versions[0]) + "\t->\t" + str(versions[0].overflow()) + "\n") + output += ("\tImplementation:\t" + str(versions[1]) + "\t->\t" + str(versions[1] + 1) + "\n") + output += ("\tRelease:\t" + str(versions[2]) + "\t->\t" + str(versions[2] + 1) + "\n") + output += ("\n") + print(output) + sys.stdout.flush() + f.write(output) + elif module.ret == ERROR: + output = (module.name + ":\n") + output += ("\t*Unable to detect necessary changes\n") + output += ("\tSpecification:\t" + str(versions[0]) + "\n") + output += ("\tImplementation:\t" + str(versions[1]) + "\n") + output += ("\tRelease:\t\t" + str(versions[2]) + "\n") + output += ("\n") + print(output) + f.write(output) + sys.stdout.flush() + elif module.ret == NO_CHANGES: + output 
= (module.name + ":\n") + if versions[1] is None: + output += ("\tImplementation: None\n") + else: + output += ("\tImplementation:\t" + str(versions[1]) + "\t->\t" + str(versions[1] + 1) + "\n") + output += ("\n") + print(output) + sys.stdout.flush() + f.write(output) + elif module.ret is None: + output = ("Added " + module.name + ":\n") + if module.spec() != "1.0" and module.spec() != "0.0": + output += ("\tSpecification:\t" + str(module.spec()) + "\t->\t" + "1.0\n") + output += ("\n") + if module.impl() != 1: + output += ("\tImplementation:\t" + str(module.impl()) + "\t->\t" + "1\n") + output += ("\n") + if module.release() != 1 and module.release() != 0: + output += ("Release:\t\t" + str(module.release()) + "\t->\t" + "1\n") + output += ("\n") + print(output) + sys.stdout.flush() + f.write(output) + sys.stdout.flush() + f.close() + +# Changes cygwin paths to Windows +def fix_path(path): + if "cygdrive" in path: + new_path = path[11:] + return "C:/" + new_path + else: + return path + +# Print a 'title' +def printt(title): + print("\n" + title) + lines = "" + for letter in title: + lines += "-" + print(lines) + sys.stdout.flush() + +# Get a list of package names in the given path +# The path is expected to be of the form {base}/module/src +# +# NOTE: We currently only check for packages of the form +# org.sleuthkit.autopsy.x +# If we add other namespaces for commercial modules we will +# have to add a check here +def get_packages(path): + packages = [] + package_path = os.path.join(path, "org", "sleuthkit", "autopsy") + for folder in os.listdir(package_path): + package_string = "org.sleuthkit.autopsy." 
+ packages.append(package_string + folder) + return packages + +# Create the given directory, if it doesn't already exist +def make_dir(dir): + try: + if not os.path.isdir(dir): + os.mkdir(dir) + if os.path.isdir(dir): + return True + return False + except: + print("Exception thrown when creating directory") + return False + +# Delete the given directory, and make sure it is deleted +def del_dir(dir): + try: + if os.path.isdir(dir): + shutil.rmtree(dir, ignore_errors=False, onerror=handleRemoveReadonly) + if os.path.isdir(dir): + return False + else: + return True + return True + except: + print("Exception thrown when deleting directory") + traceback.print_exc() + return False + +# Handle any permisson errors thrown by shutil.rmtree +def handleRemoveReadonly(func, path, exc): + excvalue = exc[1] + if func in (os.rmdir, os.remove) and excvalue.errno == errno.EACCES: + os.chmod(path, stat.S_IRWXU| stat.S_IRWXG| stat.S_IRWXO) # 0777 + func(path) + else: + raise + +# Run git clone and git checkout for the tag +def do_git(tag, tag_dir): + try: + printt("Cloning Autopsy tag " + tag + " into dir " + tag_dir + " (this could take a while)...") + subprocess.call(["git", "clone", "https://github.com/sleuthkit/autopsy.git", tag_dir], + stdout=subprocess.PIPE) + printt("Checking out tag " + tag + "...") + subprocess.call(["git", "checkout", tag], + stdout=subprocess.PIPE, + cwd=tag_dir) + return True + except Exception as ex: + print("Error cloning and checking out Autopsy: ", sys.exc_info()[0]) + print(str(ex)) + print("The terminal you are using most likely does not recognize git commands.") + return False + +# Get the flags from argv +def args(): + try: + sys.argv.pop(0) + while sys.argv: + arg = sys.argv.pop(0) + if arg == "-h" or arg == "--help": + return 1 + elif arg == "-t" or arg == "--tag": + global tag + tag = sys.argv.pop(0) + elif arg == "-s" or arg == "--source": + global source + source = sys.argv.pop(0) + elif arg == "-d" or arg == "--dir": + global docdir + 
docdir = sys.argv.pop(0) + elif arg == "-a" or arg == "--auto": + global dry + dry = False + else: + raise Exception() + except: + pass + +# Print script run info +def printinfo(): + global tag + global source + global docdir + global dry + printt("Release script information:") + if source is None: + source = fix_path(os.path.abspath(".")) + print("Using source directory:\n " + source) + if tag is None: + tag = get_tag(source) + print("Checking out to tag:\n " + tag) + if docdir is None: + docdir = fix_path(os.path.abspath("./jdiff-javadocs")) + print("Generating jdiff JavaDocs in:\n " + docdir) + if dry is True: + print("Dry run: will not auto-update version numbers") + sys.stdout.flush() + +# Print the script's usage/help +def usage(): + return \ + """ + USAGE: + Compares the API of the current Autopsy source code with a previous + tagged version. By default, it will detect the previous tag from + the NEWS file and will not update the versions in the source code. + + OPTIONAL FLAGS: + -t --tag Specify a previous tag to compare to. + Otherwise the NEWS file will be used. + + -d --dir The output directory for the jdiff JavaDocs. If no + directory is given, the default is jdiff-javadocs/{module}. + + -s --source The directory containing Autopsy's source code. + + -a --auto Automatically update version numbers (not dry). + + -h --help Prints this usage. 
+ """ + +# ==================================== # +# Main Functionality # +# ==================================== # + +# Where the magic happens +def main(): + global tag; global source; global docdir; global dry + tag = None; source = None; docdir = None; dry = True + + ret = args() + if ret: + print(usage()) + return 0 + printinfo() + + # ----------------------------------------------- + # 1) Clone Autopsy, checkout to given tag/commit + # 2) Get the modules in the clone and the source + # 3) Generate the xml comparison + # ----------------------------------------------- + if not del_dir("./build/" + tag): + print("\n\n=========================================") + print(" Failed to delete previous Autopsy clone.") + print(" Unable to continue...") + print("=========================================") + return 1 + tag_dir = os.path.abspath("./build/" + tag) + if not do_git(tag, tag_dir): + return 1 + sys.stdout.flush() + + tag_modules = find_modules(tag_dir) + source_modules = find_modules(source) + + printt("Generating jdiff XML reports...") + apiname_tag = tag + apiname_cur = "current" + gen_xml(tag_dir, tag_modules, apiname_tag) + gen_xml(source, source_modules, apiname_cur) + + printt("Deleting cloned Autopsy directory...") + print("Clone successfully deleted" if del_dir(tag_dir) else "Failed to delete clone") + sys.stdout.flush() + + # ----------------------------------------------------- + # 1) Seperate modules into added, similar, and removed + # 2) Compare XML for each module + # ----------------------------------------------------- + printt("Comparing modules found...") + similar_modules, added_modules, removed_modules = module_diff(source_modules, tag_modules) + if added_modules or removed_modules: + for m in added_modules: + print("+ Added " + m.name) + sys.stdout.flush() + for m in removed_modules: + print("- Removed " + m.name) + sys.stdout.flush() + else: + print("No added or removed modules") + sys.stdout.flush() + + printt("Comparing jdiff 
outputs...") + for module in similar_modules: + module.set_ret(compare_xml(module, apiname_tag, apiname_cur)) + print("Refer to the jdiff-javadocs folder for more details") + + # ------------------------------------------------------------ + # 1) Do versioning + # 2) Auto-update version numbers in files and the_modules list + # 3) Auto-update dependencies + # ------------------------------------------------------------ + printt("Auto-detecting version numbers and changes...") + for module in added_modules: + module.set_versions(get_versions(module, source)) + for module in similar_modules: + module.set_versions(get_versions(module, source)) + + added_modules = remove_correct_added(added_modules) + the_modules = similar_modules + added_modules + print_version_updates(the_modules) + + if not dry: + printt("Auto-updating version numbers...") + update_versions(the_modules, source) + print("All auto-updates complete") + + printt("Detecting and auto-updating dependencies...") + update_dependencies(the_modules, source) + + printt("Deleting jdiff XML...") + xml_dir = os.path.abspath("./build/jdiff-xml") + print("XML successfully deleted" if del_dir(xml_dir) else "Failed to delete XML") + + print("\n--- Script completed successfully ---") + return 0 + +# Start off the script +if __name__ == "__main__": + sys.exit(main()) From 01bc909c20e1cafad3cdafea25c4b284358227b1 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 18 Nov 2013 14:43:46 -0500 Subject: [PATCH 172/179] Updated the labels in KnownStatusSearchPanel fro multiple known databases --- .../org/sleuthkit/autopsy/filesearch/Bundle.properties | 2 +- .../autopsy/filesearch/KnownStatusSearchPanel.form | 5 ++++- .../autopsy/filesearch/KnownStatusSearchPanel.java | 10 ++++++++++ 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/filesearch/Bundle.properties b/Core/src/org/sleuthkit/autopsy/filesearch/Bundle.properties index 4bf164e3de..afeec73469 100644 --- 
a/Core/src/org/sleuthkit/autopsy/filesearch/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/filesearch/Bundle.properties @@ -24,7 +24,7 @@ FileSearchTopComponent.dateCheckBox1.text=Date: FileSearchTopComponent.dateFiltersButton1.text=Date Filters KnownStatusSearchPanel.knownCheckBox.text=Known Status: KnownStatusSearchPanel.knownBadOptionCheckBox.text=Known bad -KnownStatusSearchPanel.knownOptionCheckBox.text=Known (NSRL) +KnownStatusSearchPanel.knownOptionCheckBox.text=Known (NSRL or other) KnownStatusSearchPanel.unknownOptionCheckBox.text=Unknown DateSearchPanel.dateCheckBox.text=Date: DateSearchPanel.jLabel4.text=Timezone: diff --git a/Core/src/org/sleuthkit/autopsy/filesearch/KnownStatusSearchPanel.form b/Core/src/org/sleuthkit/autopsy/filesearch/KnownStatusSearchPanel.form index 8eb2e5fe2c..3237a70996 100644 --- a/Core/src/org/sleuthkit/autopsy/filesearch/KnownStatusSearchPanel.form +++ b/Core/src/org/sleuthkit/autopsy/filesearch/KnownStatusSearchPanel.form @@ -1,4 +1,4 @@ - +
@@ -64,6 +64,9 @@
+ + + diff --git a/Core/src/org/sleuthkit/autopsy/filesearch/KnownStatusSearchPanel.java b/Core/src/org/sleuthkit/autopsy/filesearch/KnownStatusSearchPanel.java index 28475e690c..e78d81a43c 100644 --- a/Core/src/org/sleuthkit/autopsy/filesearch/KnownStatusSearchPanel.java +++ b/Core/src/org/sleuthkit/autopsy/filesearch/KnownStatusSearchPanel.java @@ -74,6 +74,11 @@ class KnownStatusSearchPanel extends javax.swing.JPanel { knownOptionCheckBox.setSelected(true); knownOptionCheckBox.setText(org.openide.util.NbBundle.getMessage(KnownStatusSearchPanel.class, "KnownStatusSearchPanel.knownOptionCheckBox.text")); // NOI18N + knownOptionCheckBox.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + knownOptionCheckBoxActionPerformed(evt); + } + }); knownBadOptionCheckBox.setSelected(true); knownBadOptionCheckBox.setText(org.openide.util.NbBundle.getMessage(KnownStatusSearchPanel.class, "KnownStatusSearchPanel.knownBadOptionCheckBox.text")); // NOI18N @@ -102,6 +107,11 @@ class KnownStatusSearchPanel extends javax.swing.JPanel { .addComponent(knownBadOptionCheckBox)) ); }// //GEN-END:initComponents + + private void knownOptionCheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_knownOptionCheckBoxActionPerformed + // TODO add your handling code here: + }//GEN-LAST:event_knownOptionCheckBoxActionPerformed + // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JCheckBox knownBadOptionCheckBox; private javax.swing.JCheckBox knownCheckBox; From 630bf46074a3d159224c405b6d09503b31bfb401 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 18 Nov 2013 15:04:41 -0500 Subject: [PATCH 173/179] Updated HashDb for chnage in hash datbases API (accepting nulls in place of empty strings) --- .../src/org/sleuthkit/autopsy/hashdatabase/HashDb.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDb.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDb.java index e7d743c79d..ec6e84435f 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDb.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDb.java @@ -187,7 +187,7 @@ public class HashDb { * @throws TskCoreException */ public void add(Content content) throws TskCoreException { - add(content, ""); + add(content, null); } /** @@ -204,7 +204,7 @@ public class HashDb { AbstractFile file = (AbstractFile)content; // TODO: Add support for SHA-1 and SHA-256 hashes. if (null != file.getMd5Hash()) { - SleuthkitJNI.addToHashDatabase(file.getName(), file.getMd5Hash(), "", "", comment, handle); + SleuthkitJNI.addToHashDatabase(file.getName(), file.getMd5Hash(), null, null, comment, handle); } } } From 91c9bd2036b3ad55afee660082210c4fc09ee0ec Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 18 Nov 2013 15:25:31 -0500 Subject: [PATCH 174/179] Removed superfluous import from AddContentToHashDbAction --- .../sleuthkit/autopsy/hashdatabase/AddContentToHashDbAction.java | 1 - 1 file changed, 1 deletion(-) diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/AddContentToHashDbAction.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/AddContentToHashDbAction.java index 5ed63cd59a..bef2c3e889 100755 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/AddContentToHashDbAction.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/AddContentToHashDbAction.java @@ -31,7 +31,6 @@ import javax.swing.JOptionPane; import org.openide.util.Utilities; import org.openide.util.Lookup; import org.openide.util.actions.Presenter; -import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.ingest.IngestConfigurator; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.AbstractFile; From 7147cbce155550f64531f228e480d6e7665effb5 Mon Sep 17 00:00:00 2001 From: 
Richard Cordovano Date: Mon, 18 Nov 2013 15:40:20 -0500 Subject: [PATCH 175/179] Fixed hang of GetTagNameDialog when user types in an existing tag name --- Core/src/org/sleuthkit/autopsy/actions/GetTagNameDialog.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Core/src/org/sleuthkit/autopsy/actions/GetTagNameDialog.java b/Core/src/org/sleuthkit/autopsy/actions/GetTagNameDialog.java index 2100d83ae2..fb0d50ddc4 100644 --- a/Core/src/org/sleuthkit/autopsy/actions/GetTagNameDialog.java +++ b/Core/src/org/sleuthkit/autopsy/actions/GetTagNameDialog.java @@ -299,6 +299,9 @@ public class GetTagNameDialog extends JDialog { tagName = null; } } + else { + dispose(); + } } }//GEN-LAST:event_okButtonActionPerformed From ec72bb7a81f7665d60eadcfad13d856317790ad9 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 18 Nov 2013 17:36:39 -0500 Subject: [PATCH 176/179] Added code to AddContentTagAction to handle current and parent directory entries --- .../autopsy/actions/AddContentTagAction.java | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/Core/src/org/sleuthkit/autopsy/actions/AddContentTagAction.java b/Core/src/org/sleuthkit/autopsy/actions/AddContentTagAction.java index fa762cb7bd..70c8f9ee3e 100755 --- a/Core/src/org/sleuthkit/autopsy/actions/AddContentTagAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/AddContentTagAction.java @@ -25,6 +25,7 @@ import org.openide.util.Utilities; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.TagName; import org.sleuthkit.datamodel.TskCoreException; @@ -58,6 +59,35 @@ public class AddContentTagAction extends AddTagAction { Collection selectedFiles = Utilities.actionsGlobalContext().lookupAll(AbstractFile.class); for (AbstractFile file : selectedFiles) { try { + // Handle the special cases of "." and ".." directory entries. 
+ if (file.getName().equals(".")) { + Content parentFile = file.getParent(); + if (parentFile instanceof AbstractFile) { + file = (AbstractFile)parentFile; + } + else { + JOptionPane.showMessageDialog(null, "Unable to tag " + parentFile.getName() + ", not a regular file.", "Cannot Apply Tag", JOptionPane.WARNING_MESSAGE); + continue; + } + } + else if (file.getName().equals("..")) { + Content parentFile = file.getParent(); + if (parentFile instanceof AbstractFile) { + parentFile = (AbstractFile)((AbstractFile)parentFile).getParent(); + if (parentFile instanceof AbstractFile) { + file = (AbstractFile)parentFile; + } + else { + JOptionPane.showMessageDialog(null, "Unable to tag " + parentFile.getName() + ", not a regular file.", "Cannot Apply Tag", JOptionPane.WARNING_MESSAGE); + continue; + } + } + else { + JOptionPane.showMessageDialog(null, "Unable to tag " + parentFile.getName() + ", not a regular file.", "Cannot Apply Tag", JOptionPane.WARNING_MESSAGE); + continue; + } + } + Case.getCurrentCase().getServices().getTagsManager().addContentTag(file, tagName, comment); } catch (TskCoreException ex) { From caef10a69b6825785ef7dcfe605e65ea5c0d4a6e Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 18 Nov 2013 18:04:53 -0500 Subject: [PATCH 177/179] Added code to AddContentTagAction to handle current and parent directory entries --- .../org/sleuthkit/autopsy/actions/AddContentTagAction.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/actions/AddContentTagAction.java b/Core/src/org/sleuthkit/autopsy/actions/AddContentTagAction.java index 70c8f9ee3e..8760ed364f 100755 --- a/Core/src/org/sleuthkit/autopsy/actions/AddContentTagAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/AddContentTagAction.java @@ -59,7 +59,7 @@ public class AddContentTagAction extends AddTagAction { Collection selectedFiles = Utilities.actionsGlobalContext().lookupAll(AbstractFile.class); for (AbstractFile file : selectedFiles) 
{ try { - // Handle the special cases of "." and ".." directory entries. + // Handle the special cases of current (".") and parent ("..") directory entries. if (file.getName().equals(".")) { Content parentFile = file.getParent(); if (parentFile instanceof AbstractFile) { @@ -95,5 +95,5 @@ public class AddContentTagAction extends AddTagAction { JOptionPane.showMessageDialog(null, "Unable to tag " + file.getName() + ".", "Tagging Error", JOptionPane.ERROR_MESSAGE); } } - } + } } \ No newline at end of file From cc85120678c23d9ac26ddb4339a453f19073bb82 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Tue, 19 Nov 2013 13:36:53 -0500 Subject: [PATCH 178/179] normalized line endings --- test/README.txt | 26 +- test/script/Emailer.py | 98 +- test/script/regression.py | 3708 ++++++++--------- test/script/srcupdater.py | 374 +- thunderbirdparser/manifest.mf | 14 +- .../nbproject/project.properties | 12 +- update_versions.py | 1878 ++++----- 7 files changed, 3055 insertions(+), 3055 deletions(-) diff --git a/test/README.txt b/test/README.txt index d0064b4f95..854f5e1a33 100644 --- a/test/README.txt +++ b/test/README.txt @@ -1,13 +1,13 @@ -This folder contains the data and scripts required to run regression tests -for Autopsy. There is a 'Testing' folder in the root directory that contains -the Java code that drives Autopsy to perform the tests. - -To run these tests: -- You will need python3. We run this from within Cygwin. -- Download the input images by typing 'ant test-download-imgs' in the root Autopsy folder. - This will place images in 'test/input'. -- Run 'python3 regression.py' from inside of the 'test/scripts' folder. -- Alternatively, run 'python3 regression.py -l [CONFIGFILE] to run the tests on a specified - list of images using a configuration file. See config.xml in the 'test/scripts' folder to - see configuration file formatting. -- Run 'python3 regression.py -h' to see other options. 
+This folder contains the data and scripts required to run regression tests +for Autopsy. There is a 'Testing' folder in the root directory that contains +the Java code that drives Autopsy to perform the tests. + +To run these tests: +- You will need python3. We run this from within Cygwin. +- Download the input images by typing 'ant test-download-imgs' in the root Autopsy folder. + This will place images in 'test/input'. +- Run 'python3 regression.py' from inside of the 'test/scripts' folder. +- Alternatively, run 'python3 regression.py -l [CONFIGFILE] to run the tests on a specified + list of images using a configuration file. See config.xml in the 'test/scripts' folder to + see configuration file formatting. +- Run 'python3 regression.py -h' to see other options. diff --git a/test/script/Emailer.py b/test/script/Emailer.py index 5d12e6afa3..7e661e12ea 100644 --- a/test/script/Emailer.py +++ b/test/script/Emailer.py @@ -1,49 +1,49 @@ -import smtplib -from email.mime.image import MIMEImage -from email.mime.multipart import MIMEMultipart -from email.mime.text import MIMEText -from email.mime.base import MIMEBase -from email import encoders -import xml -from xml.dom.minidom import parse, parseString - -def send_email(to, server, subj, body, attachments): - """Send an email with the given information. 
- - Args: - to: a String, the email address to send the email to - server: a String, the mail server to send from - subj: a String, the subject line of the message - body: a String, the body of the message - attachments: a listof_pathto_File, the attachements to include - """ - msg = MIMEMultipart() - msg['Subject'] = subj - # me == the sender's email address - # family = the list of all recipients' email addresses - msg['From'] = 'AutopsyTest' - msg['To'] = to - msg.preamble = 'This is a test' - container = MIMEText(body, 'plain') - msg.attach(container) - Build_email(msg, attachments) - s = smtplib.SMTP(server) - try: - print('Sending Email') - s.sendmail(msg['From'], msg['To'], msg.as_string()) - except Exception as e: - print(str(e)) - s.quit() - -def Build_email(msg, attachments): - for file in attachments: - part = MIMEBase('application', "octet-stream") - atach = open(file, "rb") - attch = atach.read() - noml = file.split("\\") - nom = noml[len(noml)-1] - part.set_payload(attch) - encoders.encode_base64(part) - part.add_header('Content-Disposition', 'attachment; filename="' + nom + '"') - msg.attach(part) - +import smtplib +from email.mime.image import MIMEImage +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText +from email.mime.base import MIMEBase +from email import encoders +import xml +from xml.dom.minidom import parse, parseString + +def send_email(to, server, subj, body, attachments): + """Send an email with the given information. 
+ + Args: + to: a String, the email address to send the email to + server: a String, the mail server to send from + subj: a String, the subject line of the message + body: a String, the body of the message + attachments: a listof_pathto_File, the attachements to include + """ + msg = MIMEMultipart() + msg['Subject'] = subj + # me == the sender's email address + # family = the list of all recipients' email addresses + msg['From'] = 'AutopsyTest' + msg['To'] = to + msg.preamble = 'This is a test' + container = MIMEText(body, 'plain') + msg.attach(container) + Build_email(msg, attachments) + s = smtplib.SMTP(server) + try: + print('Sending Email') + s.sendmail(msg['From'], msg['To'], msg.as_string()) + except Exception as e: + print(str(e)) + s.quit() + +def Build_email(msg, attachments): + for file in attachments: + part = MIMEBase('application', "octet-stream") + atach = open(file, "rb") + attch = atach.read() + noml = file.split("\\") + nom = noml[len(noml)-1] + part.set_payload(attch) + encoders.encode_base64(part) + part.add_header('Content-Disposition', 'attachment; filename="' + nom + '"') + msg.attach(part) + diff --git a/test/script/regression.py b/test/script/regression.py index b2ad319963..6c640823ed 100644 --- a/test/script/regression.py +++ b/test/script/regression.py @@ -1,1854 +1,1854 @@ -#!/usr/bin/python -# -*- coding: utf_8 -*- - - # Autopsy Forensic Browser - # - # Copyright 2013 Basis Technology Corp. - # - # Licensed under the Apache License, Version 2.0 (the "License"); - # you may not use this file except in compliance with the License. - # You may obtain a copy of the License at - # - # http://www.apache.org/licenses/LICENSE-2.0 - # - # Unless required by applicable law or agreed to in writing, software - # distributed under the License is distributed on an "AS IS" BASIS, - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- # See the License for the specific language governing permissions and - # limitations under the License. -from tskdbdiff import TskDbDiff, TskDbDiffException -import codecs -import datetime -import logging -import os -import re -import shutil -import socket -import sqlite3 -import subprocess -import sys -from sys import platform as _platform -import time -import traceback -import xml -from time import localtime, strftime -from xml.dom.minidom import parse, parseString -import smtplib -from email.mime.image import MIMEImage -from email.mime.multipart import MIMEMultipart -from email.mime.text import MIMEText -import re -import zipfile -import zlib -import Emailer -import srcupdater -from regression_utils import * - -# -# Please read me... -# -# This is the regression testing Python script. -# It uses an ant command to run build.xml for RegressionTest.java -# -# The code is cleanly sectioned and commented. -# Please follow the current formatting. -# It is a long and potentially confusing script. -# -# Variable, function, and class names are written in Python conventions: -# this_is_a_variable this_is_a_function() ThisIsAClass -# -# - - -# Data Definitions: -# -# pathto_X: A path to type X. 
-# ConfigFile: An XML file formatted according to the template in myconfig.xml -# ParsedConfig: A dom object that represents a ConfigFile -# SQLCursor: A cursor recieved from a connection to an SQL database -# Nat: A Natural Number -# Image: An image -# - -# Enumeration of database types used for the simplification of generating database paths -DBType = enum('OUTPUT', 'GOLD', 'BACKUP') - -# Common filename of the output and gold databases (although they are in different directories -DB_FILENAME = "autopsy.db" - -# Backup database filename -BACKUP_DB_FILENAME = "autopsy_backup.db" - -# TODO: Double check this purpose statement -# Folder name for gold standard database testing -AUTOPSY_TEST_CASE = "AutopsyTestCase" - -# TODO: Double check this purpose statement -# The filename of the log to store error messages -COMMON_LOG = "AutopsyErrors.txt" - -Day = 0 - -#----------------------# -# Main # -#----------------------# -def main(): - """Parse the command-line arguments, create the configuration, and run the tests.""" - args = Args() - parse_result = args.parse() - test_config = TestConfiguration(args) - # The arguments were given wrong: - if not parse_result: - return - if(not args.fr): - antin = ["ant"] - antin.append("-f") - antin.append(os.path.join("..","..","build.xml")) - antin.append("test-download-imgs") - if SYS is OS.CYGWIN: - subprocess.call(antin) - elif SYS is OS.WIN: - theproc = subprocess.Popen(antin, shell = True, stdout=subprocess.PIPE) - theproc.communicate() - # Otherwise test away! - TestRunner.run_tests(test_config) - - -class TestRunner(object): - """A collection of functions to run the regression tests.""" - - def run_tests(test_config): - """Run the tests specified by the main TestConfiguration. 
- - Executes the AutopsyIngest for each image and dispatches the results based on - the mode (rebuild or testing) - """ - test_data_list = [ TestData(image, test_config) for image in test_config.images ] - - Reports.html_add_images(test_config.html_log, test_config.images) - - logres =[] - for test_data in test_data_list: - Errors.clear_print_logs() - Errors.set_testing_phase(test_data.image) - if not (test_config.args.rebuild or os.path.exists(test_data.gold_archive)): - msg = "Gold standard doesn't exist, skipping image:" - Errors.print_error(msg) - Errors.print_error(test_data.gold_archive) - continue - TestRunner._run_autopsy_ingest(test_data) - - if test_config.args.rebuild: - TestRunner.rebuild(test_data) - else: - logres.append(TestRunner._run_test(test_data)) - test_data.printout = Errors.printout - test_data.printerror = Errors.printerror - - Reports.write_html_foot(test_config.html_log) - # TODO: move this elsewhere - if (len(logres)>0): - for lm in logres: - for ln in lm: - Errors.add_email_msg(ln) - - # TODO: possibly worth putting this in a sub method - if all([ test_data.overall_passed for test_data in test_data_list ]): - Errors.add_email_msg("All images passed.\n") - else: - msg = "The following images failed:\n" - for test_data in test_data_list: - if not test_data.overall_passed: - msg += "\t" + test_data.image + "\n" - Errors.add_email_msg(msg) - html = open(test_config.html_log) - Errors.add_email_attachment(html.name) - html.close() - - if test_config.email_enabled: - Emailer.send_email(test_config.mail_to, test_config.mail_server, - test_config.mail_subject, Errors.email_body, Errors.email_attachs) - - def _run_autopsy_ingest(test_data): - """Run Autopsy ingest for the image in the given TestData. - - Also generates the necessary logs for rebuilding or diff. - - Args: - test_data: the TestData to run the ingest on. 
- """ - if image_type(test_data.image_file) == IMGTYPE.UNKNOWN: - Errors.print_error("Error: Image type is unrecognized:") - Errors.print_error(test_data.image_file + "\n") - return - - logging.debug("--------------------") - logging.debug(test_data.image_name) - logging.debug("--------------------") - TestRunner._run_ant(test_data) - time.sleep(2) # Give everything a second to process - - try: - # Dump the database before we diff or use it for rebuild - TskDbDiff.dump_output_db(test_data.get_db_path(DBType.OUTPUT), test_data.get_db_dump_path(DBType.OUTPUT), - test_data.get_sorted_data_path(DBType.OUTPUT)) - except sqlite3.OperationalError as e: - print("Ingest did not run properly.", - "Make sure no other instances of Autopsy are open and try again.") - sys.exit() - - # merges logs into a single log for later diff / rebuild - copy_logs(test_data) - Logs.generate_log_data(test_data) - - TestRunner._handle_solr(test_data) - TestRunner._handle_exception(test_data) - - #TODO: figure out return type of _run_test (logres) - def _run_test(test_data): - """Compare the results of the output to the gold standard. - - Args: - test_data: the TestData - - Returns: - logres? 
- """ - TestRunner._extract_gold(test_data) - - # Look for core exceptions - # @@@ Should be moved to TestResultsDiffer, but it didn't know about logres -- need to look into that - logres = Logs.search_common_log("TskCoreException", test_data) - - TestResultsDiffer.run_diff(test_data) - test_data.overall_passed = (test_data.html_report_passed and - test_data.errors_diff_passed and test_data.db_diff_passed) - - Reports.generate_reports(test_data) - if(not test_data.overall_passed): - Errors.add_email_attachment(test_data.common_log_path) - return logres - - def _extract_gold(test_data): - """Extract gold archive file to output/gold/tmp/ - - Args: - test_data: the TestData - """ - extrctr = zipfile.ZipFile(test_data.gold_archive, 'r', compression=zipfile.ZIP_DEFLATED) - extrctr.extractall(test_data.main_config.gold) - extrctr.close - time.sleep(2) - - def _handle_solr(test_data): - """Clean up SOLR index if in keep mode (-k). - - Args: - test_data: the TestData - """ - if not test_data.main_config.args.keep: - if clear_dir(test_data.solr_index): - print_report([], "DELETE SOLR INDEX", "Solr index deleted.") - else: - print_report([], "KEEP SOLR INDEX", "Solr index has been kept.") - - def _handle_exception(test_data): - """If running in exception mode, print exceptions to log. - - Args: - test_data: the TestData - """ - if test_data.main_config.args.exception: - exceptions = search_logs(test_data.main_config.args.exception_string, test_data) - okay = ("No warnings or exceptions found containing text '" + - test_data.main_config.args.exception_string + "'.") - print_report(exceptions, "EXCEPTION", okay) - - def rebuild(test_data): - """Rebuild the gold standard with the given TestData. - - Copies the test-generated database and html report files into the gold directory. 
- """ - test_config = test_data.main_config - # Errors to print - errors = [] - # Delete the current gold standards - gold_dir = test_config.img_gold - clear_dir(test_config.img_gold) - tmpdir = make_path(gold_dir, test_data.image_name) - dbinpth = test_data.get_db_path(DBType.OUTPUT) - dboutpth = make_path(tmpdir, DB_FILENAME) - dataoutpth = make_path(tmpdir, test_data.image_name + "SortedData.txt") - dbdumpinpth = test_data.get_db_dump_path(DBType.OUTPUT) - dbdumpoutpth = make_path(tmpdir, test_data.image_name + "DBDump.txt") - if not os.path.exists(test_config.img_gold): - os.makedirs(test_config.img_gold) - if not os.path.exists(tmpdir): - os.makedirs(tmpdir) - try: - shutil.copy(dbinpth, dboutpth) - if file_exists(test_data.get_sorted_data_path(DBType.OUTPUT)): - shutil.copy(test_data.get_sorted_data_path(DBType.OUTPUT), dataoutpth) - shutil.copy(dbdumpinpth, dbdumpoutpth) - error_pth = make_path(tmpdir, test_data.image_name+"SortedErrors.txt") - shutil.copy(test_data.sorted_log, error_pth) - except IOError as e: - Errors.print_error(str(e)) - Errors.add_email_message("Not rebuilt properly") - print(str(e)) - print(traceback.format_exc()) - # Rebuild the HTML report - output_html_report_dir = test_data.get_html_report_path(DBType.OUTPUT) - gold_html_report_dir = make_path(tmpdir, "Report") - - try: - shutil.copytree(output_html_report_dir, gold_html_report_dir) - except OSError as e: - errors.append(e.error()) - except Exception as e: - errors.append("Error: Unknown fatal error when rebuilding the gold html report.") - errors.append(str(e) + "\n") - print(traceback.format_exc()) - oldcwd = os.getcwd() - zpdir = gold_dir - os.chdir(zpdir) - os.chdir("..") - img_gold = "tmp" - img_archive = make_path(test_data.image_name+"-archive.zip") - comprssr = zipfile.ZipFile(img_archive, 'w',compression=zipfile.ZIP_DEFLATED) - TestRunner.zipdir(img_gold, comprssr) - comprssr.close() - os.chdir(oldcwd) - del_dir(test_config.img_gold) - okay = "Sucessfully rebuilt all gold 
standards." - print_report(errors, "REBUILDING", okay) - - def zipdir(path, zip): - for root, dirs, files in os.walk(path): - for file in files: - zip.write(os.path.join(root, file)) - - def _run_ant(test_data): - """Construct and run the ant build command for the given TestData. - - Tests Autopsy by calling RegressionTest.java via the ant build file. - - Args: - test_data: the TestData - """ - test_config = test_data.main_config - # Set up the directories - if dir_exists(test_data.output_path): - shutil.rmtree(test_data.output_path) - os.makedirs(test_data.output_path) - test_data.ant = ["ant"] - test_data.ant.append("-v") - test_data.ant.append("-f") - # case.ant.append(case.build_path) - test_data.ant.append(os.path.join("..","..","Testing","build.xml")) - test_data.ant.append("regression-test") - test_data.ant.append("-l") - test_data.ant.append(test_data.antlog_dir) - test_data.ant.append("-Dimg_path=" + test_data.image_file) - test_data.ant.append("-Dknown_bad_path=" + test_config.known_bad_path) - test_data.ant.append("-Dkeyword_path=" + test_config.keyword_path) - test_data.ant.append("-Dnsrl_path=" + test_config.nsrl_path) - test_data.ant.append("-Dgold_path=" + test_config.gold) - test_data.ant.append("-Dout_path=" + - make_local_path(test_data.output_path)) - test_data.ant.append("-Dignore_unalloc=" + "%s" % test_config.args.unallocated) - test_data.ant.append("-Dtest.timeout=" + str(test_config.timeout)) - - Errors.print_out("Ingesting Image:\n" + test_data.image_file + "\n") - Errors.print_out("CMD: " + " ".join(test_data.ant)) - Errors.print_out("Starting test...\n") - antoutpth = make_local_path(test_data.main_config.output_dir, "antRunOutput.txt") - antout = open(antoutpth, "a") - if SYS is OS.CYGWIN: - subprocess.call(test_data.ant, stdout=subprocess.PIPE) - elif SYS is OS.WIN: - theproc = subprocess.Popen(test_data.ant, shell = True, stdout=subprocess.PIPE) - theproc.communicate() - antout.close() - - -class TestData(object): - """Container for 
the input and output of a single image. - - Represents data for the test of a single image, including path to the image, - database paths, etc. - - Attributes: - main_config: the global TestConfiguration - ant: a listof_String, the ant command for this TestData - image_file: a pathto_Image, the image for this TestData - image: a String, the image file's name - image_name: a String, the image file's name with a trailing (0) - output_path: pathto_Dir, the output directory for this TestData - autopsy_data_file: a pathto_File, the IMAGE_NAMEAutopsy_data.txt file - warning_log: a pathto_File, the AutopsyLogs.txt file - antlog_dir: a pathto_File, the antlog.txt file - test_dbdump: a pathto_File, the database dump, IMAGENAMEDump.txt - common_log_path: a pathto_File, the IMAGE_NAMECOMMON_LOG file - sorted_log: a pathto_File, the IMAGENAMESortedErrors.txt file - reports_dir: a pathto_Dir, the AutopsyTestCase/Reports folder - gold_data_dir: a pathto_Dir, the gold standard directory - gold_archive: a pathto_File, the gold standard archive - logs_dir: a pathto_Dir, the location where autopsy logs are stored - solr_index: a pathto_Dir, the locatino of the solr index - html_report_passed: a boolean, did the HTML report diff pass? - errors_diff_passed: a boolean, did the error diff pass? - db_diff_passed: a boolean, did the db diff pass? - overall_passed: a boolean, did the test pass? 
- total_test_time: a String representation of the test duration - start_date: a String representation of this TestData's start date - end_date: a String representation of the TestData's end date - total_ingest_time: a String representation of the total ingest time - artifact_count: a Nat, the number of artifacts - artifact_fail: a Nat, the number of artifact failures - heap_space: a String representation of TODO - service_times: a String representation of TODO - autopsy_version: a String, the version of autopsy that was run - ingest_messages: a Nat, the number of ingest messages - indexed_files: a Nat, the number of files indexed during the ingest - indexed_chunks: a Nat, the number of chunks indexed during the ingest - printerror: a listof_String, the error messages printed during this TestData's test - printout: a listof_String, the messages pritned during this TestData's test - """ - - def __init__(self, image, main_config): - """Init this TestData with it's image and the test configuration. - - Args: - image: the Image to be tested. - main_config: the global TestConfiguration. - """ - # Configuration Data - self.main_config = main_config - self.ant = [] - self.image_file = str(image) - # TODO: This 0 should be be refactored out, but it will require rebuilding and changing of outputs. 
- self.image = get_image_name(self.image_file) - self.image_name = self.image + "(0)" - # Directory structure and files - self.output_path = make_path(self.main_config.output_dir, self.image_name) - self.autopsy_data_file = make_path(self.output_path, self.image_name + "Autopsy_data.txt") - self.warning_log = make_local_path(self.output_path, "AutopsyLogs.txt") - self.antlog_dir = make_local_path(self.output_path, "antlog.txt") - self.test_dbdump = make_path(self.output_path, self.image_name + - "DBDump.txt") - self.common_log_path = make_local_path(self.output_path, self.image_name + COMMON_LOG) - self.sorted_log = make_local_path(self.output_path, self.image_name + "SortedErrors.txt") - self.reports_dir = make_path(self.output_path, AUTOPSY_TEST_CASE, "Reports") - self.gold_data_dir = make_path(self.main_config.img_gold, self.image_name) - self.gold_archive = make_path(self.main_config.gold, - self.image_name + "-archive.zip") - self.logs_dir = make_path(self.output_path, "logs") - self.solr_index = make_path(self.output_path, AUTOPSY_TEST_CASE, - "ModuleOutput", "KeywordSearch") - # Results and Info - self.html_report_passed = False - self.errors_diff_passed = False - self.db_diff_passed = False - self.overall_passed = False - # Ingest info - self.total_test_time = "" - self.start_date = "" - self.end_date = "" - self.total_ingest_time = "" - self.artifact_count = 0 - self.artifact_fail = 0 - self.heap_space = "" - self.service_times = "" - self.autopsy_version = "" - self.ingest_messages = 0 - self.indexed_files = 0 - self.indexed_chunks = 0 - # Error tracking - self.printerror = [] - self.printout = [] - - def ant_to_string(self): - string = "" - for arg in self.ant: - string += (arg + " ") - return string - - def get_db_path(self, db_type): - """Get the path to the database file that corresponds to the given DBType. - - Args: - DBType: the DBType of the path to be generated. 
- """ - if(db_type == DBType.GOLD): - db_path = make_path(self.gold_data_dir, DB_FILENAME) - elif(db_type == DBType.OUTPUT): - db_path = make_path(self.main_config.output_dir, self.image_name, AUTOPSY_TEST_CASE, DB_FILENAME) - else: - db_path = make_path(self.main_config.output_dir, self.image_name, AUTOPSY_TEST_CASE, BACKUP_DB_FILENAME) - return db_path - - def get_html_report_path(self, html_type): - """Get the path to the HTML Report folder that corresponds to the given DBType. - - Args: - DBType: the DBType of the path to be generated. - """ - if(html_type == DBType.GOLD): - return make_path(self.gold_data_dir, "Report") - else: - # Autopsy creates an HTML report folder in the form AutopsyTestCase DATE-TIME - # It's impossible to get the exact time the folder was created, but the folder - # we are looking for is the only one in the self.reports_dir folder - html_path = "" - for fs in os.listdir(self.reports_dir): - html_path = make_path(self.reports_dir, fs) - if os.path.isdir(html_path): - break - return make_path(html_path, os.listdir(html_path)[0]) - - def get_sorted_data_path(self, file_type): - """Get the path to the SortedData file that corresponds to the given DBType. - - Args: - file_type: the DBType of the path to be generated - """ - return self._get_path_to_file(file_type, "SortedData.txt") - - def get_sorted_errors_path(self, file_type): - """Get the path to the SortedErrors file that correspodns to the given - DBType. - - Args: - file_type: the DBType of the path to be generated - """ - return self._get_path_to_file(file_type, "SortedErrors.txt") - - def get_db_dump_path(self, file_type): - """Get the path to the DBDump file that corresponds to the given DBType. - - Args: - file_type: the DBType of the path to be generated - """ - return self._get_path_to_file(file_type, "DBDump.txt") - - def _get_path_to_file(self, file_type, file_name): - """Get the path to the specified file with the specified type. 
- - Args: - file_type: the DBType of the path to be generated - file_name: a String, the filename of the path to be generated - """ - full_filename = self.image_name + file_name - if(file_type == DBType.GOLD): - return make_path(self.gold_data_dir, full_filename) - else: - return make_path(self.output_path, full_filename) - - -class TestConfiguration(object): - """Container for test configuration data. - - The Master Test Configuration. Encapsulates consolidated high level input from - config XML file and command-line arguments. - - Attributes: - args: an Args, the command line arguments - output_dir: a pathto_Dir, the output directory - input_dir: a pathto_Dir, the input directory - gold: a pathto_Dir, the gold directory - img_gold: a pathto_Dir, the temp directory where gold images are unzipped to - csv: a pathto_File, the local csv file - global_csv: a pathto_File, the global csv file - html_log: a pathto_File - known_bad_path: - keyword_path: - nsrl_path: - build_path: a pathto_File, the ant build file which runs the tests - autopsy_version: - ingest_messages: a Nat, number of ingest messages - indexed_files: a Nat, the number of indexed files - indexed_chunks: a Nat, the number of indexed chunks - timer: - images: a listof_Image, the images to be tested - timeout: a Nat, the amount of time before killing the test - ant: a listof_String, the ant command to run the tests - """ - - def __init__(self, args): - """Inits TestConfiguration and loads a config file if available. - - Args: - args: an Args, the command line arguments. 
- """ - self.args = args - # Paths: - self.output_dir = "" - self.input_dir = make_local_path("..","input") - self.gold = make_path("..", "output", "gold") - self.img_gold = make_path(self.gold, 'tmp') - # Logs: - self.csv = "" - self.global_csv = "" - self.html_log = "" - # Ant info: - self.known_bad_path = make_path(self.input_dir, "notablehashes.txt-md5.idx") - self.keyword_path = make_path(self.input_dir, "notablekeywords.xml") - self.nsrl_path = make_path(self.input_dir, "nsrl.txt-md5.idx") - self.build_path = make_path("..", "build.xml") - # Infinite Testing info - timer = 0 - self.images = [] - # Email info - self.email_enabled = args.email_enabled - self.mail_server = "" - self.mail_to = "" - self.mail_subject = "" - # Set the timeout to something huge - # The entire tester should not timeout before this number in ms - # However it only seems to take about half this time - # And it's very buggy, so we're being careful - self.timeout = 24 * 60 * 60 * 1000 * 1000 - - if not self.args.single: - self._load_config_file(self.args.config_file) - else: - self.images.append(self.args.single_file) - self._init_logs() - #self._init_imgs() - #self._init_build_info() - - - def _load_config_file(self, config_file): - """Updates this TestConfiguration's attributes from the config file. - - Initializes this TestConfiguration by iterating through the XML config file - command-line argument. 
Populates self.images and optional email configuration - - Args: - config_file: ConfigFile - the configuration file to load - """ - try: - count = 0 - parsed_config = parse(config_file) - logres = [] - counts = {} - if parsed_config.getElementsByTagName("indir"): - self.input_dir = parsed_config.getElementsByTagName("indir")[0].getAttribute("value").encode().decode("utf_8") - if parsed_config.getElementsByTagName("global_csv"): - self.global_csv = parsed_config.getElementsByTagName("global_csv")[0].getAttribute("value").encode().decode("utf_8") - self.global_csv = make_local_path(self.global_csv) - if parsed_config.getElementsByTagName("golddir"): - self.gold = parsed_config.getElementsByTagName("golddir")[0].getAttribute("value").encode().decode("utf_8") - self.img_gold = make_path(self.gold, 'tmp') - - self._init_imgs(parsed_config) - self._init_build_info(parsed_config) - self._init_email_info(parsed_config) - - except IOError as e: - msg = "There was an error loading the configuration file.\n" - msg += "\t" + str(e) - Errors.add_email_msg(msg) - logging.critical(traceback.format_exc()) - print(traceback.format_exc()) - - def _init_logs(self): - """Setup output folder, logs, and reporting infrastructure.""" - if(not dir_exists(make_path("..", "output", "results"))): - os.makedirs(make_path("..", "output", "results",)) - self.output_dir = make_path("..", "output", "results", time.strftime("%Y.%m.%d-%H.%M.%S")) - os.makedirs(self.output_dir) - self.csv = make_local_path(self.output_dir, "CSV.txt") - self.html_log = make_path(self.output_dir, "AutopsyTestCase.html") - log_name = self.output_dir + "\\regression.log" - logging.basicConfig(filename=log_name, level=logging.DEBUG) - - def _init_build_info(self, parsed_config): - """Initializes paths that point to information necessary to run the AutopsyIngest.""" - build_elements = parsed_config.getElementsByTagName("build") - if build_elements: - build_element = build_elements[0] - build_path = 
build_element.getAttribute("value").encode().decode("utf_8") - self.build_path = build_path - - def _init_imgs(self, parsed_config): - """Initialize the list of images to run tests on.""" - for element in parsed_config.getElementsByTagName("image"): - value = element.getAttribute("value").encode().decode("utf_8") - print ("Image in Config File: " + value) - if file_exists(value): - self.images.append(value) - else: - msg = "File: " + value + " doesn't exist" - Errors.print_error(msg) - Errors.add_email_msg(msg) - image_count = len(self.images) - - # Sanity check to see if there are obvious gold images that we are not testing - gold_count = 0 - for file in os.listdir(self.gold): - if not(file == 'tmp'): - gold_count+=1 - - if (image_count > gold_count): - print("******Alert: There are more input images than gold standards, some images will not be properly tested.\n") - elif (image_count < gold_count): - print("******Alert: There are more gold standards than input images, this will not check all gold Standards.\n") - - def _init_email_info(self, parsed_config): - """Initializes email information dictionary""" - email_elements = parsed_config.getElementsByTagName("email") - if email_elements: - mail_to = email_elements[0] - self.mail_to = mail_to.getAttribute("value").encode().decode("utf_8") - mail_server_elements = parsed_config.getElementsByTagName("mail_server") - if mail_server_elements: - mail_from = mail_server_elements[0] - self.mail_server = mail_from.getAttribute("value").encode().decode("utf_8") - subject_elements = parsed_config.getElementsByTagName("subject") - if subject_elements: - subject = subject_elements[0] - self.mail_subject = subject.getAttribute("value").encode().decode("utf_8") - if self.mail_server and self.mail_to and self.args.email_enabled: - self.email_enabled = True - print("Email will be sent to ", self.mail_to) - else: - print("No email will be sent.") - - -#-------------------------------------------------# -# Functions relating to 
comparing outputs # -#-------------------------------------------------# -class TestResultsDiffer(object): - """Compares results for a single test.""" - - def run_diff(test_data): - """Compares results for a single test. - - Args: - test_data: the TestData to use. - databaseDiff: TskDbDiff object created based off test_data - """ - try: - output_db = test_data.get_db_path(DBType.OUTPUT) - gold_db = test_data.get_db_path(DBType.GOLD) - output_dir = test_data.output_path - gold_bb_dump = test_data.get_sorted_data_path(DBType.GOLD) - gold_dump = test_data.get_db_dump_path(DBType.GOLD) - test_data.db_diff_pass = all(TskDbDiff(output_db, gold_db, output_dir=output_dir, gold_bb_dump=gold_bb_dump, - gold_dump=gold_dump).run_diff()) - - # Compare Exceptions - # replace is a fucntion that replaces strings of digits with 'd' - # this is needed so dates and times will not cause the diff to fail - replace = lambda file: re.sub(re.compile("\d"), "d", file) - output_errors = test_data.get_sorted_errors_path(DBType.OUTPUT) - gold_errors = test_data.get_sorted_errors_path(DBType.GOLD) - passed = TestResultsDiffer._compare_text(output_errors, gold_errors, - replace) - test_data.errors_diff_passed = passed - - # Compare html output - gold_report_path = test_data.get_html_report_path(DBType.GOLD) - output_report_path = test_data.get_html_report_path(DBType.OUTPUT) - passed = TestResultsDiffer._html_report_diff(gold_report_path, - output_report_path) - test_data.html_report_passed = passed - - # Clean up tmp folder - del_dir(test_data.gold_data_dir) - - except sqlite3.OperationalError as e: - Errors.print_error("Tests failed while running the diff:\n") - Errors.print_error(str(e)) - except TskDbDiffException as e: - Errors.print_error(str(e)) - except Exception as e: - Errors.print_error("Tests failed due to an error, try rebuilding or creating gold standards.\n") - Errors.print_error(str(e) + "\n") - print(traceback.format_exc()) - - def _compare_text(output_file, gold_file, 
process=None): - """Compare two text files. - - Args: - output_file: a pathto_File, the output text file - gold_file: a pathto_File, the input text file - pre-process: (optional) a function of String -> String that will be - called on each input file before the diff, if specified. - """ - if(not file_exists(output_file)): - return False - output_data = codecs.open(output_file, "r", "utf_8").read() - gold_data = codecs.open(gold_file, "r", "utf_8").read() - - if process is not None: - output_data = process(output_data) - gold_data = process(gold_data) - - if (not(gold_data == output_data)): - diff_path = os.path.splitext(os.path.basename(output_file))[0] - diff_path += "-Diff.txt" - diff_file = codecs.open(diff_path, "wb", "utf_8") - dffcmdlst = ["diff", output_file, gold_file] - subprocess.call(dffcmdlst, stdout = diff_file) - Errors.add_email_attachment(diff_path) - msg = "There was a difference in " - msg += os.path.basename(output_file) + ".\n" - Errors.add_email_msg(msg) - Errors.print_error(msg) - return False - else: - return True - - def _html_report_diff(gold_report_path, output_report_path): - """Compare the output and gold html reports. - - Args: - gold_report_path: a pathto_Dir, the gold HTML report directory - output_report_path: a pathto_Dir, the output HTML report directory - - Returns: - true, if the reports match, false otherwise. - """ - try: - gold_html_files = get_files_by_ext(gold_report_path, ".html") - output_html_files = get_files_by_ext(output_report_path, ".html") - - #ensure both reports have the same number of files and are in the same order - if(len(gold_html_files) != len(output_html_files)): - msg = "The reports did not have the same number or files." - msg += "One of the reports may have been corrupted." 
- Errors.print_error(msg) - else: - gold_html_files.sort() - output_html_files.sort() - - total = {"Gold": 0, "New": 0} - for gold, output in zip(gold_html_files, output_html_files): - count = TestResultsDiffer._compare_report_files(gold, output) - total["Gold"] += count[0] - total["New"] += count[1] - - okay = "The test report matches the gold report." - errors=["Gold report had " + str(total["Gold"]) +" errors", "New report had " + str(total["New"]) + " errors."] - print_report(errors, "REPORT COMPARISON", okay) - - if total["Gold"] == total["New"]: - return True - else: - Errors.print_error("The reports did not match each other.\n " + errors[0] +" and the " + errors[1]) - return False - except OSError as e: - e.print_error() - return False - except Exception as e: - Errors.print_error("Error: Unknown fatal error comparing reports.") - Errors.print_error(str(e) + "\n") - logging.critical(traceback.format_exc()) - return False - - def _compare_report_files(a_path, b_path): - """Compares the two specified report html files. - - Args: - a_path: a pathto_File, the first html report file - b_path: a pathto_File, the second html report file - - Returns: - a tuple of (Nat, Nat), which represent the length of each - unordered list in the html report files, or (0, 0) if the - lenghts are the same. - """ - a_file = open(a_path) - b_file = open(b_path) - a = a_file.read() - b = b_file.read() - a = a[a.find("
    "):] - b = b[b.find("
      "):] - - a_list = TestResultsDiffer._split(a, 50) - b_list = TestResultsDiffer._split(b, 50) - if not len(a_list) == len(b_list): - ex = (len(a_list), len(b_list)) - return ex - else: - return (0, 0) - - # Split a string into an array of string of the given size - def _split(input, size): - return [input[start:start+size] for start in range(0, len(input), size)] - - -class Reports(object): - def generate_reports(test_data): - """Generate the reports for a single test - - Args: - test_data: the TestData - """ - Reports._generate_html(test_data) - if test_data.main_config.global_csv: - Reports._generate_csv(test_data.main_config.global_csv, test_data) - else: - Reports._generate_csv(test_data.main_config.csv, test_data) - - def _generate_html(test_data): - """Generate the HTML log file.""" - # If the file doesn't exist yet, this is the first test_config to run for - # this test, so we need to make the start of the html log - html_log = test_data.main_config.html_log - if not file_exists(html_log): - Reports.write_html_head() - with open(html_log, "a") as html: - # The image title - title = "

      " + test_data.image_name + " \ - tested on " + socket.gethostname() + "

      \ -

      \ - Errors and Warnings |\ - Information |\ - General Output |\ - Logs\ -

      " - # The script errors found - if not test_data.overall_passed: - ids = 'errors1' - else: - ids = 'errors' - errors = "
      \ -

      Errors and Warnings

      \ -
      " - # For each error we have logged in the test_config - for error in test_data.printerror: - # Replace < and > to avoid any html display errors - errors += "

      " + error.replace("<", "<").replace(">", ">") + "

      " - # If there is a \n, we probably want a
      in the html - if "\n" in error: - errors += "
      " - errors += "
      " - - # Links to the logs - logs = "
      \ -

      Logs

      \ -
      " - logs_path = test_data.logs_dir - for file in os.listdir(logs_path): - logs += "

      " + file + "

      " - logs += "
      " - - # All the testing information - info = "
      \ -

      Information

      \ -
      \ -
").append(columnHeader).append("
" - # The individual elements - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" -# info += "" -# info += "" -# info += "" -# info += "" -# info += "" -# info += "" - info += "
Image Path:" + test_data.image_file + "
Image Name:" + test_data.image_name + "
test_config Output Directory:" + test_data.main_config.output_dir + "
Autopsy Version:" + test_data.autopsy_version + "
Heap Space:" + test_data.heap_space + "
Test Start Date:" + test_data.start_date + "
Test End Date:" + test_data.end_date + "
Total Test Time:" + test_data.total_test_time + "
Total Ingest Time:" + test_data.total_ingest_time + "
Exceptions Count:" + str(len(get_exceptions(test_data))) + "
Autopsy OutOfMemoryExceptions:" + str(len(search_logs("OutOfMemoryException", test_data))) + "
Autopsy OutOfMemoryErrors:" + str(len(search_logs("OutOfMemoryError", test_data))) + "
Tika OutOfMemoryErrors/Exceptions:" + str(Reports._get_num_memory_errors("tika", test_data)) + "
Solr OutOfMemoryErrors/Exceptions:" + str(Reports._get_num_memory_errors("solr", test_data)) + "
TskCoreExceptions:" + str(len(search_log_set("autopsy", "TskCoreException", test_data))) + "
TskDataExceptions:" + str(len(search_log_set("autopsy", "TskDataException", test_data))) + "
Ingest Messages Count:" + str(test_data.ingest_messages) + "
Indexed Files Count:" + str(test_data.indexed_files) + "
Indexed File Chunks Count:" + str(test_data.indexed_chunks) + "
Out Of Disk Space:\ -

(will skew other test results)

" + str(len(search_log_set("autopsy", "Stopping ingest due to low disk space on disk", test_data))) + "
TSK Objects Count:" + str(test_data.db_diff_results.output_objs) + "
Artifacts Count:" + str(test_data.db_diff_results.output_artifacts)+ "
Attributes Count:" + str(test_data.db_diff_results.output_attrs) + "
\ -
" - # For all the general print statements in the test_config - output = "
\ -

General Output

\ -
" - # For each printout in the test_config's list - for out in test_data.printout: - output += "

" + out + "

" - # If there was a \n it probably means we want a
in the html - if "\n" in out: - output += "
" - output += "
" - - html.write(title) - html.write(errors) - html.write(info) - html.write(logs) - html.write(output) - - def write_html_head(html_log): - """Write the top of the HTML log file. - - Args: - html_log: a pathto_File, the global HTML log - """ - with open(str(html_log), "a") as html: - head = "\ - \ - AutopsyTesttest_config Output\ - \ - \ - " - html.write(head) - - def write_html_foot(html_log): - """Write the bottom of the HTML log file. - - Args: - html_log: a pathto_File, the global HTML log - """ - with open(html_log, "a") as html: - head = "" - html.write(head) - - def html_add_images(html_log, full_image_names): - """Add all the image names to the HTML log. - - Args: - full_image_names: a listof_String, each representing an image name - html_log: a pathto_File, the global HTML log - """ - # If the file doesn't exist yet, this is the first test_config to run for - # this test, so we need to make the start of the html log - if not file_exists(html_log): - Reports.write_html_head(html_log) - with open(html_log, "a") as html: - links = [] - for full_name in full_image_names: - name = get_image_name(full_name) - links.append("" + name + "") - html.write("

" + (" | ".join(links)) + "

") - - def _generate_csv(csv_path, test_data): - """Generate the CSV log file""" - # If the CSV file hasn't already been generated, this is the - # first run, and we need to add the column names - if not file_exists(csv_path): - Reports.csv_header(csv_path) - # Now add on the fields to a new row - with open(csv_path, "a") as csv: - # Variables that need to be written - vars = [] - vars.append( test_data.image_file ) - vars.append( test_data.image_name ) - vars.append( test_data.main_config.output_dir ) - vars.append( socket.gethostname() ) - vars.append( test_data.autopsy_version ) - vars.append( test_data.heap_space ) - vars.append( test_data.start_date ) - vars.append( test_data.end_date ) - vars.append( test_data.total_test_time ) - vars.append( test_data.total_ingest_time ) - vars.append( test_data.service_times ) - vars.append( str(len(get_exceptions(test_data))) ) - vars.append( str(Reports._get_num_memory_errors("autopsy", test_data)) ) - vars.append( str(Reports._get_num_memory_errors("tika", test_data)) ) - vars.append( str(Reports._get_num_memory_errors("solr", test_data)) ) - vars.append( str(len(search_log_set("autopsy", "TskCoreException", test_data))) ) - vars.append( str(len(search_log_set("autopsy", "TskDataException", test_data))) ) - vars.append( str(test_data.ingest_messages) ) - vars.append( str(test_data.indexed_files) ) - vars.append( str(test_data.indexed_chunks) ) - vars.append( str(len(search_log_set("autopsy", "Stopping ingest due to low disk space on disk", test_data))) ) -# vars.append( str(test_data.db_diff_results.output_objs) ) -# vars.append( str(test_data.db_diff_results.output_artifacts) ) -# vars.append( str(test_data.db_diff_results.output_objs) ) - vars.append( make_local_path("gold", test_data.image_name, DB_FILENAME) ) -# vars.append( test_data.db_diff_results.get_artifact_comparison() ) -# vars.append( test_data.db_diff_results.get_attribute_comparison() ) - vars.append( make_local_path("gold", test_data.image_name, 
"standard.html") ) - vars.append( str(test_data.html_report_passed) ) - vars.append( test_data.ant_to_string() ) - # Join it together with a ", " - output = "|".join(vars) - output += "\n" - # Write to the log! - csv.write(output) - - def csv_header(csv_path): - """Generate the CSV column names.""" - with open(csv_path, "w") as csv: - titles = [] - titles.append("Image Path") - titles.append("Image Name") - titles.append("Output test_config Directory") - titles.append("Host Name") - titles.append("Autopsy Version") - titles.append("Heap Space Setting") - titles.append("Test Start Date") - titles.append("Test End Date") - titles.append("Total Test Time") - titles.append("Total Ingest Time") - titles.append("Service Times") - titles.append("Autopsy Exceptions") - titles.append("Autopsy OutOfMemoryErrors/Exceptions") - titles.append("Tika OutOfMemoryErrors/Exceptions") - titles.append("Solr OutOfMemoryErrors/Exceptions") - titles.append("TskCoreExceptions") - titles.append("TskDataExceptions") - titles.append("Ingest Messages Count") - titles.append("Indexed Files Count") - titles.append("Indexed File Chunks Count") - titles.append("Out Of Disk Space") -# titles.append("Tsk Objects Count") -# titles.append("Artifacts Count") -# titles.append("Attributes Count") - titles.append("Gold Database Name") -# titles.append("Artifacts Comparison") -# titles.append("Attributes Comparison") - titles.append("Gold Report Name") - titles.append("Report Comparison") - titles.append("Ant Command Line") - output = "|".join(titles) - output += "\n" - csv.write(output) - - def _get_num_memory_errors(type, test_data): - """Get the number of OutOfMemory errors and Exceptions. - - Args: - type: a String representing the type of log to check. - test_data: the TestData to examine. 
- """ - return (len(search_log_set(type, "OutOfMemoryError", test_data)) + - len(search_log_set(type, "OutOfMemoryException", test_data))) - -class Logs(object): - - def generate_log_data(test_data): - """Find and handle relevent data from the Autopsy logs. - - Args: - test_data: the TestData whose logs to examine - """ - Logs._generate_common_log(test_data) - try: - Logs._fill_ingest_data(test_data) - except Exception as e: - Errors.print_error("Error: Unknown fatal error when filling test_config data.") - Errors.print_error(str(e) + "\n") - logging.critical(traceback.format_exc()) - # If running in verbose mode (-v) - if test_data.main_config.args.verbose: - errors = Logs._report_all_errors() - okay = "No warnings or errors in any log files." - print_report(errors, "VERBOSE", okay) - - def _generate_common_log(test_data): - """Generate the common log, the log of all exceptions and warnings from - each log file generated by Autopsy. - - Args: - test_data: the TestData to generate a log for - """ - try: - logs_path = test_data.logs_dir - common_log = codecs.open(test_data.common_log_path, "w", "utf_8") - warning_log = codecs.open(test_data.warning_log, "w", "utf_8") - common_log.write("--------------------------------------------------\n") - common_log.write(test_data.image_name + "\n") - common_log.write("--------------------------------------------------\n") - rep_path = make_local_path(test_data.main_config.output_dir) - rep_path = rep_path.replace("\\\\", "\\") - for file in os.listdir(logs_path): - log = codecs.open(make_path(logs_path, file), "r", "utf_8") - for line in log: - line = line.replace(rep_path, "test_data") - if line.startswith("Exception"): - common_log.write(file +": " + line) - elif line.startswith("Error"): - common_log.write(file +": " + line) - elif line.startswith("SEVERE"): - common_log.write(file +":" + line) - else: - warning_log.write(file +": " + line) - log.close() - common_log.write("\n") - common_log.close() - 
print(test_data.sorted_log) - srtcmdlst = ["sort", test_data.common_log_path, "-o", test_data.sorted_log] - subprocess.call(srtcmdlst) - except (OSError, IOError) as e: - Errors.print_error("Error: Unable to generate the common log.") - Errors.print_error(str(e) + "\n") - Errors.print_error(traceback.format_exc()) - logging.critical(traceback.format_exc()) - - def _fill_ingest_data(test_data): - """Fill the TestDatas variables that require the log files. - - Args: - test_data: the TestData to modify - """ - try: - # Open autopsy.log.0 - log_path = make_path(test_data.logs_dir, "autopsy.log.0") - log = open(log_path) - - # Set the TestData start time based off the first line of autopsy.log.0 - # *** If logging time format ever changes this will break *** - test_data.start_date = log.readline().split(" org.")[0] - - # Set the test_data ending time based off the "create" time (when the file was copied) - test_data.end_date = time.ctime(os.path.getmtime(log_path)) - except IOError as e: - Errors.print_error("Error: Unable to open autopsy.log.0.") - Errors.print_error(str(e) + "\n") - logging.warning(traceback.format_exc()) - # Start date must look like: "Jul 16, 2012 12:57:53 PM" - # End date must look like: "Mon Jul 16 13:02:42 2012" - # *** If logging time format ever changes this will break *** - start = datetime.datetime.strptime(test_data.start_date, "%b %d, %Y %I:%M:%S %p") - end = datetime.datetime.strptime(test_data.end_date, "%a %b %d %H:%M:%S %Y") - test_data.total_test_time = str(end - start) - - try: - # Set Autopsy version, heap space, ingest time, and service times - - version_line = search_logs("INFO: Application name: Autopsy, version:", test_data)[0] - test_data.autopsy_version = get_word_at(version_line, 5).rstrip(",") - - test_data.heap_space = search_logs("Heap memory usage:", test_data)[0].rstrip().split(": ")[1] - - ingest_line = search_logs("Ingest (including enqueue)", test_data)[0] - test_data.total_ingest_time = get_word_at(ingest_line, 
6).rstrip() - - message_line = search_log_set("autopsy", "Ingest messages count:", test_data)[0] - test_data.ingest_messages = int(message_line.rstrip().split(": ")[2]) - - files_line = search_log_set("autopsy", "Indexed files count:", test_data)[0] - test_data.indexed_files = int(files_line.rstrip().split(": ")[2]) - - chunks_line = search_log_set("autopsy", "Indexed file chunks count:", test_data)[0] - test_data.indexed_chunks = int(chunks_line.rstrip().split(": ")[2]) - except (OSError, IOError) as e: - Errors.print_error("Error: Unable to find the required information to fill test_config data.") - Errors.print_error(str(e) + "\n") - logging.critical(traceback.format_exc()) - print(traceback.format_exc()) - try: - service_lines = search_log("autopsy.log.0", "to process()", test_data) - service_list = [] - for line in service_lines: - words = line.split(" ") - # Kind of forcing our way into getting this data - # If this format changes, the tester will break - i = words.index("secs.") - times = words[i-4] + " " - times += words[i-3] + " " - times += words[i-2] + " " - times += words[i-1] + " " - times += words[i] - service_list.append(times) - test_data.service_times = "; ".join(service_list) - except (OSError, IOError) as e: - Errors.print_error("Error: Unknown fatal error when finding service times.") - Errors.print_error(str(e) + "\n") - logging.critical(traceback.format_exc()) - - def _report_all_errors(): - """Generate a list of all the errors found in the common log. - - Returns: - a listof_String, the errors found in the common log - """ - try: - return get_warnings() + get_exceptions() - except (OSError, IOError) as e: - Errors.print_error("Error: Unknown fatal error when reporting all errors.") - Errors.print_error(str(e) + "\n") - logging.warning(traceback.format_exc()) - - def search_common_log(string, test_data): - """Search the common log for any instances of a given string. - - Args: - string: the String to search for. 
- test_data: the TestData that holds the log to search. - - Returns: - a listof_String, all the lines that the string is found on - """ - results = [] - log = codecs.open(test_data.common_log_path, "r", "utf_8") - for line in log: - if string in line: - results.append(line) - log.close() - return results - - -def print_report(errors, name, okay): - """Print a report with the specified information. - - Args: - errors: a listof_String, the errors to report. - name: a String, the name of the report. - okay: the String to print when there are no errors. - """ - if errors: - Errors.print_error("--------< " + name + " >----------") - for error in errors: - Errors.print_error(str(error)) - Errors.print_error("--------< / " + name + " >--------\n") - else: - Errors.print_out("-----------------------------------------------------------------") - Errors.print_out("< " + name + " - " + okay + " />") - Errors.print_out("-----------------------------------------------------------------\n") - - -def get_exceptions(test_data): - """Get a list of the exceptions in the autopsy logs. - - Args: - test_data: the TestData to use to find the exceptions. - Returns: - a listof_String, the exceptions found in the logs. - """ - exceptions = [] - logs_path = test_data.logs_dir - results = [] - for file in os.listdir(logs_path): - if "autopsy.log" in file: - log = codecs.open(make_path(logs_path, file), "r", "utf_8") - ex = re.compile("\SException") - er = re.compile("\SError") - for line in log: - if ex.search(line) or er.search(line): - exceptions.append(line) - log.close() - return exceptions - -def get_warnings(test_data): - """Get a list of the warnings listed in the common log. - - Args: - test_data: the TestData to use to find the warnings - - Returns: - listof_String, the warnings found. 
- """ - warnings = [] - common_log = codecs.open(test_data.warning_log, "r", "utf_8") - for line in common_log: - if "warning" in line.lower(): - warnings.append(line) - common_log.close() - return warnings - -def copy_logs(test_data): - """Copy the Autopsy generated logs to output directory. - - Args: - test_data: the TestData whose logs will be copied - """ - try: - log_dir = os.path.join("..", "..", "Testing","build","test","qa-functional","work","userdir0","var","log") - shutil.copytree(log_dir, test_data.logs_dir) - except OSError as e: - printerror(test_data,"Error: Failed to copy the logs.") - printerror(test_data,str(e) + "\n") - logging.warning(traceback.format_exc()) - -def setDay(): - global Day - Day = int(strftime("%d", localtime())) - -def getLastDay(): - return Day - -def getDay(): - return int(strftime("%d", localtime())) - -def newDay(): - return getLastDay() != getDay() - -#------------------------------------------------------------# -# Exception classes to manage "acceptable" thrown exceptions # -# versus unexpected and fatal exceptions # -#------------------------------------------------------------# - -class FileNotFoundException(Exception): - """ - If a file cannot be found by one of the helper functions, - they will throw a FileNotFoundException unless the purpose - is to return False. 
- """ - def __init__(self, file): - self.file = file - self.strerror = "FileNotFoundException: " + file - - def print_error(self): - Errors.print_error("Error: File could not be found at:") - Errors.print_error(self.file + "\n") - - def error(self): - error = "Error: File could not be found at:\n" + self.file + "\n" - return error - -class DirNotFoundException(Exception): - """ - If a directory cannot be found by a helper function, - it will throw this exception - """ - def __init__(self, dir): - self.dir = dir - self.strerror = "DirNotFoundException: " + dir - - def print_error(self): - Errors.print_error("Error: Directory could not be found at:") - Errors.print_error(self.dir + "\n") - - def error(self): - error = "Error: Directory could not be found at:\n" + self.dir + "\n" - return error - - -class Errors: - """A class used to manage error reporting. - - Attributes: - printout: a listof_String, the non-error messages that were printed - printerror: a listof_String, the error messages that were printed - email_body: a String, the body of the report email - email_msg_prefix: a String, the prefix for lines added to the email - email_attchs: a listof_pathto_File, the files to be attached to the - report email - """ - printout = [] - printerror = [] - email_body = "" - email_msg_prefix = "Configuration" - email_attachs = [] - - def set_testing_phase(image_name): - """Change the email message prefix to be the given testing phase. - - Args: - image_name: a String, representing the current image being tested - """ - Errors.email_msg_prefix = image_name - - def print_out(msg): - """Print out an informational message. - - Args: - msg: a String, the message to be printed - """ - print(msg) - Errors.printout.append(msg) - - def print_error(msg): - """Print out an error message. - - Args: - msg: a String, the error message to be printed. 
- """ - print(msg) - Errors.printerror.append(msg) - - def clear_print_logs(): - """Reset the image-specific attributes of the Errors class.""" - Errors.printout = [] - Errors.printerror = [] - - def add_email_msg(msg): - """Add the given message to the body of the report email. - - Args: - msg: a String, the message to be added to the email - """ - Errors.email_body += Errors.email_msg_prefix + ":" + msg - - def add_email_attachment(path): - """Add the given file to be an attachment for the report email - - Args: - file: a pathto_File, the file to add - """ - Errors.email_attachs.append(path) - - -class DiffResults(object): - """Container for the results of the database diff tests. - - Stores artifact, object, and attribute counts and comparisons generated by - TskDbDiff. - - Attributes: - gold_attrs: a Nat, the number of gold attributes - output_attrs: a Nat, the number of output attributes - gold_objs: a Nat, the number of gold objects - output_objs: a Nat, the number of output objects - artifact_comp: a listof_String, describing the differences - attribute_comp: a listof_String, describing the differences - passed: a boolean, did the diff pass? 
- """ - def __init__(self, tsk_diff): - """Inits a DiffResults - - Args: - tsk_diff: a TskDBDiff - """ - self.gold_attrs = tsk_diff.gold_attributes - self.output_attrs = tsk_diff.autopsy_attributes - self.gold_objs = tsk_diff.gold_objects - self.output_objs = tsk_diff.autopsy_objects - self.artifact_comp = tsk_diff.artifact_comparison - self.attribute_comp = tsk_diff.attribute_comparison - self.gold_artifacts = len(tsk_diff.gold_artifacts) - self.output_artifacts = len(tsk_diff.autopsy_artifacts) - self.passed = tsk_diff.passed - - def get_artifact_comparison(self): - if not self.artifact_comp: - return "All counts matched" - else: - return "; ".join(self.artifact_comp) - - def get_attribute_comparison(self): - if not self.attribute_comp: - return "All counts matched" - list = [] - for error in self.attribute_comp: - list.append(error) - return ";".join(list) - - -#-------------------------------------------------------------# -# Parses argv and stores booleans to match command line input # -#-------------------------------------------------------------# -class Args(object): - """A container for command line options and arguments. 
- - Attributes: - single: a boolean indicating whether to run in single file mode - single_file: an Image to run the test on - rebuild: a boolean indicating whether to run in rebuild mode - list: a boolean indicating a config file was specified - unallocated: a boolean indicating unallocated space should be ignored - ignore: a boolean indicating the input directory should be ingnored - keep: a boolean indicating whether to keep the SOLR index - verbose: a boolean indicating whether verbose output should be printed - exeception: a boolean indicating whether errors containing exception - exception_string should be printed - exception_sring: a String representing and exception name - fr: a boolean indicating whether gold standard images will be downloaded - """ - def __init__(self): - self.single = False - self.single_file = "" - self.rebuild = False - self.list = False - self.config_file = "" - self.unallocated = False - self.ignore = False - self.keep = False - self.verbose = False - self.exception = False - self.exception_string = "" - self.fr = False - self.email_enabled = False - - def parse(self): - """Get the command line arguments and parse them.""" - nxtproc = [] - nxtproc.append("python3") - nxtproc.append(sys.argv.pop(0)) - while sys.argv: - arg = sys.argv.pop(0) - nxtproc.append(arg) - if(arg == "-f"): - #try: @@@ Commented out until a more specific except statement is added - arg = sys.argv.pop(0) - print("Running on a single file:") - print(path_fix(arg) + "\n") - self.single = True - self.single_file = path_fix(arg) - #except: - # print("Error: No single file given.\n") - # return False - elif(arg == "-r" or arg == "--rebuild"): - print("Running in rebuild mode.\n") - self.rebuild = True - elif(arg == "-l" or arg == "--list"): - try: - arg = sys.argv.pop(0) - nxtproc.append(arg) - print("Running from configuration file:") - print(arg + "\n") - self.list = True - self.config_file = arg - except: - print("Error: No configuration file given.\n") - return 
False - elif(arg == "-u" or arg == "--unallocated"): - print("Ignoring unallocated space.\n") - self.unallocated = True - elif(arg == "-k" or arg == "--keep"): - print("Keeping the Solr index.\n") - self.keep = True - elif(arg == "-v" or arg == "--verbose"): - print("Running in verbose mode:") - print("Printing all thrown exceptions.\n") - self.verbose = True - elif(arg == "-e" or arg == "--exception"): - try: - arg = sys.argv.pop(0) - nxtproc.append(arg) - print("Running in exception mode: ") - print("Printing all exceptions with the string '" + arg + "'\n") - self.exception = True - self.exception_string = arg - except: - print("Error: No exception string given.") - elif arg == "-h" or arg == "--help": - print(usage()) - return False - elif arg == "-fr" or arg == "--forcerun": - print("Not downloading new images") - self.fr = True - elif arg == "-e" or arg == "-email": - self.email_enabled = True - else: - print(usage()) - return False - # Return the args were sucessfully parsed - return self._sanity_check() - - def _sanity_check(self): - """Check to make sure there are no conflicting arguments and the - specified files exist. 
- - Returns: - False if there are conflicting arguments or a specified file does - not exist, True otherwise - """ - if self.single and self.list: - print("Cannot run both from config file and on a single file.") - return False - if self.list: - if not file_exists(self.config_file): - print("Configuration file does not exist at:", - self.config_file) - return False - elif self.single: - if not file_exists(self.single_file): - msg = "Image file does not exist at: " + self.single_file - return False - if (not self.single) and (not self.ignore) and (not self.list): - self.config_file = "config.xml" - if not file_exists(self.config_file): - msg = "Configuration file does not exist at: " + self.config_file - return False - - return True - -#### -# Helper Functions -#### -def search_logs(string, test_data): - """Search through all the known log files for a given string. - - Args: - string: the String to search for. - test_data: the TestData that holds the logs to search. - - Returns: - a listof_String, the lines that contained the given String. - """ - logs_path = test_data.logs_dir - results = [] - for file in os.listdir(logs_path): - log = codecs.open(make_path(logs_path, file), "r", "utf_8") - for line in log: - if string in line: - results.append(line) - log.close() - return results - -def search_log(log, string, test_data): - """Search the given log for any instances of a given string. - - Args: - log: a pathto_File, the log to search in - string: the String to search for. - test_data: the TestData that holds the log to search. 
- - Returns: - a listof_String, all the lines that the string is found on - """ - logs_path = make_path(test_data.logs_dir, log) - try: - results = [] - log = codecs.open(logs_path, "r", "utf_8") - for line in log: - if string in line: - results.append(line) - log.close() - if results: - return results - except: - raise FileNotFoundException(logs_path) - -# Search through all the the logs of the given type -# Types include autopsy, tika, and solr -def search_log_set(type, string, test_data): - """Search through all logs to the given type for the given string. - - Args: - type: the type of log to search in. - string: the String to search for. - test_data: the TestData containing the logs to search. - - Returns: - a listof_String, the lines on which the String was found. - """ - logs_path = test_data.logs_dir - results = [] - for file in os.listdir(logs_path): - if type in file: - log = codecs.open(make_path(logs_path, file), "r", "utf_8") - for line in log: - if string in line: - results.append(line) - log.close() - return results - - -def clear_dir(dir): - """Clears all files from a directory and remakes it. - - Args: - dir: a pathto_Dir, the directory to clear - """ - try: - if dir_exists(dir): - shutil.rmtree(dir) - os.makedirs(dir) - return True; - except OSError as e: - printerror(test_data,"Error: Cannot clear the given directory:") - printerror(test_data,dir + "\n") - print(str(e)) - return False; - -def del_dir(dir): - """Delete the given directory. - - Args: - dir: a pathto_Dir, the directory to delete - """ - try: - if dir_exists(dir): - shutil.rmtree(dir) - return True; - except: - printerror(test_data,"Error: Cannot delete the given directory:") - printerror(test_data,dir + "\n") - return False; - -def get_file_in_dir(dir, ext): - """Returns the first file in the given directory with the given extension. 
- - Args: - dir: a pathto_Dir, the directory to search - ext: a String, the extension to search for - - Returns: - pathto_File, the file that was found - """ - try: - for file in os.listdir(dir): - if file.endswith(ext): - return make_path(dir, file) - # If nothing has been found, raise an exception - raise FileNotFoundException(dir) - except: - raise DirNotFoundException(dir) - -def find_file_in_dir(dir, name, ext): - """Find the file with the given name in the given directory. - - Args: - dir: a pathto_Dir, the directory to search - name: a String, the basename of the file to search for - ext: a String, the extension of the file to search for - """ - try: - for file in os.listdir(dir): - if file.startswith(name): - if file.endswith(ext): - return make_path(dir, file) - raise FileNotFoundException(dir) - except: - raise DirNotFoundException(dir) - - -class OS: - LINUX, MAC, WIN, CYGWIN = range(4) - - -if __name__ == "__main__": - global SYS - if _platform == "linux" or _platform == "linux2": - SYS = OS.LINUX - elif _platform == "darwin": - SYS = OS.MAC - elif _platform == "win32": - SYS = OS.WIN - elif _platform == "cygwin": - SYS = OS.CYGWIN - - if SYS is OS.WIN or SYS is OS.CYGWIN: - main() - else: - print("We only support Windows and Cygwin at this time.") +#!/usr/bin/python +# -*- coding: utf_8 -*- + + # Autopsy Forensic Browser + # + # Copyright 2013 Basis Technology Corp. + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. 
+from tskdbdiff import TskDbDiff, TskDbDiffException +import codecs +import datetime +import logging +import os +import re +import shutil +import socket +import sqlite3 +import subprocess +import sys +from sys import platform as _platform +import time +import traceback +import xml +from time import localtime, strftime +from xml.dom.minidom import parse, parseString +import smtplib +from email.mime.image import MIMEImage +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText +import re +import zipfile +import zlib +import Emailer +import srcupdater +from regression_utils import * + +# +# Please read me... +# +# This is the regression testing Python script. +# It uses an ant command to run build.xml for RegressionTest.java +# +# The code is cleanly sectioned and commented. +# Please follow the current formatting. +# It is a long and potentially confusing script. +# +# Variable, function, and class names are written in Python conventions: +# this_is_a_variable this_is_a_function() ThisIsAClass +# +# + + +# Data Definitions: +# +# pathto_X: A path to type X. 
+# ConfigFile: An XML file formatted according to the template in myconfig.xml +# ParsedConfig: A dom object that represents a ConfigFile +# SQLCursor: A cursor recieved from a connection to an SQL database +# Nat: A Natural Number +# Image: An image +# + +# Enumeration of database types used for the simplification of generating database paths +DBType = enum('OUTPUT', 'GOLD', 'BACKUP') + +# Common filename of the output and gold databases (although they are in different directories +DB_FILENAME = "autopsy.db" + +# Backup database filename +BACKUP_DB_FILENAME = "autopsy_backup.db" + +# TODO: Double check this purpose statement +# Folder name for gold standard database testing +AUTOPSY_TEST_CASE = "AutopsyTestCase" + +# TODO: Double check this purpose statement +# The filename of the log to store error messages +COMMON_LOG = "AutopsyErrors.txt" + +Day = 0 + +#----------------------# +# Main # +#----------------------# +def main(): + """Parse the command-line arguments, create the configuration, and run the tests.""" + args = Args() + parse_result = args.parse() + test_config = TestConfiguration(args) + # The arguments were given wrong: + if not parse_result: + return + if(not args.fr): + antin = ["ant"] + antin.append("-f") + antin.append(os.path.join("..","..","build.xml")) + antin.append("test-download-imgs") + if SYS is OS.CYGWIN: + subprocess.call(antin) + elif SYS is OS.WIN: + theproc = subprocess.Popen(antin, shell = True, stdout=subprocess.PIPE) + theproc.communicate() + # Otherwise test away! + TestRunner.run_tests(test_config) + + +class TestRunner(object): + """A collection of functions to run the regression tests.""" + + def run_tests(test_config): + """Run the tests specified by the main TestConfiguration. 
+ + Executes the AutopsyIngest for each image and dispatches the results based on + the mode (rebuild or testing) + """ + test_data_list = [ TestData(image, test_config) for image in test_config.images ] + + Reports.html_add_images(test_config.html_log, test_config.images) + + logres =[] + for test_data in test_data_list: + Errors.clear_print_logs() + Errors.set_testing_phase(test_data.image) + if not (test_config.args.rebuild or os.path.exists(test_data.gold_archive)): + msg = "Gold standard doesn't exist, skipping image:" + Errors.print_error(msg) + Errors.print_error(test_data.gold_archive) + continue + TestRunner._run_autopsy_ingest(test_data) + + if test_config.args.rebuild: + TestRunner.rebuild(test_data) + else: + logres.append(TestRunner._run_test(test_data)) + test_data.printout = Errors.printout + test_data.printerror = Errors.printerror + + Reports.write_html_foot(test_config.html_log) + # TODO: move this elsewhere + if (len(logres)>0): + for lm in logres: + for ln in lm: + Errors.add_email_msg(ln) + + # TODO: possibly worth putting this in a sub method + if all([ test_data.overall_passed for test_data in test_data_list ]): + Errors.add_email_msg("All images passed.\n") + else: + msg = "The following images failed:\n" + for test_data in test_data_list: + if not test_data.overall_passed: + msg += "\t" + test_data.image + "\n" + Errors.add_email_msg(msg) + html = open(test_config.html_log) + Errors.add_email_attachment(html.name) + html.close() + + if test_config.email_enabled: + Emailer.send_email(test_config.mail_to, test_config.mail_server, + test_config.mail_subject, Errors.email_body, Errors.email_attachs) + + def _run_autopsy_ingest(test_data): + """Run Autopsy ingest for the image in the given TestData. + + Also generates the necessary logs for rebuilding or diff. + + Args: + test_data: the TestData to run the ingest on. 
+ """ + if image_type(test_data.image_file) == IMGTYPE.UNKNOWN: + Errors.print_error("Error: Image type is unrecognized:") + Errors.print_error(test_data.image_file + "\n") + return + + logging.debug("--------------------") + logging.debug(test_data.image_name) + logging.debug("--------------------") + TestRunner._run_ant(test_data) + time.sleep(2) # Give everything a second to process + + try: + # Dump the database before we diff or use it for rebuild + TskDbDiff.dump_output_db(test_data.get_db_path(DBType.OUTPUT), test_data.get_db_dump_path(DBType.OUTPUT), + test_data.get_sorted_data_path(DBType.OUTPUT)) + except sqlite3.OperationalError as e: + print("Ingest did not run properly.", + "Make sure no other instances of Autopsy are open and try again.") + sys.exit() + + # merges logs into a single log for later diff / rebuild + copy_logs(test_data) + Logs.generate_log_data(test_data) + + TestRunner._handle_solr(test_data) + TestRunner._handle_exception(test_data) + + #TODO: figure out return type of _run_test (logres) + def _run_test(test_data): + """Compare the results of the output to the gold standard. + + Args: + test_data: the TestData + + Returns: + logres? 
+ """ + TestRunner._extract_gold(test_data) + + # Look for core exceptions + # @@@ Should be moved to TestResultsDiffer, but it didn't know about logres -- need to look into that + logres = Logs.search_common_log("TskCoreException", test_data) + + TestResultsDiffer.run_diff(test_data) + test_data.overall_passed = (test_data.html_report_passed and + test_data.errors_diff_passed and test_data.db_diff_passed) + + Reports.generate_reports(test_data) + if(not test_data.overall_passed): + Errors.add_email_attachment(test_data.common_log_path) + return logres + + def _extract_gold(test_data): + """Extract gold archive file to output/gold/tmp/ + + Args: + test_data: the TestData + """ + extrctr = zipfile.ZipFile(test_data.gold_archive, 'r', compression=zipfile.ZIP_DEFLATED) + extrctr.extractall(test_data.main_config.gold) + extrctr.close + time.sleep(2) + + def _handle_solr(test_data): + """Clean up SOLR index if in keep mode (-k). + + Args: + test_data: the TestData + """ + if not test_data.main_config.args.keep: + if clear_dir(test_data.solr_index): + print_report([], "DELETE SOLR INDEX", "Solr index deleted.") + else: + print_report([], "KEEP SOLR INDEX", "Solr index has been kept.") + + def _handle_exception(test_data): + """If running in exception mode, print exceptions to log. + + Args: + test_data: the TestData + """ + if test_data.main_config.args.exception: + exceptions = search_logs(test_data.main_config.args.exception_string, test_data) + okay = ("No warnings or exceptions found containing text '" + + test_data.main_config.args.exception_string + "'.") + print_report(exceptions, "EXCEPTION", okay) + + def rebuild(test_data): + """Rebuild the gold standard with the given TestData. + + Copies the test-generated database and html report files into the gold directory. 
+ """ + test_config = test_data.main_config + # Errors to print + errors = [] + # Delete the current gold standards + gold_dir = test_config.img_gold + clear_dir(test_config.img_gold) + tmpdir = make_path(gold_dir, test_data.image_name) + dbinpth = test_data.get_db_path(DBType.OUTPUT) + dboutpth = make_path(tmpdir, DB_FILENAME) + dataoutpth = make_path(tmpdir, test_data.image_name + "SortedData.txt") + dbdumpinpth = test_data.get_db_dump_path(DBType.OUTPUT) + dbdumpoutpth = make_path(tmpdir, test_data.image_name + "DBDump.txt") + if not os.path.exists(test_config.img_gold): + os.makedirs(test_config.img_gold) + if not os.path.exists(tmpdir): + os.makedirs(tmpdir) + try: + shutil.copy(dbinpth, dboutpth) + if file_exists(test_data.get_sorted_data_path(DBType.OUTPUT)): + shutil.copy(test_data.get_sorted_data_path(DBType.OUTPUT), dataoutpth) + shutil.copy(dbdumpinpth, dbdumpoutpth) + error_pth = make_path(tmpdir, test_data.image_name+"SortedErrors.txt") + shutil.copy(test_data.sorted_log, error_pth) + except IOError as e: + Errors.print_error(str(e)) + Errors.add_email_message("Not rebuilt properly") + print(str(e)) + print(traceback.format_exc()) + # Rebuild the HTML report + output_html_report_dir = test_data.get_html_report_path(DBType.OUTPUT) + gold_html_report_dir = make_path(tmpdir, "Report") + + try: + shutil.copytree(output_html_report_dir, gold_html_report_dir) + except OSError as e: + errors.append(e.error()) + except Exception as e: + errors.append("Error: Unknown fatal error when rebuilding the gold html report.") + errors.append(str(e) + "\n") + print(traceback.format_exc()) + oldcwd = os.getcwd() + zpdir = gold_dir + os.chdir(zpdir) + os.chdir("..") + img_gold = "tmp" + img_archive = make_path(test_data.image_name+"-archive.zip") + comprssr = zipfile.ZipFile(img_archive, 'w',compression=zipfile.ZIP_DEFLATED) + TestRunner.zipdir(img_gold, comprssr) + comprssr.close() + os.chdir(oldcwd) + del_dir(test_config.img_gold) + okay = "Sucessfully rebuilt all gold 
standards." + print_report(errors, "REBUILDING", okay) + + def zipdir(path, zip): + for root, dirs, files in os.walk(path): + for file in files: + zip.write(os.path.join(root, file)) + + def _run_ant(test_data): + """Construct and run the ant build command for the given TestData. + + Tests Autopsy by calling RegressionTest.java via the ant build file. + + Args: + test_data: the TestData + """ + test_config = test_data.main_config + # Set up the directories + if dir_exists(test_data.output_path): + shutil.rmtree(test_data.output_path) + os.makedirs(test_data.output_path) + test_data.ant = ["ant"] + test_data.ant.append("-v") + test_data.ant.append("-f") + # case.ant.append(case.build_path) + test_data.ant.append(os.path.join("..","..","Testing","build.xml")) + test_data.ant.append("regression-test") + test_data.ant.append("-l") + test_data.ant.append(test_data.antlog_dir) + test_data.ant.append("-Dimg_path=" + test_data.image_file) + test_data.ant.append("-Dknown_bad_path=" + test_config.known_bad_path) + test_data.ant.append("-Dkeyword_path=" + test_config.keyword_path) + test_data.ant.append("-Dnsrl_path=" + test_config.nsrl_path) + test_data.ant.append("-Dgold_path=" + test_config.gold) + test_data.ant.append("-Dout_path=" + + make_local_path(test_data.output_path)) + test_data.ant.append("-Dignore_unalloc=" + "%s" % test_config.args.unallocated) + test_data.ant.append("-Dtest.timeout=" + str(test_config.timeout)) + + Errors.print_out("Ingesting Image:\n" + test_data.image_file + "\n") + Errors.print_out("CMD: " + " ".join(test_data.ant)) + Errors.print_out("Starting test...\n") + antoutpth = make_local_path(test_data.main_config.output_dir, "antRunOutput.txt") + antout = open(antoutpth, "a") + if SYS is OS.CYGWIN: + subprocess.call(test_data.ant, stdout=subprocess.PIPE) + elif SYS is OS.WIN: + theproc = subprocess.Popen(test_data.ant, shell = True, stdout=subprocess.PIPE) + theproc.communicate() + antout.close() + + +class TestData(object): + """Container for 
the input and output of a single image. + + Represents data for the test of a single image, including path to the image, + database paths, etc. + + Attributes: + main_config: the global TestConfiguration + ant: a listof_String, the ant command for this TestData + image_file: a pathto_Image, the image for this TestData + image: a String, the image file's name + image_name: a String, the image file's name with a trailing (0) + output_path: pathto_Dir, the output directory for this TestData + autopsy_data_file: a pathto_File, the IMAGE_NAMEAutopsy_data.txt file + warning_log: a pathto_File, the AutopsyLogs.txt file + antlog_dir: a pathto_File, the antlog.txt file + test_dbdump: a pathto_File, the database dump, IMAGENAMEDump.txt + common_log_path: a pathto_File, the IMAGE_NAMECOMMON_LOG file + sorted_log: a pathto_File, the IMAGENAMESortedErrors.txt file + reports_dir: a pathto_Dir, the AutopsyTestCase/Reports folder + gold_data_dir: a pathto_Dir, the gold standard directory + gold_archive: a pathto_File, the gold standard archive + logs_dir: a pathto_Dir, the location where autopsy logs are stored + solr_index: a pathto_Dir, the locatino of the solr index + html_report_passed: a boolean, did the HTML report diff pass? + errors_diff_passed: a boolean, did the error diff pass? + db_diff_passed: a boolean, did the db diff pass? + overall_passed: a boolean, did the test pass? 
+ total_test_time: a String representation of the test duration + start_date: a String representation of this TestData's start date + end_date: a String representation of the TestData's end date + total_ingest_time: a String representation of the total ingest time + artifact_count: a Nat, the number of artifacts + artifact_fail: a Nat, the number of artifact failures + heap_space: a String representation of TODO + service_times: a String representation of TODO + autopsy_version: a String, the version of autopsy that was run + ingest_messages: a Nat, the number of ingest messages + indexed_files: a Nat, the number of files indexed during the ingest + indexed_chunks: a Nat, the number of chunks indexed during the ingest + printerror: a listof_String, the error messages printed during this TestData's test + printout: a listof_String, the messages pritned during this TestData's test + """ + + def __init__(self, image, main_config): + """Init this TestData with it's image and the test configuration. + + Args: + image: the Image to be tested. + main_config: the global TestConfiguration. + """ + # Configuration Data + self.main_config = main_config + self.ant = [] + self.image_file = str(image) + # TODO: This 0 should be be refactored out, but it will require rebuilding and changing of outputs. 
+ self.image = get_image_name(self.image_file) + self.image_name = self.image + "(0)" + # Directory structure and files + self.output_path = make_path(self.main_config.output_dir, self.image_name) + self.autopsy_data_file = make_path(self.output_path, self.image_name + "Autopsy_data.txt") + self.warning_log = make_local_path(self.output_path, "AutopsyLogs.txt") + self.antlog_dir = make_local_path(self.output_path, "antlog.txt") + self.test_dbdump = make_path(self.output_path, self.image_name + + "DBDump.txt") + self.common_log_path = make_local_path(self.output_path, self.image_name + COMMON_LOG) + self.sorted_log = make_local_path(self.output_path, self.image_name + "SortedErrors.txt") + self.reports_dir = make_path(self.output_path, AUTOPSY_TEST_CASE, "Reports") + self.gold_data_dir = make_path(self.main_config.img_gold, self.image_name) + self.gold_archive = make_path(self.main_config.gold, + self.image_name + "-archive.zip") + self.logs_dir = make_path(self.output_path, "logs") + self.solr_index = make_path(self.output_path, AUTOPSY_TEST_CASE, + "ModuleOutput", "KeywordSearch") + # Results and Info + self.html_report_passed = False + self.errors_diff_passed = False + self.db_diff_passed = False + self.overall_passed = False + # Ingest info + self.total_test_time = "" + self.start_date = "" + self.end_date = "" + self.total_ingest_time = "" + self.artifact_count = 0 + self.artifact_fail = 0 + self.heap_space = "" + self.service_times = "" + self.autopsy_version = "" + self.ingest_messages = 0 + self.indexed_files = 0 + self.indexed_chunks = 0 + # Error tracking + self.printerror = [] + self.printout = [] + + def ant_to_string(self): + string = "" + for arg in self.ant: + string += (arg + " ") + return string + + def get_db_path(self, db_type): + """Get the path to the database file that corresponds to the given DBType. + + Args: + DBType: the DBType of the path to be generated. 
+ """ + if(db_type == DBType.GOLD): + db_path = make_path(self.gold_data_dir, DB_FILENAME) + elif(db_type == DBType.OUTPUT): + db_path = make_path(self.main_config.output_dir, self.image_name, AUTOPSY_TEST_CASE, DB_FILENAME) + else: + db_path = make_path(self.main_config.output_dir, self.image_name, AUTOPSY_TEST_CASE, BACKUP_DB_FILENAME) + return db_path + + def get_html_report_path(self, html_type): + """Get the path to the HTML Report folder that corresponds to the given DBType. + + Args: + DBType: the DBType of the path to be generated. + """ + if(html_type == DBType.GOLD): + return make_path(self.gold_data_dir, "Report") + else: + # Autopsy creates an HTML report folder in the form AutopsyTestCase DATE-TIME + # It's impossible to get the exact time the folder was created, but the folder + # we are looking for is the only one in the self.reports_dir folder + html_path = "" + for fs in os.listdir(self.reports_dir): + html_path = make_path(self.reports_dir, fs) + if os.path.isdir(html_path): + break + return make_path(html_path, os.listdir(html_path)[0]) + + def get_sorted_data_path(self, file_type): + """Get the path to the SortedData file that corresponds to the given DBType. + + Args: + file_type: the DBType of the path to be generated + """ + return self._get_path_to_file(file_type, "SortedData.txt") + + def get_sorted_errors_path(self, file_type): + """Get the path to the SortedErrors file that correspodns to the given + DBType. + + Args: + file_type: the DBType of the path to be generated + """ + return self._get_path_to_file(file_type, "SortedErrors.txt") + + def get_db_dump_path(self, file_type): + """Get the path to the DBDump file that corresponds to the given DBType. + + Args: + file_type: the DBType of the path to be generated + """ + return self._get_path_to_file(file_type, "DBDump.txt") + + def _get_path_to_file(self, file_type, file_name): + """Get the path to the specified file with the specified type. 
+ + Args: + file_type: the DBType of the path to be generated + file_name: a String, the filename of the path to be generated + """ + full_filename = self.image_name + file_name + if(file_type == DBType.GOLD): + return make_path(self.gold_data_dir, full_filename) + else: + return make_path(self.output_path, full_filename) + + +class TestConfiguration(object): + """Container for test configuration data. + + The Master Test Configuration. Encapsulates consolidated high level input from + config XML file and command-line arguments. + + Attributes: + args: an Args, the command line arguments + output_dir: a pathto_Dir, the output directory + input_dir: a pathto_Dir, the input directory + gold: a pathto_Dir, the gold directory + img_gold: a pathto_Dir, the temp directory where gold images are unzipped to + csv: a pathto_File, the local csv file + global_csv: a pathto_File, the global csv file + html_log: a pathto_File + known_bad_path: + keyword_path: + nsrl_path: + build_path: a pathto_File, the ant build file which runs the tests + autopsy_version: + ingest_messages: a Nat, number of ingest messages + indexed_files: a Nat, the number of indexed files + indexed_chunks: a Nat, the number of indexed chunks + timer: + images: a listof_Image, the images to be tested + timeout: a Nat, the amount of time before killing the test + ant: a listof_String, the ant command to run the tests + """ + + def __init__(self, args): + """Inits TestConfiguration and loads a config file if available. + + Args: + args: an Args, the command line arguments. 
+ """ + self.args = args + # Paths: + self.output_dir = "" + self.input_dir = make_local_path("..","input") + self.gold = make_path("..", "output", "gold") + self.img_gold = make_path(self.gold, 'tmp') + # Logs: + self.csv = "" + self.global_csv = "" + self.html_log = "" + # Ant info: + self.known_bad_path = make_path(self.input_dir, "notablehashes.txt-md5.idx") + self.keyword_path = make_path(self.input_dir, "notablekeywords.xml") + self.nsrl_path = make_path(self.input_dir, "nsrl.txt-md5.idx") + self.build_path = make_path("..", "build.xml") + # Infinite Testing info + timer = 0 + self.images = [] + # Email info + self.email_enabled = args.email_enabled + self.mail_server = "" + self.mail_to = "" + self.mail_subject = "" + # Set the timeout to something huge + # The entire tester should not timeout before this number in ms + # However it only seems to take about half this time + # And it's very buggy, so we're being careful + self.timeout = 24 * 60 * 60 * 1000 * 1000 + + if not self.args.single: + self._load_config_file(self.args.config_file) + else: + self.images.append(self.args.single_file) + self._init_logs() + #self._init_imgs() + #self._init_build_info() + + + def _load_config_file(self, config_file): + """Updates this TestConfiguration's attributes from the config file. + + Initializes this TestConfiguration by iterating through the XML config file + command-line argument. 
Populates self.images and optional email configuration + + Args: + config_file: ConfigFile - the configuration file to load + """ + try: + count = 0 + parsed_config = parse(config_file) + logres = [] + counts = {} + if parsed_config.getElementsByTagName("indir"): + self.input_dir = parsed_config.getElementsByTagName("indir")[0].getAttribute("value").encode().decode("utf_8") + if parsed_config.getElementsByTagName("global_csv"): + self.global_csv = parsed_config.getElementsByTagName("global_csv")[0].getAttribute("value").encode().decode("utf_8") + self.global_csv = make_local_path(self.global_csv) + if parsed_config.getElementsByTagName("golddir"): + self.gold = parsed_config.getElementsByTagName("golddir")[0].getAttribute("value").encode().decode("utf_8") + self.img_gold = make_path(self.gold, 'tmp') + + self._init_imgs(parsed_config) + self._init_build_info(parsed_config) + self._init_email_info(parsed_config) + + except IOError as e: + msg = "There was an error loading the configuration file.\n" + msg += "\t" + str(e) + Errors.add_email_msg(msg) + logging.critical(traceback.format_exc()) + print(traceback.format_exc()) + + def _init_logs(self): + """Setup output folder, logs, and reporting infrastructure.""" + if(not dir_exists(make_path("..", "output", "results"))): + os.makedirs(make_path("..", "output", "results",)) + self.output_dir = make_path("..", "output", "results", time.strftime("%Y.%m.%d-%H.%M.%S")) + os.makedirs(self.output_dir) + self.csv = make_local_path(self.output_dir, "CSV.txt") + self.html_log = make_path(self.output_dir, "AutopsyTestCase.html") + log_name = self.output_dir + "\\regression.log" + logging.basicConfig(filename=log_name, level=logging.DEBUG) + + def _init_build_info(self, parsed_config): + """Initializes paths that point to information necessary to run the AutopsyIngest.""" + build_elements = parsed_config.getElementsByTagName("build") + if build_elements: + build_element = build_elements[0] + build_path = 
build_element.getAttribute("value").encode().decode("utf_8") + self.build_path = build_path + + def _init_imgs(self, parsed_config): + """Initialize the list of images to run tests on.""" + for element in parsed_config.getElementsByTagName("image"): + value = element.getAttribute("value").encode().decode("utf_8") + print ("Image in Config File: " + value) + if file_exists(value): + self.images.append(value) + else: + msg = "File: " + value + " doesn't exist" + Errors.print_error(msg) + Errors.add_email_msg(msg) + image_count = len(self.images) + + # Sanity check to see if there are obvious gold images that we are not testing + gold_count = 0 + for file in os.listdir(self.gold): + if not(file == 'tmp'): + gold_count+=1 + + if (image_count > gold_count): + print("******Alert: There are more input images than gold standards, some images will not be properly tested.\n") + elif (image_count < gold_count): + print("******Alert: There are more gold standards than input images, this will not check all gold Standards.\n") + + def _init_email_info(self, parsed_config): + """Initializes email information dictionary""" + email_elements = parsed_config.getElementsByTagName("email") + if email_elements: + mail_to = email_elements[0] + self.mail_to = mail_to.getAttribute("value").encode().decode("utf_8") + mail_server_elements = parsed_config.getElementsByTagName("mail_server") + if mail_server_elements: + mail_from = mail_server_elements[0] + self.mail_server = mail_from.getAttribute("value").encode().decode("utf_8") + subject_elements = parsed_config.getElementsByTagName("subject") + if subject_elements: + subject = subject_elements[0] + self.mail_subject = subject.getAttribute("value").encode().decode("utf_8") + if self.mail_server and self.mail_to and self.args.email_enabled: + self.email_enabled = True + print("Email will be sent to ", self.mail_to) + else: + print("No email will be sent.") + + +#-------------------------------------------------# +# Functions relating to 
comparing outputs # +#-------------------------------------------------# +class TestResultsDiffer(object): + """Compares results for a single test.""" + + def run_diff(test_data): + """Compares results for a single test. + + Args: + test_data: the TestData to use. + databaseDiff: TskDbDiff object created based off test_data + """ + try: + output_db = test_data.get_db_path(DBType.OUTPUT) + gold_db = test_data.get_db_path(DBType.GOLD) + output_dir = test_data.output_path + gold_bb_dump = test_data.get_sorted_data_path(DBType.GOLD) + gold_dump = test_data.get_db_dump_path(DBType.GOLD) + test_data.db_diff_pass = all(TskDbDiff(output_db, gold_db, output_dir=output_dir, gold_bb_dump=gold_bb_dump, + gold_dump=gold_dump).run_diff()) + + # Compare Exceptions + # replace is a fucntion that replaces strings of digits with 'd' + # this is needed so dates and times will not cause the diff to fail + replace = lambda file: re.sub(re.compile("\d"), "d", file) + output_errors = test_data.get_sorted_errors_path(DBType.OUTPUT) + gold_errors = test_data.get_sorted_errors_path(DBType.GOLD) + passed = TestResultsDiffer._compare_text(output_errors, gold_errors, + replace) + test_data.errors_diff_passed = passed + + # Compare html output + gold_report_path = test_data.get_html_report_path(DBType.GOLD) + output_report_path = test_data.get_html_report_path(DBType.OUTPUT) + passed = TestResultsDiffer._html_report_diff(gold_report_path, + output_report_path) + test_data.html_report_passed = passed + + # Clean up tmp folder + del_dir(test_data.gold_data_dir) + + except sqlite3.OperationalError as e: + Errors.print_error("Tests failed while running the diff:\n") + Errors.print_error(str(e)) + except TskDbDiffException as e: + Errors.print_error(str(e)) + except Exception as e: + Errors.print_error("Tests failed due to an error, try rebuilding or creating gold standards.\n") + Errors.print_error(str(e) + "\n") + print(traceback.format_exc()) + + def _compare_text(output_file, gold_file, 
process=None): + """Compare two text files. + + Args: + output_file: a pathto_File, the output text file + gold_file: a pathto_File, the input text file + pre-process: (optional) a function of String -> String that will be + called on each input file before the diff, if specified. + """ + if(not file_exists(output_file)): + return False + output_data = codecs.open(output_file, "r", "utf_8").read() + gold_data = codecs.open(gold_file, "r", "utf_8").read() + + if process is not None: + output_data = process(output_data) + gold_data = process(gold_data) + + if (not(gold_data == output_data)): + diff_path = os.path.splitext(os.path.basename(output_file))[0] + diff_path += "-Diff.txt" + diff_file = codecs.open(diff_path, "wb", "utf_8") + dffcmdlst = ["diff", output_file, gold_file] + subprocess.call(dffcmdlst, stdout = diff_file) + Errors.add_email_attachment(diff_path) + msg = "There was a difference in " + msg += os.path.basename(output_file) + ".\n" + Errors.add_email_msg(msg) + Errors.print_error(msg) + return False + else: + return True + + def _html_report_diff(gold_report_path, output_report_path): + """Compare the output and gold html reports. + + Args: + gold_report_path: a pathto_Dir, the gold HTML report directory + output_report_path: a pathto_Dir, the output HTML report directory + + Returns: + true, if the reports match, false otherwise. + """ + try: + gold_html_files = get_files_by_ext(gold_report_path, ".html") + output_html_files = get_files_by_ext(output_report_path, ".html") + + #ensure both reports have the same number of files and are in the same order + if(len(gold_html_files) != len(output_html_files)): + msg = "The reports did not have the same number or files." + msg += "One of the reports may have been corrupted." 
+ Errors.print_error(msg) + else: + gold_html_files.sort() + output_html_files.sort() + + total = {"Gold": 0, "New": 0} + for gold, output in zip(gold_html_files, output_html_files): + count = TestResultsDiffer._compare_report_files(gold, output) + total["Gold"] += count[0] + total["New"] += count[1] + + okay = "The test report matches the gold report." + errors=["Gold report had " + str(total["Gold"]) +" errors", "New report had " + str(total["New"]) + " errors."] + print_report(errors, "REPORT COMPARISON", okay) + + if total["Gold"] == total["New"]: + return True + else: + Errors.print_error("The reports did not match each other.\n " + errors[0] +" and the " + errors[1]) + return False + except OSError as e: + e.print_error() + return False + except Exception as e: + Errors.print_error("Error: Unknown fatal error comparing reports.") + Errors.print_error(str(e) + "\n") + logging.critical(traceback.format_exc()) + return False + + def _compare_report_files(a_path, b_path): + """Compares the two specified report html files. + + Args: + a_path: a pathto_File, the first html report file + b_path: a pathto_File, the second html report file + + Returns: + a tuple of (Nat, Nat), which represent the length of each + unordered list in the html report files, or (0, 0) if the + lenghts are the same. + """ + a_file = open(a_path) + b_file = open(b_path) + a = a_file.read() + b = b_file.read() + a = a[a.find("
    "):] + b = b[b.find("
      "):] + + a_list = TestResultsDiffer._split(a, 50) + b_list = TestResultsDiffer._split(b, 50) + if not len(a_list) == len(b_list): + ex = (len(a_list), len(b_list)) + return ex + else: + return (0, 0) + + # Split a string into an array of string of the given size + def _split(input, size): + return [input[start:start+size] for start in range(0, len(input), size)] + + +class Reports(object): + def generate_reports(test_data): + """Generate the reports for a single test + + Args: + test_data: the TestData + """ + Reports._generate_html(test_data) + if test_data.main_config.global_csv: + Reports._generate_csv(test_data.main_config.global_csv, test_data) + else: + Reports._generate_csv(test_data.main_config.csv, test_data) + + def _generate_html(test_data): + """Generate the HTML log file.""" + # If the file doesn't exist yet, this is the first test_config to run for + # this test, so we need to make the start of the html log + html_log = test_data.main_config.html_log + if not file_exists(html_log): + Reports.write_html_head() + with open(html_log, "a") as html: + # The image title + title = "

      " + test_data.image_name + " \ + tested on " + socket.gethostname() + "

      \ +

      \ + Errors and Warnings |\ + Information |\ + General Output |\ + Logs\ +

      " + # The script errors found + if not test_data.overall_passed: + ids = 'errors1' + else: + ids = 'errors' + errors = "
      \ +

      Errors and Warnings

      \ +
      " + # For each error we have logged in the test_config + for error in test_data.printerror: + # Replace < and > to avoid any html display errors + errors += "

      " + error.replace("<", "<").replace(">", ">") + "

      " + # If there is a \n, we probably want a
      in the html + if "\n" in error: + errors += "
      " + errors += "
      " + + # Links to the logs + logs = "
      \ +

      Logs

      \ +
      " + logs_path = test_data.logs_dir + for file in os.listdir(logs_path): + logs += "

      " + file + "

      " + logs += "
      " + + # All the testing information + info = "
      \ +

      Information

      \ +
      \ + " + # The individual elements + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" +# info += "" +# info += "" +# info += "" +# info += "" +# info += "" +# info += "" + info += "
      Image Path:" + test_data.image_file + "
      Image Name:" + test_data.image_name + "
      test_config Output Directory:" + test_data.main_config.output_dir + "
      Autopsy Version:" + test_data.autopsy_version + "
      Heap Space:" + test_data.heap_space + "
      Test Start Date:" + test_data.start_date + "
      Test End Date:" + test_data.end_date + "
      Total Test Time:" + test_data.total_test_time + "
      Total Ingest Time:" + test_data.total_ingest_time + "
      Exceptions Count:" + str(len(get_exceptions(test_data))) + "
      Autopsy OutOfMemoryExceptions:" + str(len(search_logs("OutOfMemoryException", test_data))) + "
      Autopsy OutOfMemoryErrors:" + str(len(search_logs("OutOfMemoryError", test_data))) + "
      Tika OutOfMemoryErrors/Exceptions:" + str(Reports._get_num_memory_errors("tika", test_data)) + "
      Solr OutOfMemoryErrors/Exceptions:" + str(Reports._get_num_memory_errors("solr", test_data)) + "
      TskCoreExceptions:" + str(len(search_log_set("autopsy", "TskCoreException", test_data))) + "
      TskDataExceptions:" + str(len(search_log_set("autopsy", "TskDataException", test_data))) + "
      Ingest Messages Count:" + str(test_data.ingest_messages) + "
      Indexed Files Count:" + str(test_data.indexed_files) + "
      Indexed File Chunks Count:" + str(test_data.indexed_chunks) + "
      Out Of Disk Space:\ +

      (will skew other test results)

      " + str(len(search_log_set("autopsy", "Stopping ingest due to low disk space on disk", test_data))) + "
      TSK Objects Count:" + str(test_data.db_diff_results.output_objs) + "
      Artifacts Count:" + str(test_data.db_diff_results.output_artifacts)+ "
      Attributes Count:" + str(test_data.db_diff_results.output_attrs) + "
      \ +
      " + # For all the general print statements in the test_config + output = "
      \ +

      General Output

      \ +
      " + # For each printout in the test_config's list + for out in test_data.printout: + output += "

      " + out + "

      " + # If there was a \n it probably means we want a
      in the html + if "\n" in out: + output += "
      " + output += "
      " + + html.write(title) + html.write(errors) + html.write(info) + html.write(logs) + html.write(output) + + def write_html_head(html_log): + """Write the top of the HTML log file. + + Args: + html_log: a pathto_File, the global HTML log + """ + with open(str(html_log), "a") as html: + head = "\ + \ + AutopsyTesttest_config Output\ + \ + \ + " + html.write(head) + + def write_html_foot(html_log): + """Write the bottom of the HTML log file. + + Args: + html_log: a pathto_File, the global HTML log + """ + with open(html_log, "a") as html: + head = "" + html.write(head) + + def html_add_images(html_log, full_image_names): + """Add all the image names to the HTML log. + + Args: + full_image_names: a listof_String, each representing an image name + html_log: a pathto_File, the global HTML log + """ + # If the file doesn't exist yet, this is the first test_config to run for + # this test, so we need to make the start of the html log + if not file_exists(html_log): + Reports.write_html_head(html_log) + with open(html_log, "a") as html: + links = [] + for full_name in full_image_names: + name = get_image_name(full_name) + links.append("" + name + "") + html.write("

      " + (" | ".join(links)) + "

      ") + + def _generate_csv(csv_path, test_data): + """Generate the CSV log file""" + # If the CSV file hasn't already been generated, this is the + # first run, and we need to add the column names + if not file_exists(csv_path): + Reports.csv_header(csv_path) + # Now add on the fields to a new row + with open(csv_path, "a") as csv: + # Variables that need to be written + vars = [] + vars.append( test_data.image_file ) + vars.append( test_data.image_name ) + vars.append( test_data.main_config.output_dir ) + vars.append( socket.gethostname() ) + vars.append( test_data.autopsy_version ) + vars.append( test_data.heap_space ) + vars.append( test_data.start_date ) + vars.append( test_data.end_date ) + vars.append( test_data.total_test_time ) + vars.append( test_data.total_ingest_time ) + vars.append( test_data.service_times ) + vars.append( str(len(get_exceptions(test_data))) ) + vars.append( str(Reports._get_num_memory_errors("autopsy", test_data)) ) + vars.append( str(Reports._get_num_memory_errors("tika", test_data)) ) + vars.append( str(Reports._get_num_memory_errors("solr", test_data)) ) + vars.append( str(len(search_log_set("autopsy", "TskCoreException", test_data))) ) + vars.append( str(len(search_log_set("autopsy", "TskDataException", test_data))) ) + vars.append( str(test_data.ingest_messages) ) + vars.append( str(test_data.indexed_files) ) + vars.append( str(test_data.indexed_chunks) ) + vars.append( str(len(search_log_set("autopsy", "Stopping ingest due to low disk space on disk", test_data))) ) +# vars.append( str(test_data.db_diff_results.output_objs) ) +# vars.append( str(test_data.db_diff_results.output_artifacts) ) +# vars.append( str(test_data.db_diff_results.output_objs) ) + vars.append( make_local_path("gold", test_data.image_name, DB_FILENAME) ) +# vars.append( test_data.db_diff_results.get_artifact_comparison() ) +# vars.append( test_data.db_diff_results.get_attribute_comparison() ) + vars.append( make_local_path("gold", test_data.image_name, 
"standard.html") ) + vars.append( str(test_data.html_report_passed) ) + vars.append( test_data.ant_to_string() ) + # Join it together with a ", " + output = "|".join(vars) + output += "\n" + # Write to the log! + csv.write(output) + + def csv_header(csv_path): + """Generate the CSV column names.""" + with open(csv_path, "w") as csv: + titles = [] + titles.append("Image Path") + titles.append("Image Name") + titles.append("Output test_config Directory") + titles.append("Host Name") + titles.append("Autopsy Version") + titles.append("Heap Space Setting") + titles.append("Test Start Date") + titles.append("Test End Date") + titles.append("Total Test Time") + titles.append("Total Ingest Time") + titles.append("Service Times") + titles.append("Autopsy Exceptions") + titles.append("Autopsy OutOfMemoryErrors/Exceptions") + titles.append("Tika OutOfMemoryErrors/Exceptions") + titles.append("Solr OutOfMemoryErrors/Exceptions") + titles.append("TskCoreExceptions") + titles.append("TskDataExceptions") + titles.append("Ingest Messages Count") + titles.append("Indexed Files Count") + titles.append("Indexed File Chunks Count") + titles.append("Out Of Disk Space") +# titles.append("Tsk Objects Count") +# titles.append("Artifacts Count") +# titles.append("Attributes Count") + titles.append("Gold Database Name") +# titles.append("Artifacts Comparison") +# titles.append("Attributes Comparison") + titles.append("Gold Report Name") + titles.append("Report Comparison") + titles.append("Ant Command Line") + output = "|".join(titles) + output += "\n" + csv.write(output) + + def _get_num_memory_errors(type, test_data): + """Get the number of OutOfMemory errors and Exceptions. + + Args: + type: a String representing the type of log to check. + test_data: the TestData to examine. 
+ """ + return (len(search_log_set(type, "OutOfMemoryError", test_data)) + + len(search_log_set(type, "OutOfMemoryException", test_data))) + +class Logs(object): + + def generate_log_data(test_data): + """Find and handle relevent data from the Autopsy logs. + + Args: + test_data: the TestData whose logs to examine + """ + Logs._generate_common_log(test_data) + try: + Logs._fill_ingest_data(test_data) + except Exception as e: + Errors.print_error("Error: Unknown fatal error when filling test_config data.") + Errors.print_error(str(e) + "\n") + logging.critical(traceback.format_exc()) + # If running in verbose mode (-v) + if test_data.main_config.args.verbose: + errors = Logs._report_all_errors() + okay = "No warnings or errors in any log files." + print_report(errors, "VERBOSE", okay) + + def _generate_common_log(test_data): + """Generate the common log, the log of all exceptions and warnings from + each log file generated by Autopsy. + + Args: + test_data: the TestData to generate a log for + """ + try: + logs_path = test_data.logs_dir + common_log = codecs.open(test_data.common_log_path, "w", "utf_8") + warning_log = codecs.open(test_data.warning_log, "w", "utf_8") + common_log.write("--------------------------------------------------\n") + common_log.write(test_data.image_name + "\n") + common_log.write("--------------------------------------------------\n") + rep_path = make_local_path(test_data.main_config.output_dir) + rep_path = rep_path.replace("\\\\", "\\") + for file in os.listdir(logs_path): + log = codecs.open(make_path(logs_path, file), "r", "utf_8") + for line in log: + line = line.replace(rep_path, "test_data") + if line.startswith("Exception"): + common_log.write(file +": " + line) + elif line.startswith("Error"): + common_log.write(file +": " + line) + elif line.startswith("SEVERE"): + common_log.write(file +":" + line) + else: + warning_log.write(file +": " + line) + log.close() + common_log.write("\n") + common_log.close() + 
print(test_data.sorted_log) + srtcmdlst = ["sort", test_data.common_log_path, "-o", test_data.sorted_log] + subprocess.call(srtcmdlst) + except (OSError, IOError) as e: + Errors.print_error("Error: Unable to generate the common log.") + Errors.print_error(str(e) + "\n") + Errors.print_error(traceback.format_exc()) + logging.critical(traceback.format_exc()) + + def _fill_ingest_data(test_data): + """Fill the TestDatas variables that require the log files. + + Args: + test_data: the TestData to modify + """ + try: + # Open autopsy.log.0 + log_path = make_path(test_data.logs_dir, "autopsy.log.0") + log = open(log_path) + + # Set the TestData start time based off the first line of autopsy.log.0 + # *** If logging time format ever changes this will break *** + test_data.start_date = log.readline().split(" org.")[0] + + # Set the test_data ending time based off the "create" time (when the file was copied) + test_data.end_date = time.ctime(os.path.getmtime(log_path)) + except IOError as e: + Errors.print_error("Error: Unable to open autopsy.log.0.") + Errors.print_error(str(e) + "\n") + logging.warning(traceback.format_exc()) + # Start date must look like: "Jul 16, 2012 12:57:53 PM" + # End date must look like: "Mon Jul 16 13:02:42 2012" + # *** If logging time format ever changes this will break *** + start = datetime.datetime.strptime(test_data.start_date, "%b %d, %Y %I:%M:%S %p") + end = datetime.datetime.strptime(test_data.end_date, "%a %b %d %H:%M:%S %Y") + test_data.total_test_time = str(end - start) + + try: + # Set Autopsy version, heap space, ingest time, and service times + + version_line = search_logs("INFO: Application name: Autopsy, version:", test_data)[0] + test_data.autopsy_version = get_word_at(version_line, 5).rstrip(",") + + test_data.heap_space = search_logs("Heap memory usage:", test_data)[0].rstrip().split(": ")[1] + + ingest_line = search_logs("Ingest (including enqueue)", test_data)[0] + test_data.total_ingest_time = get_word_at(ingest_line, 
6).rstrip() + + message_line = search_log_set("autopsy", "Ingest messages count:", test_data)[0] + test_data.ingest_messages = int(message_line.rstrip().split(": ")[2]) + + files_line = search_log_set("autopsy", "Indexed files count:", test_data)[0] + test_data.indexed_files = int(files_line.rstrip().split(": ")[2]) + + chunks_line = search_log_set("autopsy", "Indexed file chunks count:", test_data)[0] + test_data.indexed_chunks = int(chunks_line.rstrip().split(": ")[2]) + except (OSError, IOError) as e: + Errors.print_error("Error: Unable to find the required information to fill test_config data.") + Errors.print_error(str(e) + "\n") + logging.critical(traceback.format_exc()) + print(traceback.format_exc()) + try: + service_lines = search_log("autopsy.log.0", "to process()", test_data) + service_list = [] + for line in service_lines: + words = line.split(" ") + # Kind of forcing our way into getting this data + # If this format changes, the tester will break + i = words.index("secs.") + times = words[i-4] + " " + times += words[i-3] + " " + times += words[i-2] + " " + times += words[i-1] + " " + times += words[i] + service_list.append(times) + test_data.service_times = "; ".join(service_list) + except (OSError, IOError) as e: + Errors.print_error("Error: Unknown fatal error when finding service times.") + Errors.print_error(str(e) + "\n") + logging.critical(traceback.format_exc()) + + def _report_all_errors(): + """Generate a list of all the errors found in the common log. + + Returns: + a listof_String, the errors found in the common log + """ + try: + return get_warnings() + get_exceptions() + except (OSError, IOError) as e: + Errors.print_error("Error: Unknown fatal error when reporting all errors.") + Errors.print_error(str(e) + "\n") + logging.warning(traceback.format_exc()) + + def search_common_log(string, test_data): + """Search the common log for any instances of a given string. + + Args: + string: the String to search for. 
+ test_data: the TestData that holds the log to search. + + Returns: + a listof_String, all the lines that the string is found on + """ + results = [] + log = codecs.open(test_data.common_log_path, "r", "utf_8") + for line in log: + if string in line: + results.append(line) + log.close() + return results + + +def print_report(errors, name, okay): + """Print a report with the specified information. + + Args: + errors: a listof_String, the errors to report. + name: a String, the name of the report. + okay: the String to print when there are no errors. + """ + if errors: + Errors.print_error("--------< " + name + " >----------") + for error in errors: + Errors.print_error(str(error)) + Errors.print_error("--------< / " + name + " >--------\n") + else: + Errors.print_out("-----------------------------------------------------------------") + Errors.print_out("< " + name + " - " + okay + " />") + Errors.print_out("-----------------------------------------------------------------\n") + + +def get_exceptions(test_data): + """Get a list of the exceptions in the autopsy logs. + + Args: + test_data: the TestData to use to find the exceptions. + Returns: + a listof_String, the exceptions found in the logs. + """ + exceptions = [] + logs_path = test_data.logs_dir + results = [] + for file in os.listdir(logs_path): + if "autopsy.log" in file: + log = codecs.open(make_path(logs_path, file), "r", "utf_8") + ex = re.compile("\SException") + er = re.compile("\SError") + for line in log: + if ex.search(line) or er.search(line): + exceptions.append(line) + log.close() + return exceptions + +def get_warnings(test_data): + """Get a list of the warnings listed in the common log. + + Args: + test_data: the TestData to use to find the warnings + + Returns: + listof_String, the warnings found. 
+ """ + warnings = [] + common_log = codecs.open(test_data.warning_log, "r", "utf_8") + for line in common_log: + if "warning" in line.lower(): + warnings.append(line) + common_log.close() + return warnings + +def copy_logs(test_data): + """Copy the Autopsy generated logs to output directory. + + Args: + test_data: the TestData whose logs will be copied + """ + try: + log_dir = os.path.join("..", "..", "Testing","build","test","qa-functional","work","userdir0","var","log") + shutil.copytree(log_dir, test_data.logs_dir) + except OSError as e: + printerror(test_data,"Error: Failed to copy the logs.") + printerror(test_data,str(e) + "\n") + logging.warning(traceback.format_exc()) + +def setDay(): + global Day + Day = int(strftime("%d", localtime())) + +def getLastDay(): + return Day + +def getDay(): + return int(strftime("%d", localtime())) + +def newDay(): + return getLastDay() != getDay() + +#------------------------------------------------------------# +# Exception classes to manage "acceptable" thrown exceptions # +# versus unexpected and fatal exceptions # +#------------------------------------------------------------# + +class FileNotFoundException(Exception): + """ + If a file cannot be found by one of the helper functions, + they will throw a FileNotFoundException unless the purpose + is to return False. 
+ """ + def __init__(self, file): + self.file = file + self.strerror = "FileNotFoundException: " + file + + def print_error(self): + Errors.print_error("Error: File could not be found at:") + Errors.print_error(self.file + "\n") + + def error(self): + error = "Error: File could not be found at:\n" + self.file + "\n" + return error + +class DirNotFoundException(Exception): + """ + If a directory cannot be found by a helper function, + it will throw this exception + """ + def __init__(self, dir): + self.dir = dir + self.strerror = "DirNotFoundException: " + dir + + def print_error(self): + Errors.print_error("Error: Directory could not be found at:") + Errors.print_error(self.dir + "\n") + + def error(self): + error = "Error: Directory could not be found at:\n" + self.dir + "\n" + return error + + +class Errors: + """A class used to manage error reporting. + + Attributes: + printout: a listof_String, the non-error messages that were printed + printerror: a listof_String, the error messages that were printed + email_body: a String, the body of the report email + email_msg_prefix: a String, the prefix for lines added to the email + email_attchs: a listof_pathto_File, the files to be attached to the + report email + """ + printout = [] + printerror = [] + email_body = "" + email_msg_prefix = "Configuration" + email_attachs = [] + + def set_testing_phase(image_name): + """Change the email message prefix to be the given testing phase. + + Args: + image_name: a String, representing the current image being tested + """ + Errors.email_msg_prefix = image_name + + def print_out(msg): + """Print out an informational message. + + Args: + msg: a String, the message to be printed + """ + print(msg) + Errors.printout.append(msg) + + def print_error(msg): + """Print out an error message. + + Args: + msg: a String, the error message to be printed. 
+ """ + print(msg) + Errors.printerror.append(msg) + + def clear_print_logs(): + """Reset the image-specific attributes of the Errors class.""" + Errors.printout = [] + Errors.printerror = [] + + def add_email_msg(msg): + """Add the given message to the body of the report email. + + Args: + msg: a String, the message to be added to the email + """ + Errors.email_body += Errors.email_msg_prefix + ":" + msg + + def add_email_attachment(path): + """Add the given file to be an attachment for the report email + + Args: + file: a pathto_File, the file to add + """ + Errors.email_attachs.append(path) + + +class DiffResults(object): + """Container for the results of the database diff tests. + + Stores artifact, object, and attribute counts and comparisons generated by + TskDbDiff. + + Attributes: + gold_attrs: a Nat, the number of gold attributes + output_attrs: a Nat, the number of output attributes + gold_objs: a Nat, the number of gold objects + output_objs: a Nat, the number of output objects + artifact_comp: a listof_String, describing the differences + attribute_comp: a listof_String, describing the differences + passed: a boolean, did the diff pass? 
+ """ + def __init__(self, tsk_diff): + """Inits a DiffResults + + Args: + tsk_diff: a TskDBDiff + """ + self.gold_attrs = tsk_diff.gold_attributes + self.output_attrs = tsk_diff.autopsy_attributes + self.gold_objs = tsk_diff.gold_objects + self.output_objs = tsk_diff.autopsy_objects + self.artifact_comp = tsk_diff.artifact_comparison + self.attribute_comp = tsk_diff.attribute_comparison + self.gold_artifacts = len(tsk_diff.gold_artifacts) + self.output_artifacts = len(tsk_diff.autopsy_artifacts) + self.passed = tsk_diff.passed + + def get_artifact_comparison(self): + if not self.artifact_comp: + return "All counts matched" + else: + return "; ".join(self.artifact_comp) + + def get_attribute_comparison(self): + if not self.attribute_comp: + return "All counts matched" + list = [] + for error in self.attribute_comp: + list.append(error) + return ";".join(list) + + +#-------------------------------------------------------------# +# Parses argv and stores booleans to match command line input # +#-------------------------------------------------------------# +class Args(object): + """A container for command line options and arguments. 
+ + Attributes: + single: a boolean indicating whether to run in single file mode + single_file: an Image to run the test on + rebuild: a boolean indicating whether to run in rebuild mode + list: a boolean indicating a config file was specified + unallocated: a boolean indicating unallocated space should be ignored + ignore: a boolean indicating the input directory should be ingnored + keep: a boolean indicating whether to keep the SOLR index + verbose: a boolean indicating whether verbose output should be printed + exeception: a boolean indicating whether errors containing exception + exception_string should be printed + exception_sring: a String representing and exception name + fr: a boolean indicating whether gold standard images will be downloaded + """ + def __init__(self): + self.single = False + self.single_file = "" + self.rebuild = False + self.list = False + self.config_file = "" + self.unallocated = False + self.ignore = False + self.keep = False + self.verbose = False + self.exception = False + self.exception_string = "" + self.fr = False + self.email_enabled = False + + def parse(self): + """Get the command line arguments and parse them.""" + nxtproc = [] + nxtproc.append("python3") + nxtproc.append(sys.argv.pop(0)) + while sys.argv: + arg = sys.argv.pop(0) + nxtproc.append(arg) + if(arg == "-f"): + #try: @@@ Commented out until a more specific except statement is added + arg = sys.argv.pop(0) + print("Running on a single file:") + print(path_fix(arg) + "\n") + self.single = True + self.single_file = path_fix(arg) + #except: + # print("Error: No single file given.\n") + # return False + elif(arg == "-r" or arg == "--rebuild"): + print("Running in rebuild mode.\n") + self.rebuild = True + elif(arg == "-l" or arg == "--list"): + try: + arg = sys.argv.pop(0) + nxtproc.append(arg) + print("Running from configuration file:") + print(arg + "\n") + self.list = True + self.config_file = arg + except: + print("Error: No configuration file given.\n") + return 
False + elif(arg == "-u" or arg == "--unallocated"): + print("Ignoring unallocated space.\n") + self.unallocated = True + elif(arg == "-k" or arg == "--keep"): + print("Keeping the Solr index.\n") + self.keep = True + elif(arg == "-v" or arg == "--verbose"): + print("Running in verbose mode:") + print("Printing all thrown exceptions.\n") + self.verbose = True + elif(arg == "-e" or arg == "--exception"): + try: + arg = sys.argv.pop(0) + nxtproc.append(arg) + print("Running in exception mode: ") + print("Printing all exceptions with the string '" + arg + "'\n") + self.exception = True + self.exception_string = arg + except: + print("Error: No exception string given.") + elif arg == "-h" or arg == "--help": + print(usage()) + return False + elif arg == "-fr" or arg == "--forcerun": + print("Not downloading new images") + self.fr = True + elif arg == "-e" or arg == "-email": + self.email_enabled = True + else: + print(usage()) + return False + # Return the args were sucessfully parsed + return self._sanity_check() + + def _sanity_check(self): + """Check to make sure there are no conflicting arguments and the + specified files exist. 
+ + Returns: + False if there are conflicting arguments or a specified file does + not exist, True otherwise + """ + if self.single and self.list: + print("Cannot run both from config file and on a single file.") + return False + if self.list: + if not file_exists(self.config_file): + print("Configuration file does not exist at:", + self.config_file) + return False + elif self.single: + if not file_exists(self.single_file): + msg = "Image file does not exist at: " + self.single_file + return False + if (not self.single) and (not self.ignore) and (not self.list): + self.config_file = "config.xml" + if not file_exists(self.config_file): + msg = "Configuration file does not exist at: " + self.config_file + return False + + return True + +#### +# Helper Functions +#### +def search_logs(string, test_data): + """Search through all the known log files for a given string. + + Args: + string: the String to search for. + test_data: the TestData that holds the logs to search. + + Returns: + a listof_String, the lines that contained the given String. + """ + logs_path = test_data.logs_dir + results = [] + for file in os.listdir(logs_path): + log = codecs.open(make_path(logs_path, file), "r", "utf_8") + for line in log: + if string in line: + results.append(line) + log.close() + return results + +def search_log(log, string, test_data): + """Search the given log for any instances of a given string. + + Args: + log: a pathto_File, the log to search in + string: the String to search for. + test_data: the TestData that holds the log to search. 
+ + Returns: + a listof_String, all the lines that the string is found on + """ + logs_path = make_path(test_data.logs_dir, log) + try: + results = [] + log = codecs.open(logs_path, "r", "utf_8") + for line in log: + if string in line: + results.append(line) + log.close() + if results: + return results + except: + raise FileNotFoundException(logs_path) + +# Search through all the the logs of the given type +# Types include autopsy, tika, and solr +def search_log_set(type, string, test_data): + """Search through all logs to the given type for the given string. + + Args: + type: the type of log to search in. + string: the String to search for. + test_data: the TestData containing the logs to search. + + Returns: + a listof_String, the lines on which the String was found. + """ + logs_path = test_data.logs_dir + results = [] + for file in os.listdir(logs_path): + if type in file: + log = codecs.open(make_path(logs_path, file), "r", "utf_8") + for line in log: + if string in line: + results.append(line) + log.close() + return results + + +def clear_dir(dir): + """Clears all files from a directory and remakes it. + + Args: + dir: a pathto_Dir, the directory to clear + """ + try: + if dir_exists(dir): + shutil.rmtree(dir) + os.makedirs(dir) + return True; + except OSError as e: + printerror(test_data,"Error: Cannot clear the given directory:") + printerror(test_data,dir + "\n") + print(str(e)) + return False; + +def del_dir(dir): + """Delete the given directory. + + Args: + dir: a pathto_Dir, the directory to delete + """ + try: + if dir_exists(dir): + shutil.rmtree(dir) + return True; + except: + printerror(test_data,"Error: Cannot delete the given directory:") + printerror(test_data,dir + "\n") + return False; + +def get_file_in_dir(dir, ext): + """Returns the first file in the given directory with the given extension. 
+ + Args: + dir: a pathto_Dir, the directory to search + ext: a String, the extension to search for + + Returns: + pathto_File, the file that was found + """ + try: + for file in os.listdir(dir): + if file.endswith(ext): + return make_path(dir, file) + # If nothing has been found, raise an exception + raise FileNotFoundException(dir) + except: + raise DirNotFoundException(dir) + +def find_file_in_dir(dir, name, ext): + """Find the file with the given name in the given directory. + + Args: + dir: a pathto_Dir, the directory to search + name: a String, the basename of the file to search for + ext: a String, the extension of the file to search for + """ + try: + for file in os.listdir(dir): + if file.startswith(name): + if file.endswith(ext): + return make_path(dir, file) + raise FileNotFoundException(dir) + except: + raise DirNotFoundException(dir) + + +class OS: + LINUX, MAC, WIN, CYGWIN = range(4) + + +if __name__ == "__main__": + global SYS + if _platform == "linux" or _platform == "linux2": + SYS = OS.LINUX + elif _platform == "darwin": + SYS = OS.MAC + elif _platform == "win32": + SYS = OS.WIN + elif _platform == "cygwin": + SYS = OS.CYGWIN + + if SYS is OS.WIN or SYS is OS.CYGWIN: + main() + else: + print("We only support Windows and Cygwin at this time.") diff --git a/test/script/srcupdater.py b/test/script/srcupdater.py index 99a393d9eb..c8c7d5410b 100644 --- a/test/script/srcupdater.py +++ b/test/script/srcupdater.py @@ -1,187 +1,187 @@ -import codecs -import datetime -import logging -import os -import re -import shutil -import socket -import sqlite3 -import subprocess -import sys -from sys import platform as _platform -import time -import traceback -import xml -from xml.dom.minidom import parse, parseString -import Emailer -from regression_utils import * - -def compile(errore, attachli, parsedin): - global redo - global tryredo - global failedbool - global errorem - errorem = errore - global attachl - attachl = attachli - global passed - global parsed - 
parsed = parsedin - passed = True - tryredo = False - redo = True - while(redo): - passed = True - if(passed): - gitPull("sleuthkit") - if(passed): - vsBuild() - if(passed): - gitPull("autopsy") - if(passed): - antBuild("datamodel", False) - if(passed): - antBuild("autopsy", True) - if(passed): - redo = False - else: - print("Compile Failed") - time.sleep(3600) - attachl = [] - errorem = "The test standard didn't match the gold standard.\n" - failedbool = False - if(tryredo): - errorem = "" - errorem += "Rebuilt properly.\n" - Emailer.send_email(parsed, errorem, attachl, True) - attachl = [] - passed = True - -#Pulls from git -def gitPull(TskOrAutopsy): - global SYS - global errorem - global attachl - ccwd = "" - gppth = make_local_path("..", "GitPullOutput" + TskOrAutopsy + ".txt") - attachl.append(gppth) - gpout = open(gppth, 'a') - toPull = "https://www.github.com/sleuthkit/" + TskOrAutopsy - call = ["git", "pull", toPull] - if TskOrAutopsy == "sleuthkit": - ccwd = os.path.join("..", "..", "..", "sleuthkit") - else: - ccwd = os.path.join("..", "..") - subprocess.call(call, stdout=sys.stdout, cwd=ccwd) - gpout.close() - - -#Builds TSK as a win32 applicatiion -def vsBuild(): - global redo - global tryredo - global passed - global parsed - #Please ensure that the current working directory is $autopsy/testing/script - oldpath = os.getcwd() - os.chdir(os.path.join("..", "..", "..","sleuthkit", "win32")) - vs = [] - vs.append("/cygdrive/c/windows/microsoft.NET/framework/v4.0.30319/MSBuild.exe") - vs.append(os.path.join("Tsk-win.sln")) - vs.append("/p:configuration=release") - vs.append("/p:platform=win32") - vs.append("/t:clean") - vs.append("/t:rebuild") - print(vs) - VSpth = make_local_path("..", "VSOutput.txt") - VSout = open(VSpth, 'a') - subprocess.call(vs, stdout=VSout) - VSout.close() - os.chdir(oldpath) - chk = os.path.join("..", "..", "..","sleuthkit", "win32", "Release", "libtsk_jni.dll") - try: - open(chk) - except IOError as e: - global errorem - global 
attachl - if(not tryredo): - errorem += "LIBTSK C++ failed to build.\n" - attachl.append(VSpth) - send_email(parsed, errorem, attachl, False) - tryredo = True - passed = False - redo = True - - - -#Builds Autopsy or the Datamodel -def antBuild(which, Build): - global redo - global passed - global tryredo - global parsed - directory = os.path.join("..", "..") - ant = [] - if which == "datamodel": - directory = os.path.join("..", "..", "..", "sleuthkit", "bindings", "java") - ant.append("ant") - ant.append("-f") - ant.append(directory) - ant.append("clean") - if(Build): - ant.append("build") - else: - ant.append("dist") - antpth = make_local_path("..", "ant" + which + "Output.txt") - antout = open(antpth, 'a') - succd = subprocess.call(ant, stdout=antout) - antout.close() - global errorem - global attachl - if which == "datamodel": - chk = os.path.join("..", "..", "..","sleuthkit", "bindings", "java", "dist", "TSK_DataModel.jar") - try: - open(chk) - except IOError as e: - if(not tryredo): - errorem += "DataModel Java build failed.\n" - attachl.append(antpth) - Emailer.send_email(parsed, errorem, attachl, False) - passed = False - tryredo = True - elif (succd != 0 and (not tryredo)): - errorem += "Autopsy build failed.\n" - attachl.append(antpth) - Emailer.send_email(parsed, errorem, attachl, False) - tryredo = True - elif (succd != 0): - passed = False - - -def main(): - errore = "" - attachli = [] - config_file = "" - arg = sys.argv.pop(0) - arg = sys.argv.pop(0) - config_file = arg - parsedin = parse(config_file) - compile(errore, attachli, parsedin) - -class OS: - LINUX, MAC, WIN, CYGWIN = range(4) -if __name__ == "__main__": - global SYS - if _platform == "linux" or _platform == "linux2": - SYS = OS.LINUX - elif _platform == "darwin": - SYS = OS.MAC - elif _platform == "win32": - SYS = OS.WIN - elif _platform == "cygwin": - SYS = OS.CYGWIN - - if SYS is OS.WIN or SYS is OS.CYGWIN: - main() - else: - print("We only support Windows and Cygwin at this time.") 
+import codecs +import datetime +import logging +import os +import re +import shutil +import socket +import sqlite3 +import subprocess +import sys +from sys import platform as _platform +import time +import traceback +import xml +from xml.dom.minidom import parse, parseString +import Emailer +from regression_utils import * + +def compile(errore, attachli, parsedin): + global redo + global tryredo + global failedbool + global errorem + errorem = errore + global attachl + attachl = attachli + global passed + global parsed + parsed = parsedin + passed = True + tryredo = False + redo = True + while(redo): + passed = True + if(passed): + gitPull("sleuthkit") + if(passed): + vsBuild() + if(passed): + gitPull("autopsy") + if(passed): + antBuild("datamodel", False) + if(passed): + antBuild("autopsy", True) + if(passed): + redo = False + else: + print("Compile Failed") + time.sleep(3600) + attachl = [] + errorem = "The test standard didn't match the gold standard.\n" + failedbool = False + if(tryredo): + errorem = "" + errorem += "Rebuilt properly.\n" + Emailer.send_email(parsed, errorem, attachl, True) + attachl = [] + passed = True + +#Pulls from git +def gitPull(TskOrAutopsy): + global SYS + global errorem + global attachl + ccwd = "" + gppth = make_local_path("..", "GitPullOutput" + TskOrAutopsy + ".txt") + attachl.append(gppth) + gpout = open(gppth, 'a') + toPull = "https://www.github.com/sleuthkit/" + TskOrAutopsy + call = ["git", "pull", toPull] + if TskOrAutopsy == "sleuthkit": + ccwd = os.path.join("..", "..", "..", "sleuthkit") + else: + ccwd = os.path.join("..", "..") + subprocess.call(call, stdout=sys.stdout, cwd=ccwd) + gpout.close() + + +#Builds TSK as a win32 applicatiion +def vsBuild(): + global redo + global tryredo + global passed + global parsed + #Please ensure that the current working directory is $autopsy/testing/script + oldpath = os.getcwd() + os.chdir(os.path.join("..", "..", "..","sleuthkit", "win32")) + vs = [] + 
vs.append("/cygdrive/c/windows/microsoft.NET/framework/v4.0.30319/MSBuild.exe") + vs.append(os.path.join("Tsk-win.sln")) + vs.append("/p:configuration=release") + vs.append("/p:platform=win32") + vs.append("/t:clean") + vs.append("/t:rebuild") + print(vs) + VSpth = make_local_path("..", "VSOutput.txt") + VSout = open(VSpth, 'a') + subprocess.call(vs, stdout=VSout) + VSout.close() + os.chdir(oldpath) + chk = os.path.join("..", "..", "..","sleuthkit", "win32", "Release", "libtsk_jni.dll") + try: + open(chk) + except IOError as e: + global errorem + global attachl + if(not tryredo): + errorem += "LIBTSK C++ failed to build.\n" + attachl.append(VSpth) + send_email(parsed, errorem, attachl, False) + tryredo = True + passed = False + redo = True + + + +#Builds Autopsy or the Datamodel +def antBuild(which, Build): + global redo + global passed + global tryredo + global parsed + directory = os.path.join("..", "..") + ant = [] + if which == "datamodel": + directory = os.path.join("..", "..", "..", "sleuthkit", "bindings", "java") + ant.append("ant") + ant.append("-f") + ant.append(directory) + ant.append("clean") + if(Build): + ant.append("build") + else: + ant.append("dist") + antpth = make_local_path("..", "ant" + which + "Output.txt") + antout = open(antpth, 'a') + succd = subprocess.call(ant, stdout=antout) + antout.close() + global errorem + global attachl + if which == "datamodel": + chk = os.path.join("..", "..", "..","sleuthkit", "bindings", "java", "dist", "TSK_DataModel.jar") + try: + open(chk) + except IOError as e: + if(not tryredo): + errorem += "DataModel Java build failed.\n" + attachl.append(antpth) + Emailer.send_email(parsed, errorem, attachl, False) + passed = False + tryredo = True + elif (succd != 0 and (not tryredo)): + errorem += "Autopsy build failed.\n" + attachl.append(antpth) + Emailer.send_email(parsed, errorem, attachl, False) + tryredo = True + elif (succd != 0): + passed = False + + +def main(): + errore = "" + attachli = [] + config_file = "" 
+ arg = sys.argv.pop(0) + arg = sys.argv.pop(0) + config_file = arg + parsedin = parse(config_file) + compile(errore, attachli, parsedin) + +class OS: + LINUX, MAC, WIN, CYGWIN = range(4) +if __name__ == "__main__": + global SYS + if _platform == "linux" or _platform == "linux2": + SYS = OS.LINUX + elif _platform == "darwin": + SYS = OS.MAC + elif _platform == "win32": + SYS = OS.WIN + elif _platform == "cygwin": + SYS = OS.CYGWIN + + if SYS is OS.WIN or SYS is OS.CYGWIN: + main() + else: + print("We only support Windows and Cygwin at this time.") diff --git a/thunderbirdparser/manifest.mf b/thunderbirdparser/manifest.mf index c16a2f4c01..fc34c0e90a 100644 --- a/thunderbirdparser/manifest.mf +++ b/thunderbirdparser/manifest.mf @@ -1,7 +1,7 @@ -Manifest-Version: 1.0 -AutoUpdate-Show-In-Client: true -OpenIDE-Module: org.sleuthkit.autopsy.thunderbirdparser/3 -OpenIDE-Module-Implementation-Version: 9 -OpenIDE-Module-Layer: org/sleuthkit/autopsy/thunderbirdparser/layer.xml -OpenIDE-Module-Localizing-Bundle: org/sleuthkit/autopsy/thunderbirdparser/Bundle.properties - +Manifest-Version: 1.0 +AutoUpdate-Show-In-Client: true +OpenIDE-Module: org.sleuthkit.autopsy.thunderbirdparser/3 +OpenIDE-Module-Implementation-Version: 9 +OpenIDE-Module-Layer: org/sleuthkit/autopsy/thunderbirdparser/layer.xml +OpenIDE-Module-Localizing-Bundle: org/sleuthkit/autopsy/thunderbirdparser/Bundle.properties + diff --git a/thunderbirdparser/nbproject/project.properties b/thunderbirdparser/nbproject/project.properties index 6a243df466..0735c621fa 100644 --- a/thunderbirdparser/nbproject/project.properties +++ b/thunderbirdparser/nbproject/project.properties @@ -1,6 +1,6 @@ -javac.source=1.7 -javac.compilerargs=-Xlint -Xlint:-serial -license.file=../LICENSE-2.0.txt -nbm.homepage=http://www.sleuthkit.org/autopsy/ -nbm.needs.restart=true -spec.version.base=1.2 +javac.source=1.7 +javac.compilerargs=-Xlint -Xlint:-serial +license.file=../LICENSE-2.0.txt +nbm.homepage=http://www.sleuthkit.org/autopsy/ 
+nbm.needs.restart=true +spec.version.base=1.2 diff --git a/update_versions.py b/update_versions.py index 2883021c9f..fa228d0cca 100644 --- a/update_versions.py +++ b/update_versions.py @@ -1,939 +1,939 @@ -# -# Autopsy Forensic Browser -# -# Copyright 2012-2013 Basis Technology Corp. -# Contact: carrier sleuthkit org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -####################### -# This script exists to help us determine update the library -# versions appropriately. See this page for version details. -# -# http://wiki.sleuthkit.org/index.php?title=Autopsy_3_Module_Versions -# -# The basic idea is that this script uses javadoc/jdiff to -# compare the current state of the source code to the last -# tag and identifies if APIs were removed, added, etc. 
-# -# When run from the Autopsy build script, this script will: -# - Clone Autopsy and checkout to the previous release tag -# as found in the NEWS.txt file -# - Auto-discover all modules and packages -# - Run jdiff, comparing the current and previous modules -# - Use jdiff's output to determine if each module -# a) has no changes -# b) has backwards compatible changes -# c) has backwards incompatible changes -# - Based off it's compatibility, updates each module's -# a) Major version -# b) Specification version -# c) Implementation version -# - Updates the dependencies on each module depending on the -# updated version numbers -# -# Optionally, when run from the command line, one can provide the -# desired tag to compare the current version to, the directory for -# the current version of Autopsy, and whether to automatically -# update the version numbers and dependencies. -# ------------------------------------------------------------ - -import errno -import os -import shutil -import stat -import subprocess -import sys -import traceback -from os import remove, close -from shutil import move -from tempfile import mkstemp -from xml.dom.minidom import parse, parseString - -# Jdiff return codes. 
Described in more detail further on -NO_CHANGES = 100 -COMPATIBLE = 101 -NON_COMPATIBLE = 102 -ERROR = 1 - -# An Autopsy module object -class Module: - # Initialize it with a name, return code, and version numbers - def __init__(self, name=None, ret=None, versions=None): - self.name = name - self.ret = ret - self.versions = versions - # As a string, the module should be it's name - def __str__(self): - return self.name - def __repr__(self): - return self.name - # When compared to another module, the two are equal if the names are the same - def __cmp__(self, other): - if isinstance(other, Module): - if self.name == other.name: - return 0 - elif self.name < other.name: - return -1 - else: - return 1 - return 1 - def __eq__(self, other): - if isinstance(other, Module): - if self.name == other.name: - return True - return False - def set_name(self, name): - self.name = name - def set_ret(self, ret): - self.ret = ret - def set_versions(self, versions): - self.versions = versions - def spec(self): - return self.versions[0] - def impl(self): - return self.versions[1] - def release(self): - return self.versions[2] - -# Representation of the Specification version number -class Spec: - # Initialize specification number, where num is a string like x.y - def __init__(self, num): - self.third = None - spec_nums = num.split(".") - if len(spec_nums) == 3: - final = spec_nums[2] - self.third = int(final) - - l, r = spec_nums[0], spec_nums[1] - - self.left = int(l) - self.right = int(r) - - def __str__(self): - return self.get() - def __cmp__(self, other): - if isinstance(other, Spec): - if self.left == other.left: - if self.right == other.right: - return 0 - if self.right < other.right: - return -1 - return 1 - if self.left < other.left: - return -1 - return 1 - elif isinstance(other, str): - l, r = other.split(".") - if self.left == int(l): - if self.right == int(r): - return 0 - if self.right < int(r): - return -1 - return 1 - if self.left < int(l): - return -1 - return 1 - 
return -1 - - def overflow(self): - return str(self.left + 1) + ".0" - def increment(self): - return str(self.left) + "." + str(self.right + 1) - def get(self): - spec_str = str(self.left) + "." + str(self.right) - if self.third is not None: - spec_str += "." + str(self.final) - return spec_str - def set(self, num): - if isinstance(num, str): - l, r = num.split(".") - self.left = int(l) - self.right = int(r) - elif isinstance(num, Spec): - self.left = num.left - self.right = num.right - return self - -# ================================ # -# Core Functions # -# ================================ # - -# Given a list of modules and the names for each version, compare -# the generated jdiff XML for each module and output the jdiff -# JavaDocs. -# -# modules: the list of all modules both versions have in common -# apiname_tag: the api name of the previous version, most likely the tag -# apiname_cur: the api name of the current version, most likely "Current" -# -# returns the exit code from the modified jdiff.jar -# return code 1 = error in jdiff -# return code 100 = no changes -# return code 101 = compatible changes -# return code 102 = incompatible changes -def compare_xml(module, apiname_tag, apiname_cur): - global docdir - make_dir(docdir) - null_file = fix_path(os.path.abspath("./thirdparty/jdiff/v-custom/lib/Null.java")) - jdiff = fix_path(os.path.abspath("./thirdparty/jdiff/v-custom/jdiff.jar")) - oldapi = fix_path("build/jdiff-xml/" + apiname_tag + "-" + module.name) - newapi = fix_path("build/jdiff-xml/" + apiname_cur + "-" + module.name) - docs = fix_path(docdir + "/" + module.name) - # Comments are strange. They look for a file with additional user comments in a - # directory like docs/user_comments_for_xyz. The problem being that xyz is the - # path to the new/old api. So xyz turns into multiple directories for us. - # i.e. 
user_comments_for_build/jdiff-xml/[tag name]-[module name]_to_build/jdiff-xml - comments = fix_path(docs + "/user_comments_for_build") - jdiff_com = fix_path(comments + "/jdiff-xml") - tag_comments = fix_path(jdiff_com + "/" + apiname_tag + "-" + module.name + "_to_build") - jdiff_tag_com = fix_path(tag_comments + "/jdiff-xml") - - if not os.path.exists(jdiff): - print("JDIFF doesn't exist.") - - make_dir(docs) - make_dir(comments) - make_dir(jdiff_com) - make_dir(tag_comments) - make_dir(jdiff_tag_com) - make_dir("jdiff-logs") - log = open("jdiff-logs/COMPARE-" + module.name + ".log", "w") - cmd = ["javadoc", - "-doclet", "jdiff.JDiff", - "-docletpath", jdiff, - "-d", docs, - "-oldapi", oldapi, - "-newapi", newapi, - "-script", - null_file] - jdiff = subprocess.Popen(cmd, stdout=log, stderr=log) - jdiff.wait() - log.close() - code = jdiff.returncode - print("Compared XML for " + module.name) - if code == NO_CHANGES: - print(" No API changes") - elif code == COMPATIBLE: - print(" API Changes are backwards compatible") - elif code == NON_COMPATIBLE: - print(" API Changes are not backwards compatible") - else: - print(" *Error in XML, most likely an empty module") - sys.stdout.flush() - return code - -# Generate the jdiff xml for the given module -# path: path to the autopsy source -# module: Module object -# name: api name for jdiff -def gen_xml(path, modules, name): - for module in modules: - # If its the regression test, the source is in the "test" dir - if module.name == "Testing": - src = os.path.join(path, module.name, "test", "qa-functional", "src") - else: - src = os.path.join(path, module.name, "src") - # xerces = os.path.abspath("./lib/xerces.jar") - xml_out = fix_path(os.path.abspath("./build/jdiff-xml/" + name + "-" + module.name)) - jdiff = fix_path(os.path.abspath("./thirdparty/jdiff/v-custom/jdiff.jar")) - make_dir("build/jdiff-xml") - make_dir("jdiff-logs") - log = open("jdiff-logs/GEN_XML-" + name + "-" + module.name + ".log", "w") - cmd = 
["javadoc", - "-doclet", "jdiff.JDiff", - "-docletpath", jdiff, # ;" + xerces, <-- previous problems required this - "-apiname", xml_out, # leaving it in just in case it's needed once again - "-sourcepath", fix_path(src)] - cmd = cmd + get_packages(src) - jdiff = subprocess.Popen(cmd, stdout=log, stderr=log) - jdiff.wait() - log.close() - print("Generated XML for " + name + " " + module.name) - sys.stdout.flush() - -# Find all the modules in the given path -def find_modules(path): - modules = [] - # Step into each folder in the given path and - # see if it has manifest.mf - if so, it's a module - for dir in os.listdir(path): - directory = os.path.join(path, dir) - if os.path.isdir(directory): - for file in os.listdir(directory): - if file == "manifest.mf": - modules.append(Module(dir, None, None)) - return modules - -# Detects the differences between the source and tag modules -def module_diff(source_modules, tag_modules): - added_modules = [x for x in source_modules if x not in tag_modules] - removed_modules = [x for x in tag_modules if x not in source_modules] - similar_modules = [x for x in source_modules if x in tag_modules] - - added_modules = (added_modules if added_modules else []) - removed_modules = (removed_modules if removed_modules else []) - similar_modules = (similar_modules if similar_modules else []) - return similar_modules, added_modules, removed_modules - -# Reads the previous tag from NEWS.txt -def get_tag(sourcepath): - news = open(sourcepath + "/NEWS.txt", "r") - second_instance = False - for line in news: - if "----------------" in line: - if second_instance: - ver = line.split("VERSION ")[1] - ver = ver.split(" -")[0] - return ("autopsy-" + ver).strip() - else: - second_instance = True - continue - news.close() - - -# ========================================== # -# Dependency Functions # -# ========================================== # - -# Write a new XML file, copying all the lines from projectxml -# and replacing the specification version 
for the code-name-base base -# with the supplied specification version spec -def set_dep_spec(projectxml, base, spec): - print(" Updating Specification version..") - orig = open(projectxml, "r") - f, abs_path = mkstemp() - new_file = open(abs_path, "w") - found_base = False - spacing = " " - sopen = "" - sclose = "\n" - for line in orig: - if base in line: - found_base = True - if found_base and sopen in line: - update = spacing + sopen + str(spec) + sclose - new_file.write(update) - else: - new_file.write(line) - new_file.close() - close(f) - orig.close() - remove(projectxml) - move(abs_path, projectxml) - -# Write a new XML file, copying all the lines from projectxml -# and replacing the release version for the code-name-base base -# with the supplied release version -def set_dep_release(projectxml, base, release): - print(" Updating Release version..") - orig = open(projectxml, "r") - f, abs_path = mkstemp() - new_file = open(abs_path, "w") - found_base = False - spacing = " " - ropen = "" - rclose = "\n" - for line in orig: - if base in line: - found_base = True - if found_base and ropen in line: - update = spacing + ropen + str(release) + rclose - new_file.write(update) - else: - new_file.write(line) - new_file.close() - close(f) - orig.close() - remove(projectxml) - move(abs_path, projectxml) - -# Return the dependency versions in the XML dependency node -def get_dep_versions(dep): - run_dependency = dep.getElementsByTagName("run-dependency")[0] - release_version = run_dependency.getElementsByTagName("release-version") - if release_version: - release_version = getTagText(release_version[0].childNodes) - specification_version = run_dependency.getElementsByTagName("specification-version") - if specification_version: - specification_version = getTagText(specification_version[0].childNodes) - return int(release_version), Spec(specification_version) - -# Given a code-name-base, see if it corresponds with any of our modules -def get_module_from_base(modules, 
code_name_base): - for module in modules: - if "org.sleuthkit.autopsy." + module.name.lower() == code_name_base: - return module - return None # If it didn't match one of our modules - -# Check the text between two XML tags -def getTagText(nodelist): - for node in nodelist: - if node.nodeType == node.TEXT_NODE: - return node.data - -# Check the projectxml for a dependency on any module in modules -def check_for_dependencies(projectxml, modules): - dom = parse(projectxml) - dep_list = dom.getElementsByTagName("dependency") - for dep in dep_list: - code_name_base = dep.getElementsByTagName("code-name-base")[0] - code_name_base = getTagText(code_name_base.childNodes) - module = get_module_from_base(modules, code_name_base) - if module: - print(" Found dependency on " + module.name) - release, spec = get_dep_versions(dep) - if release != module.release() and module.release() is not None: - set_dep_release(projectxml, code_name_base, module.release()) - else: print(" Release version is correct") - if spec != module.spec() and module.spec() is not None: - set_dep_spec(projectxml, code_name_base, module.spec()) - else: print(" Specification version is correct") - -# Given the module and the source directory, return -# the paths to the manifest and project properties files -def get_dependency_file(module, source): - projectxml = os.path.join(source, module.name, "nbproject", "project.xml") - if os.path.isfile(projectxml): - return projectxml - -# Verify/Update the dependencies for each module, basing the dependency -# version number off the versions in each module -def update_dependencies(modules, source): - for module in modules: - print("Checking the dependencies for " + module.name + "...") - projectxml = get_dependency_file(module, source) - if projectxml == None: - print(" Error finding project xml file") - else: - other = [x for x in modules] - check_for_dependencies(projectxml, other) - sys.stdout.flush() - -# ======================================== # -# Versioning 
Functions # -# ======================================== # - -# Return the specification version in the given project.properties/manifest.mf file -def get_specification(project, manifest): - try: - # Try to find it in the project file - # it will be there if impl version is set to append automatically - f = open(project, 'r') - for line in f: - if "spec.version.base" in line: - return Spec(line.split("=")[1].strip()) - f.close() - # If not found there, try the manifest file - f = open(manifest, 'r') - for line in f: - if "OpenIDE-Module-Specification-Version:" in line: - return Spec(line.split(": ")[1].strip()) - except Exception as e: - print("Error parsing Specification version for") - print(project) - print(e) - -# Set the specification version in the given project properties file -# but if it can't be found there, set it in the manifest file -def set_specification(project, manifest, num): - try: - # First try the project file - f = open(project, 'r') - for line in f: - if "spec.version.base" in line: - f.close() - replace(project, line, "spec.version.base=" + str(num) + "\n") - return - f.close() - # If it's not there, try the manifest file - f = open(manifest, 'r') - for line in f: - if "OpenIDE-Module-Specification-Version:" in line: - f.close() - replace(manifest, line, "OpenIDE-Module-Specification-Version: " + str(num) + "\n") - return - # Otherwise we're out of luck - print(" Error finding the Specification version to update") - print(" " + manifest) - f.close() - except: - print(" Error incrementing Specification version for") - print(" " + project) - -# Return the implementation version in the given manifest.mf file -def get_implementation(manifest): - try: - f = open(manifest, 'r') - for line in f: - if "OpenIDE-Module-Implementation-Version" in line: - return int(line.split(": ")[1].strip()) - f.close() - except: - print("Error parsing Implementation version for") - print(manifest) - -# Set the implementation version in the given manifest file -def 
set_implementation(manifest, num): - try: - f = open(manifest, 'r') - for line in f: - if "OpenIDE-Module-Implementation-Version" in line: - f.close() - replace(manifest, line, "OpenIDE-Module-Implementation-Version: " + str(num) + "\n") - return - # If it isn't there, add it - f.close() - write_implementation(manifest, num) - except: - print(" Error incrementing Implementation version for") - print(" " + manifest) - -# Rewrite the manifest file to include the implementation version -def write_implementation(manifest, num): - f = open(manifest, "r") - contents = f.read() - contents = contents[:-2] + "OpenIDE-Module-Implementation-Version: " + str(num) + "\n\n" - f.close() - f = open(manifest, "w") - f.write(contents) - f.close() - -# Return the release version in the given manifest.mf file -def get_release(manifest): - try: - f = open(manifest, 'r') - for line in f: - if "OpenIDE-Module:" in line: - return int(line.split("/")[1].strip()) - f.close() - except: - #print("Error parsing Release version for") - #print(manifest) - return 0 - -# Set the release version in the given manifest file -def set_release(manifest, num): - try: - f = open(manifest, 'r') - for line in f: - if "OpenIDE-Module:" in line: - f.close() - index = line.index('/') - len(line) + 1 - newline = line[:index] + str(num) - replace(manifest, line, newline + "\n") - return - print(" Error finding the release version to update") - print(" " + manifest) - f.close() - except: - print(" Error incrementing release version for") - print(" " + manifest) - -# Given the module and the source directory, return -# the paths to the manifest and project properties files -def get_version_files(module, source): - manifest = os.path.join(source, module.name, "manifest.mf") - project = os.path.join(source, module.name, "nbproject", "project.properties") - if os.path.isfile(manifest) and os.path.isfile(project): - return manifest, project - -# Returns a the current version numbers for the module in source -def 
get_versions(module, source): - manifest, project = get_version_files(module, source) - if manifest == None or project == None: - print(" Error finding manifeset and project properties files") - return - spec = get_specification(project, manifest) - impl = get_implementation(manifest) - release = get_release(manifest) - return [spec, impl, release] - -# Update the version numbers for every module in modules -def update_versions(modules, source): - for module in modules: - versions = module.versions - manifest, project = get_version_files(module, source) - print("Updating " + module.name + "...") - if manifest == None or project == None: - print(" Error finding manifeset and project properties files") - return - if module.ret == COMPATIBLE: - versions = [versions[0].set(versions[0].increment()), versions[1] + 1, versions[2]] - set_specification(project, manifest, versions[0]) - set_implementation(manifest, versions[1]) - module.set_versions(versions) - elif module.ret == NON_COMPATIBLE: - versions = [versions[0].set(versions[0].overflow()), versions[1] + 1, versions[2] + 1] - set_specification(project, manifest, versions[0]) - set_implementation(manifest, versions[1]) - set_release(manifest, versions[2]) - module.set_versions(versions) - elif module.ret == NO_CHANGES: - versions = [versions[0], versions[1] + 1, versions[2]] - set_implementation(manifest, versions[1]) - module.set_versions(versions) - elif module.ret == None: - versions = [Spec("1.0"), 1, 1] - set_specification(project, manifest, versions[0]) - set_implementation(manifest, versions[1]) - set_release(manifest, versions[2]) - module.set_versions(versions) - sys.stdout.flush() - -# Given a list of the added modules, remove the modules -# which have the correct 'new module default' version number -def remove_correct_added(modules): - correct = [x for x in modules] - for module in modules: - if module.spec() == "1.0" or module.spec() == "0.0": - if module.impl() == 1: - if module.release() == 1 or 
module.release() == 0: - correct.remove(module) - return correct - -# ==================================== # -# Helper Functions # -# ==================================== # - -# Replace pattern with subst in given file -def replace(file, pattern, subst): - #Create temp file - fh, abs_path = mkstemp() - new_file = open(abs_path,'w') - old_file = open(file) - for line in old_file: - new_file.write(line.replace(pattern, subst)) - #close temp file - new_file.close() - close(fh) - old_file.close() - #Remove original file - remove(file) - #Move new file - move(abs_path, file) - -# Given a list of modules print the version numbers that need changing -def print_version_updates(modules): - f = open("gen_version.txt", "a") - for module in modules: - versions = module.versions - if module.ret == COMPATIBLE: - output = (module.name + ":\n") - output += ("\tSpecification:\t" + str(versions[0]) + "\t->\t" + str(versions[0].increment()) + "\n") - output += ("\tImplementation:\t" + str(versions[1]) + "\t->\t" + str(versions[1] + 1) + "\n") - output += ("\tRelease:\tNo Change.\n") - output += ("\n") - print(output) - sys.stdout.flush() - f.write(output) - elif module.ret == NON_COMPATIBLE: - output = (module.name + ":\n") - output += ("\tSpecification:\t" + str(versions[0]) + "\t->\t" + str(versions[0].overflow()) + "\n") - output += ("\tImplementation:\t" + str(versions[1]) + "\t->\t" + str(versions[1] + 1) + "\n") - output += ("\tRelease:\t" + str(versions[2]) + "\t->\t" + str(versions[2] + 1) + "\n") - output += ("\n") - print(output) - sys.stdout.flush() - f.write(output) - elif module.ret == ERROR: - output = (module.name + ":\n") - output += ("\t*Unable to detect necessary changes\n") - output += ("\tSpecification:\t" + str(versions[0]) + "\n") - output += ("\tImplementation:\t" + str(versions[1]) + "\n") - output += ("\tRelease:\t\t" + str(versions[2]) + "\n") - output += ("\n") - print(output) - f.write(output) - sys.stdout.flush() - elif module.ret == NO_CHANGES: - output 
= (module.name + ":\n") - if versions[1] is None: - output += ("\tImplementation: None\n") - else: - output += ("\tImplementation:\t" + str(versions[1]) + "\t->\t" + str(versions[1] + 1) + "\n") - output += ("\n") - print(output) - sys.stdout.flush() - f.write(output) - elif module.ret is None: - output = ("Added " + module.name + ":\n") - if module.spec() != "1.0" and module.spec() != "0.0": - output += ("\tSpecification:\t" + str(module.spec()) + "\t->\t" + "1.0\n") - output += ("\n") - if module.impl() != 1: - output += ("\tImplementation:\t" + str(module.impl()) + "\t->\t" + "1\n") - output += ("\n") - if module.release() != 1 and module.release() != 0: - output += ("Release:\t\t" + str(module.release()) + "\t->\t" + "1\n") - output += ("\n") - print(output) - sys.stdout.flush() - f.write(output) - sys.stdout.flush() - f.close() - -# Changes cygwin paths to Windows -def fix_path(path): - if "cygdrive" in path: - new_path = path[11:] - return "C:/" + new_path - else: - return path - -# Print a 'title' -def printt(title): - print("\n" + title) - lines = "" - for letter in title: - lines += "-" - print(lines) - sys.stdout.flush() - -# Get a list of package names in the given path -# The path is expected to be of the form {base}/module/src -# -# NOTE: We currently only check for packages of the form -# org.sleuthkit.autopsy.x -# If we add other namespaces for commercial modules we will -# have to add a check here -def get_packages(path): - packages = [] - package_path = os.path.join(path, "org", "sleuthkit", "autopsy") - for folder in os.listdir(package_path): - package_string = "org.sleuthkit.autopsy." 
- packages.append(package_string + folder) - return packages - -# Create the given directory, if it doesn't already exist -def make_dir(dir): - try: - if not os.path.isdir(dir): - os.mkdir(dir) - if os.path.isdir(dir): - return True - return False - except: - print("Exception thrown when creating directory") - return False - -# Delete the given directory, and make sure it is deleted -def del_dir(dir): - try: - if os.path.isdir(dir): - shutil.rmtree(dir, ignore_errors=False, onerror=handleRemoveReadonly) - if os.path.isdir(dir): - return False - else: - return True - return True - except: - print("Exception thrown when deleting directory") - traceback.print_exc() - return False - -# Handle any permisson errors thrown by shutil.rmtree -def handleRemoveReadonly(func, path, exc): - excvalue = exc[1] - if func in (os.rmdir, os.remove) and excvalue.errno == errno.EACCES: - os.chmod(path, stat.S_IRWXU| stat.S_IRWXG| stat.S_IRWXO) # 0777 - func(path) - else: - raise - -# Run git clone and git checkout for the tag -def do_git(tag, tag_dir): - try: - printt("Cloning Autopsy tag " + tag + " into dir " + tag_dir + " (this could take a while)...") - subprocess.call(["git", "clone", "https://github.com/sleuthkit/autopsy.git", tag_dir], - stdout=subprocess.PIPE) - printt("Checking out tag " + tag + "...") - subprocess.call(["git", "checkout", tag], - stdout=subprocess.PIPE, - cwd=tag_dir) - return True - except Exception as ex: - print("Error cloning and checking out Autopsy: ", sys.exc_info()[0]) - print(str(ex)) - print("The terminal you are using most likely does not recognize git commands.") - return False - -# Get the flags from argv -def args(): - try: - sys.argv.pop(0) - while sys.argv: - arg = sys.argv.pop(0) - if arg == "-h" or arg == "--help": - return 1 - elif arg == "-t" or arg == "--tag": - global tag - tag = sys.argv.pop(0) - elif arg == "-s" or arg == "--source": - global source - source = sys.argv.pop(0) - elif arg == "-d" or arg == "--dir": - global docdir - 
docdir = sys.argv.pop(0) - elif arg == "-a" or arg == "--auto": - global dry - dry = False - else: - raise Exception() - except: - pass - -# Print script run info -def printinfo(): - global tag - global source - global docdir - global dry - printt("Release script information:") - if source is None: - source = fix_path(os.path.abspath(".")) - print("Using source directory:\n " + source) - if tag is None: - tag = get_tag(source) - print("Checking out to tag:\n " + tag) - if docdir is None: - docdir = fix_path(os.path.abspath("./jdiff-javadocs")) - print("Generating jdiff JavaDocs in:\n " + docdir) - if dry is True: - print("Dry run: will not auto-update version numbers") - sys.stdout.flush() - -# Print the script's usage/help -def usage(): - return \ - """ - USAGE: - Compares the API of the current Autopsy source code with a previous - tagged version. By default, it will detect the previous tag from - the NEWS file and will not update the versions in the source code. - - OPTIONAL FLAGS: - -t --tag Specify a previous tag to compare to. - Otherwise the NEWS file will be used. - - -d --dir The output directory for the jdiff JavaDocs. If no - directory is given, the default is jdiff-javadocs/{module}. - - -s --source The directory containing Autopsy's source code. - - -a --auto Automatically update version numbers (not dry). - - -h --help Prints this usage. 
- """ - -# ==================================== # -# Main Functionality # -# ==================================== # - -# Where the magic happens -def main(): - global tag; global source; global docdir; global dry - tag = None; source = None; docdir = None; dry = True - - ret = args() - if ret: - print(usage()) - return 0 - printinfo() - - # ----------------------------------------------- - # 1) Clone Autopsy, checkout to given tag/commit - # 2) Get the modules in the clone and the source - # 3) Generate the xml comparison - # ----------------------------------------------- - if not del_dir("./build/" + tag): - print("\n\n=========================================") - print(" Failed to delete previous Autopsy clone.") - print(" Unable to continue...") - print("=========================================") - return 1 - tag_dir = os.path.abspath("./build/" + tag) - if not do_git(tag, tag_dir): - return 1 - sys.stdout.flush() - - tag_modules = find_modules(tag_dir) - source_modules = find_modules(source) - - printt("Generating jdiff XML reports...") - apiname_tag = tag - apiname_cur = "current" - gen_xml(tag_dir, tag_modules, apiname_tag) - gen_xml(source, source_modules, apiname_cur) - - printt("Deleting cloned Autopsy directory...") - print("Clone successfully deleted" if del_dir(tag_dir) else "Failed to delete clone") - sys.stdout.flush() - - # ----------------------------------------------------- - # 1) Seperate modules into added, similar, and removed - # 2) Compare XML for each module - # ----------------------------------------------------- - printt("Comparing modules found...") - similar_modules, added_modules, removed_modules = module_diff(source_modules, tag_modules) - if added_modules or removed_modules: - for m in added_modules: - print("+ Added " + m.name) - sys.stdout.flush() - for m in removed_modules: - print("- Removed " + m.name) - sys.stdout.flush() - else: - print("No added or removed modules") - sys.stdout.flush() - - printt("Comparing jdiff 
outputs...") - for module in similar_modules: - module.set_ret(compare_xml(module, apiname_tag, apiname_cur)) - print("Refer to the jdiff-javadocs folder for more details") - - # ------------------------------------------------------------ - # 1) Do versioning - # 2) Auto-update version numbers in files and the_modules list - # 3) Auto-update dependencies - # ------------------------------------------------------------ - printt("Auto-detecting version numbers and changes...") - for module in added_modules: - module.set_versions(get_versions(module, source)) - for module in similar_modules: - module.set_versions(get_versions(module, source)) - - added_modules = remove_correct_added(added_modules) - the_modules = similar_modules + added_modules - print_version_updates(the_modules) - - if not dry: - printt("Auto-updating version numbers...") - update_versions(the_modules, source) - print("All auto-updates complete") - - printt("Detecting and auto-updating dependencies...") - update_dependencies(the_modules, source) - - printt("Deleting jdiff XML...") - xml_dir = os.path.abspath("./build/jdiff-xml") - print("XML successfully deleted" if del_dir(xml_dir) else "Failed to delete XML") - - print("\n--- Script completed successfully ---") - return 0 - -# Start off the script -if __name__ == "__main__": - sys.exit(main()) +# +# Autopsy Forensic Browser +# +# Copyright 2012-2013 Basis Technology Corp. +# Contact: carrier sleuthkit org +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + + +####################### +# This script exists to help us determine update the library +# versions appropriately. See this page for version details. +# +# http://wiki.sleuthkit.org/index.php?title=Autopsy_3_Module_Versions +# +# The basic idea is that this script uses javadoc/jdiff to +# compare the current state of the source code to the last +# tag and identifies if APIs were removed, added, etc. +# +# When run from the Autopsy build script, this script will: +# - Clone Autopsy and checkout to the previous release tag +# as found in the NEWS.txt file +# - Auto-discover all modules and packages +# - Run jdiff, comparing the current and previous modules +# - Use jdiff's output to determine if each module +# a) has no changes +# b) has backwards compatible changes +# c) has backwards incompatible changes +# - Based off it's compatibility, updates each module's +# a) Major version +# b) Specification version +# c) Implementation version +# - Updates the dependencies on each module depending on the +# updated version numbers +# +# Optionally, when run from the command line, one can provide the +# desired tag to compare the current version to, the directory for +# the current version of Autopsy, and whether to automatically +# update the version numbers and dependencies. +# ------------------------------------------------------------ + +import errno +import os +import shutil +import stat +import subprocess +import sys +import traceback +from os import remove, close +from shutil import move +from tempfile import mkstemp +from xml.dom.minidom import parse, parseString + +# Jdiff return codes. 
Described in more detail further on +NO_CHANGES = 100 +COMPATIBLE = 101 +NON_COMPATIBLE = 102 +ERROR = 1 + +# An Autopsy module object +class Module: + # Initialize it with a name, return code, and version numbers + def __init__(self, name=None, ret=None, versions=None): + self.name = name + self.ret = ret + self.versions = versions + # As a string, the module should be it's name + def __str__(self): + return self.name + def __repr__(self): + return self.name + # When compared to another module, the two are equal if the names are the same + def __cmp__(self, other): + if isinstance(other, Module): + if self.name == other.name: + return 0 + elif self.name < other.name: + return -1 + else: + return 1 + return 1 + def __eq__(self, other): + if isinstance(other, Module): + if self.name == other.name: + return True + return False + def set_name(self, name): + self.name = name + def set_ret(self, ret): + self.ret = ret + def set_versions(self, versions): + self.versions = versions + def spec(self): + return self.versions[0] + def impl(self): + return self.versions[1] + def release(self): + return self.versions[2] + +# Representation of the Specification version number +class Spec: + # Initialize specification number, where num is a string like x.y + def __init__(self, num): + self.third = None + spec_nums = num.split(".") + if len(spec_nums) == 3: + final = spec_nums[2] + self.third = int(final) + + l, r = spec_nums[0], spec_nums[1] + + self.left = int(l) + self.right = int(r) + + def __str__(self): + return self.get() + def __cmp__(self, other): + if isinstance(other, Spec): + if self.left == other.left: + if self.right == other.right: + return 0 + if self.right < other.right: + return -1 + return 1 + if self.left < other.left: + return -1 + return 1 + elif isinstance(other, str): + l, r = other.split(".") + if self.left == int(l): + if self.right == int(r): + return 0 + if self.right < int(r): + return -1 + return 1 + if self.left < int(l): + return -1 + return 1 + 
return -1 + + def overflow(self): + return str(self.left + 1) + ".0" + def increment(self): + return str(self.left) + "." + str(self.right + 1) + def get(self): + spec_str = str(self.left) + "." + str(self.right) + if self.third is not None: + spec_str += "." + str(self.final) + return spec_str + def set(self, num): + if isinstance(num, str): + l, r = num.split(".") + self.left = int(l) + self.right = int(r) + elif isinstance(num, Spec): + self.left = num.left + self.right = num.right + return self + +# ================================ # +# Core Functions # +# ================================ # + +# Given a list of modules and the names for each version, compare +# the generated jdiff XML for each module and output the jdiff +# JavaDocs. +# +# modules: the list of all modules both versions have in common +# apiname_tag: the api name of the previous version, most likely the tag +# apiname_cur: the api name of the current version, most likely "Current" +# +# returns the exit code from the modified jdiff.jar +# return code 1 = error in jdiff +# return code 100 = no changes +# return code 101 = compatible changes +# return code 102 = incompatible changes +def compare_xml(module, apiname_tag, apiname_cur): + global docdir + make_dir(docdir) + null_file = fix_path(os.path.abspath("./thirdparty/jdiff/v-custom/lib/Null.java")) + jdiff = fix_path(os.path.abspath("./thirdparty/jdiff/v-custom/jdiff.jar")) + oldapi = fix_path("build/jdiff-xml/" + apiname_tag + "-" + module.name) + newapi = fix_path("build/jdiff-xml/" + apiname_cur + "-" + module.name) + docs = fix_path(docdir + "/" + module.name) + # Comments are strange. They look for a file with additional user comments in a + # directory like docs/user_comments_for_xyz. The problem being that xyz is the + # path to the new/old api. So xyz turns into multiple directories for us. + # i.e. 
user_comments_for_build/jdiff-xml/[tag name]-[module name]_to_build/jdiff-xml + comments = fix_path(docs + "/user_comments_for_build") + jdiff_com = fix_path(comments + "/jdiff-xml") + tag_comments = fix_path(jdiff_com + "/" + apiname_tag + "-" + module.name + "_to_build") + jdiff_tag_com = fix_path(tag_comments + "/jdiff-xml") + + if not os.path.exists(jdiff): + print("JDIFF doesn't exist.") + + make_dir(docs) + make_dir(comments) + make_dir(jdiff_com) + make_dir(tag_comments) + make_dir(jdiff_tag_com) + make_dir("jdiff-logs") + log = open("jdiff-logs/COMPARE-" + module.name + ".log", "w") + cmd = ["javadoc", + "-doclet", "jdiff.JDiff", + "-docletpath", jdiff, + "-d", docs, + "-oldapi", oldapi, + "-newapi", newapi, + "-script", + null_file] + jdiff = subprocess.Popen(cmd, stdout=log, stderr=log) + jdiff.wait() + log.close() + code = jdiff.returncode + print("Compared XML for " + module.name) + if code == NO_CHANGES: + print(" No API changes") + elif code == COMPATIBLE: + print(" API Changes are backwards compatible") + elif code == NON_COMPATIBLE: + print(" API Changes are not backwards compatible") + else: + print(" *Error in XML, most likely an empty module") + sys.stdout.flush() + return code + +# Generate the jdiff xml for the given module +# path: path to the autopsy source +# module: Module object +# name: api name for jdiff +def gen_xml(path, modules, name): + for module in modules: + # If its the regression test, the source is in the "test" dir + if module.name == "Testing": + src = os.path.join(path, module.name, "test", "qa-functional", "src") + else: + src = os.path.join(path, module.name, "src") + # xerces = os.path.abspath("./lib/xerces.jar") + xml_out = fix_path(os.path.abspath("./build/jdiff-xml/" + name + "-" + module.name)) + jdiff = fix_path(os.path.abspath("./thirdparty/jdiff/v-custom/jdiff.jar")) + make_dir("build/jdiff-xml") + make_dir("jdiff-logs") + log = open("jdiff-logs/GEN_XML-" + name + "-" + module.name + ".log", "w") + cmd = 
["javadoc", + "-doclet", "jdiff.JDiff", + "-docletpath", jdiff, # ;" + xerces, <-- previous problems required this + "-apiname", xml_out, # leaving it in just in case it's needed once again + "-sourcepath", fix_path(src)] + cmd = cmd + get_packages(src) + jdiff = subprocess.Popen(cmd, stdout=log, stderr=log) + jdiff.wait() + log.close() + print("Generated XML for " + name + " " + module.name) + sys.stdout.flush() + +# Find all the modules in the given path +def find_modules(path): + modules = [] + # Step into each folder in the given path and + # see if it has manifest.mf - if so, it's a module + for dir in os.listdir(path): + directory = os.path.join(path, dir) + if os.path.isdir(directory): + for file in os.listdir(directory): + if file == "manifest.mf": + modules.append(Module(dir, None, None)) + return modules + +# Detects the differences between the source and tag modules +def module_diff(source_modules, tag_modules): + added_modules = [x for x in source_modules if x not in tag_modules] + removed_modules = [x for x in tag_modules if x not in source_modules] + similar_modules = [x for x in source_modules if x in tag_modules] + + added_modules = (added_modules if added_modules else []) + removed_modules = (removed_modules if removed_modules else []) + similar_modules = (similar_modules if similar_modules else []) + return similar_modules, added_modules, removed_modules + +# Reads the previous tag from NEWS.txt +def get_tag(sourcepath): + news = open(sourcepath + "/NEWS.txt", "r") + second_instance = False + for line in news: + if "----------------" in line: + if second_instance: + ver = line.split("VERSION ")[1] + ver = ver.split(" -")[0] + return ("autopsy-" + ver).strip() + else: + second_instance = True + continue + news.close() + + +# ========================================== # +# Dependency Functions # +# ========================================== # + +# Write a new XML file, copying all the lines from projectxml +# and replacing the specification version 
for the code-name-base base +# with the supplied specification version spec +def set_dep_spec(projectxml, base, spec): + print(" Updating Specification version..") + orig = open(projectxml, "r") + f, abs_path = mkstemp() + new_file = open(abs_path, "w") + found_base = False + spacing = " " + sopen = "" + sclose = "\n" + for line in orig: + if base in line: + found_base = True + if found_base and sopen in line: + update = spacing + sopen + str(spec) + sclose + new_file.write(update) + else: + new_file.write(line) + new_file.close() + close(f) + orig.close() + remove(projectxml) + move(abs_path, projectxml) + +# Write a new XML file, copying all the lines from projectxml +# and replacing the release version for the code-name-base base +# with the supplied release version +def set_dep_release(projectxml, base, release): + print(" Updating Release version..") + orig = open(projectxml, "r") + f, abs_path = mkstemp() + new_file = open(abs_path, "w") + found_base = False + spacing = " " + ropen = "" + rclose = "\n" + for line in orig: + if base in line: + found_base = True + if found_base and ropen in line: + update = spacing + ropen + str(release) + rclose + new_file.write(update) + else: + new_file.write(line) + new_file.close() + close(f) + orig.close() + remove(projectxml) + move(abs_path, projectxml) + +# Return the dependency versions in the XML dependency node +def get_dep_versions(dep): + run_dependency = dep.getElementsByTagName("run-dependency")[0] + release_version = run_dependency.getElementsByTagName("release-version") + if release_version: + release_version = getTagText(release_version[0].childNodes) + specification_version = run_dependency.getElementsByTagName("specification-version") + if specification_version: + specification_version = getTagText(specification_version[0].childNodes) + return int(release_version), Spec(specification_version) + +# Given a code-name-base, see if it corresponds with any of our modules +def get_module_from_base(modules, 
code_name_base): + for module in modules: + if "org.sleuthkit.autopsy." + module.name.lower() == code_name_base: + return module + return None # If it didn't match one of our modules + +# Check the text between two XML tags +def getTagText(nodelist): + for node in nodelist: + if node.nodeType == node.TEXT_NODE: + return node.data + +# Check the projectxml for a dependency on any module in modules +def check_for_dependencies(projectxml, modules): + dom = parse(projectxml) + dep_list = dom.getElementsByTagName("dependency") + for dep in dep_list: + code_name_base = dep.getElementsByTagName("code-name-base")[0] + code_name_base = getTagText(code_name_base.childNodes) + module = get_module_from_base(modules, code_name_base) + if module: + print(" Found dependency on " + module.name) + release, spec = get_dep_versions(dep) + if release != module.release() and module.release() is not None: + set_dep_release(projectxml, code_name_base, module.release()) + else: print(" Release version is correct") + if spec != module.spec() and module.spec() is not None: + set_dep_spec(projectxml, code_name_base, module.spec()) + else: print(" Specification version is correct") + +# Given the module and the source directory, return +# the paths to the manifest and project properties files +def get_dependency_file(module, source): + projectxml = os.path.join(source, module.name, "nbproject", "project.xml") + if os.path.isfile(projectxml): + return projectxml + +# Verify/Update the dependencies for each module, basing the dependency +# version number off the versions in each module +def update_dependencies(modules, source): + for module in modules: + print("Checking the dependencies for " + module.name + "...") + projectxml = get_dependency_file(module, source) + if projectxml == None: + print(" Error finding project xml file") + else: + other = [x for x in modules] + check_for_dependencies(projectxml, other) + sys.stdout.flush() + +# ======================================== # +# Versioning 
Functions # +# ======================================== # + +# Return the specification version in the given project.properties/manifest.mf file +def get_specification(project, manifest): + try: + # Try to find it in the project file + # it will be there if impl version is set to append automatically + f = open(project, 'r') + for line in f: + if "spec.version.base" in line: + return Spec(line.split("=")[1].strip()) + f.close() + # If not found there, try the manifest file + f = open(manifest, 'r') + for line in f: + if "OpenIDE-Module-Specification-Version:" in line: + return Spec(line.split(": ")[1].strip()) + except Exception as e: + print("Error parsing Specification version for") + print(project) + print(e) + +# Set the specification version in the given project properties file +# but if it can't be found there, set it in the manifest file +def set_specification(project, manifest, num): + try: + # First try the project file + f = open(project, 'r') + for line in f: + if "spec.version.base" in line: + f.close() + replace(project, line, "spec.version.base=" + str(num) + "\n") + return + f.close() + # If it's not there, try the manifest file + f = open(manifest, 'r') + for line in f: + if "OpenIDE-Module-Specification-Version:" in line: + f.close() + replace(manifest, line, "OpenIDE-Module-Specification-Version: " + str(num) + "\n") + return + # Otherwise we're out of luck + print(" Error finding the Specification version to update") + print(" " + manifest) + f.close() + except: + print(" Error incrementing Specification version for") + print(" " + project) + +# Return the implementation version in the given manifest.mf file +def get_implementation(manifest): + try: + f = open(manifest, 'r') + for line in f: + if "OpenIDE-Module-Implementation-Version" in line: + return int(line.split(": ")[1].strip()) + f.close() + except: + print("Error parsing Implementation version for") + print(manifest) + +# Set the implementation version in the given manifest file +def 
set_implementation(manifest, num): + try: + f = open(manifest, 'r') + for line in f: + if "OpenIDE-Module-Implementation-Version" in line: + f.close() + replace(manifest, line, "OpenIDE-Module-Implementation-Version: " + str(num) + "\n") + return + # If it isn't there, add it + f.close() + write_implementation(manifest, num) + except: + print(" Error incrementing Implementation version for") + print(" " + manifest) + +# Rewrite the manifest file to include the implementation version +def write_implementation(manifest, num): + f = open(manifest, "r") + contents = f.read() + contents = contents[:-2] + "OpenIDE-Module-Implementation-Version: " + str(num) + "\n\n" + f.close() + f = open(manifest, "w") + f.write(contents) + f.close() + +# Return the release version in the given manifest.mf file +def get_release(manifest): + try: + f = open(manifest, 'r') + for line in f: + if "OpenIDE-Module:" in line: + return int(line.split("/")[1].strip()) + f.close() + except: + #print("Error parsing Release version for") + #print(manifest) + return 0 + +# Set the release version in the given manifest file +def set_release(manifest, num): + try: + f = open(manifest, 'r') + for line in f: + if "OpenIDE-Module:" in line: + f.close() + index = line.index('/') - len(line) + 1 + newline = line[:index] + str(num) + replace(manifest, line, newline + "\n") + return + print(" Error finding the release version to update") + print(" " + manifest) + f.close() + except: + print(" Error incrementing release version for") + print(" " + manifest) + +# Given the module and the source directory, return +# the paths to the manifest and project properties files +def get_version_files(module, source): + manifest = os.path.join(source, module.name, "manifest.mf") + project = os.path.join(source, module.name, "nbproject", "project.properties") + if os.path.isfile(manifest) and os.path.isfile(project): + return manifest, project + +# Returns a the current version numbers for the module in source +def 
get_versions(module, source): + manifest, project = get_version_files(module, source) + if manifest == None or project == None: + print(" Error finding manifeset and project properties files") + return + spec = get_specification(project, manifest) + impl = get_implementation(manifest) + release = get_release(manifest) + return [spec, impl, release] + +# Update the version numbers for every module in modules +def update_versions(modules, source): + for module in modules: + versions = module.versions + manifest, project = get_version_files(module, source) + print("Updating " + module.name + "...") + if manifest == None or project == None: + print(" Error finding manifeset and project properties files") + return + if module.ret == COMPATIBLE: + versions = [versions[0].set(versions[0].increment()), versions[1] + 1, versions[2]] + set_specification(project, manifest, versions[0]) + set_implementation(manifest, versions[1]) + module.set_versions(versions) + elif module.ret == NON_COMPATIBLE: + versions = [versions[0].set(versions[0].overflow()), versions[1] + 1, versions[2] + 1] + set_specification(project, manifest, versions[0]) + set_implementation(manifest, versions[1]) + set_release(manifest, versions[2]) + module.set_versions(versions) + elif module.ret == NO_CHANGES: + versions = [versions[0], versions[1] + 1, versions[2]] + set_implementation(manifest, versions[1]) + module.set_versions(versions) + elif module.ret == None: + versions = [Spec("1.0"), 1, 1] + set_specification(project, manifest, versions[0]) + set_implementation(manifest, versions[1]) + set_release(manifest, versions[2]) + module.set_versions(versions) + sys.stdout.flush() + +# Given a list of the added modules, remove the modules +# which have the correct 'new module default' version number +def remove_correct_added(modules): + correct = [x for x in modules] + for module in modules: + if module.spec() == "1.0" or module.spec() == "0.0": + if module.impl() == 1: + if module.release() == 1 or 
module.release() == 0: + correct.remove(module) + return correct + +# ==================================== # +# Helper Functions # +# ==================================== # + +# Replace pattern with subst in given file +def replace(file, pattern, subst): + #Create temp file + fh, abs_path = mkstemp() + new_file = open(abs_path,'w') + old_file = open(file) + for line in old_file: + new_file.write(line.replace(pattern, subst)) + #close temp file + new_file.close() + close(fh) + old_file.close() + #Remove original file + remove(file) + #Move new file + move(abs_path, file) + +# Given a list of modules print the version numbers that need changing +def print_version_updates(modules): + f = open("gen_version.txt", "a") + for module in modules: + versions = module.versions + if module.ret == COMPATIBLE: + output = (module.name + ":\n") + output += ("\tSpecification:\t" + str(versions[0]) + "\t->\t" + str(versions[0].increment()) + "\n") + output += ("\tImplementation:\t" + str(versions[1]) + "\t->\t" + str(versions[1] + 1) + "\n") + output += ("\tRelease:\tNo Change.\n") + output += ("\n") + print(output) + sys.stdout.flush() + f.write(output) + elif module.ret == NON_COMPATIBLE: + output = (module.name + ":\n") + output += ("\tSpecification:\t" + str(versions[0]) + "\t->\t" + str(versions[0].overflow()) + "\n") + output += ("\tImplementation:\t" + str(versions[1]) + "\t->\t" + str(versions[1] + 1) + "\n") + output += ("\tRelease:\t" + str(versions[2]) + "\t->\t" + str(versions[2] + 1) + "\n") + output += ("\n") + print(output) + sys.stdout.flush() + f.write(output) + elif module.ret == ERROR: + output = (module.name + ":\n") + output += ("\t*Unable to detect necessary changes\n") + output += ("\tSpecification:\t" + str(versions[0]) + "\n") + output += ("\tImplementation:\t" + str(versions[1]) + "\n") + output += ("\tRelease:\t\t" + str(versions[2]) + "\n") + output += ("\n") + print(output) + f.write(output) + sys.stdout.flush() + elif module.ret == NO_CHANGES: + output 
= (module.name + ":\n") + if versions[1] is None: + output += ("\tImplementation: None\n") + else: + output += ("\tImplementation:\t" + str(versions[1]) + "\t->\t" + str(versions[1] + 1) + "\n") + output += ("\n") + print(output) + sys.stdout.flush() + f.write(output) + elif module.ret is None: + output = ("Added " + module.name + ":\n") + if module.spec() != "1.0" and module.spec() != "0.0": + output += ("\tSpecification:\t" + str(module.spec()) + "\t->\t" + "1.0\n") + output += ("\n") + if module.impl() != 1: + output += ("\tImplementation:\t" + str(module.impl()) + "\t->\t" + "1\n") + output += ("\n") + if module.release() != 1 and module.release() != 0: + output += ("Release:\t\t" + str(module.release()) + "\t->\t" + "1\n") + output += ("\n") + print(output) + sys.stdout.flush() + f.write(output) + sys.stdout.flush() + f.close() + +# Changes cygwin paths to Windows +def fix_path(path): + if "cygdrive" in path: + new_path = path[11:] + return "C:/" + new_path + else: + return path + +# Print a 'title' +def printt(title): + print("\n" + title) + lines = "" + for letter in title: + lines += "-" + print(lines) + sys.stdout.flush() + +# Get a list of package names in the given path +# The path is expected to be of the form {base}/module/src +# +# NOTE: We currently only check for packages of the form +# org.sleuthkit.autopsy.x +# If we add other namespaces for commercial modules we will +# have to add a check here +def get_packages(path): + packages = [] + package_path = os.path.join(path, "org", "sleuthkit", "autopsy") + for folder in os.listdir(package_path): + package_string = "org.sleuthkit.autopsy." 
+ packages.append(package_string + folder) + return packages + +# Create the given directory, if it doesn't already exist +def make_dir(dir): + try: + if not os.path.isdir(dir): + os.mkdir(dir) + if os.path.isdir(dir): + return True + return False + except: + print("Exception thrown when creating directory") + return False + +# Delete the given directory, and make sure it is deleted +def del_dir(dir): + try: + if os.path.isdir(dir): + shutil.rmtree(dir, ignore_errors=False, onerror=handleRemoveReadonly) + if os.path.isdir(dir): + return False + else: + return True + return True + except: + print("Exception thrown when deleting directory") + traceback.print_exc() + return False + +# Handle any permisson errors thrown by shutil.rmtree +def handleRemoveReadonly(func, path, exc): + excvalue = exc[1] + if func in (os.rmdir, os.remove) and excvalue.errno == errno.EACCES: + os.chmod(path, stat.S_IRWXU| stat.S_IRWXG| stat.S_IRWXO) # 0777 + func(path) + else: + raise + +# Run git clone and git checkout for the tag +def do_git(tag, tag_dir): + try: + printt("Cloning Autopsy tag " + tag + " into dir " + tag_dir + " (this could take a while)...") + subprocess.call(["git", "clone", "https://github.com/sleuthkit/autopsy.git", tag_dir], + stdout=subprocess.PIPE) + printt("Checking out tag " + tag + "...") + subprocess.call(["git", "checkout", tag], + stdout=subprocess.PIPE, + cwd=tag_dir) + return True + except Exception as ex: + print("Error cloning and checking out Autopsy: ", sys.exc_info()[0]) + print(str(ex)) + print("The terminal you are using most likely does not recognize git commands.") + return False + +# Get the flags from argv +def args(): + try: + sys.argv.pop(0) + while sys.argv: + arg = sys.argv.pop(0) + if arg == "-h" or arg == "--help": + return 1 + elif arg == "-t" or arg == "--tag": + global tag + tag = sys.argv.pop(0) + elif arg == "-s" or arg == "--source": + global source + source = sys.argv.pop(0) + elif arg == "-d" or arg == "--dir": + global docdir + 
docdir = sys.argv.pop(0) + elif arg == "-a" or arg == "--auto": + global dry + dry = False + else: + raise Exception() + except: + pass + +# Print script run info +def printinfo(): + global tag + global source + global docdir + global dry + printt("Release script information:") + if source is None: + source = fix_path(os.path.abspath(".")) + print("Using source directory:\n " + source) + if tag is None: + tag = get_tag(source) + print("Checking out to tag:\n " + tag) + if docdir is None: + docdir = fix_path(os.path.abspath("./jdiff-javadocs")) + print("Generating jdiff JavaDocs in:\n " + docdir) + if dry is True: + print("Dry run: will not auto-update version numbers") + sys.stdout.flush() + +# Print the script's usage/help +def usage(): + return \ + """ + USAGE: + Compares the API of the current Autopsy source code with a previous + tagged version. By default, it will detect the previous tag from + the NEWS file and will not update the versions in the source code. + + OPTIONAL FLAGS: + -t --tag Specify a previous tag to compare to. + Otherwise the NEWS file will be used. + + -d --dir The output directory for the jdiff JavaDocs. If no + directory is given, the default is jdiff-javadocs/{module}. + + -s --source The directory containing Autopsy's source code. + + -a --auto Automatically update version numbers (not dry). + + -h --help Prints this usage. 
+ """ + +# ==================================== # +# Main Functionality # +# ==================================== # + +# Where the magic happens +def main(): + global tag; global source; global docdir; global dry + tag = None; source = None; docdir = None; dry = True + + ret = args() + if ret: + print(usage()) + return 0 + printinfo() + + # ----------------------------------------------- + # 1) Clone Autopsy, checkout to given tag/commit + # 2) Get the modules in the clone and the source + # 3) Generate the xml comparison + # ----------------------------------------------- + if not del_dir("./build/" + tag): + print("\n\n=========================================") + print(" Failed to delete previous Autopsy clone.") + print(" Unable to continue...") + print("=========================================") + return 1 + tag_dir = os.path.abspath("./build/" + tag) + if not do_git(tag, tag_dir): + return 1 + sys.stdout.flush() + + tag_modules = find_modules(tag_dir) + source_modules = find_modules(source) + + printt("Generating jdiff XML reports...") + apiname_tag = tag + apiname_cur = "current" + gen_xml(tag_dir, tag_modules, apiname_tag) + gen_xml(source, source_modules, apiname_cur) + + printt("Deleting cloned Autopsy directory...") + print("Clone successfully deleted" if del_dir(tag_dir) else "Failed to delete clone") + sys.stdout.flush() + + # ----------------------------------------------------- + # 1) Seperate modules into added, similar, and removed + # 2) Compare XML for each module + # ----------------------------------------------------- + printt("Comparing modules found...") + similar_modules, added_modules, removed_modules = module_diff(source_modules, tag_modules) + if added_modules or removed_modules: + for m in added_modules: + print("+ Added " + m.name) + sys.stdout.flush() + for m in removed_modules: + print("- Removed " + m.name) + sys.stdout.flush() + else: + print("No added or removed modules") + sys.stdout.flush() + + printt("Comparing jdiff 
outputs...") + for module in similar_modules: + module.set_ret(compare_xml(module, apiname_tag, apiname_cur)) + print("Refer to the jdiff-javadocs folder for more details") + + # ------------------------------------------------------------ + # 1) Do versioning + # 2) Auto-update version numbers in files and the_modules list + # 3) Auto-update dependencies + # ------------------------------------------------------------ + printt("Auto-detecting version numbers and changes...") + for module in added_modules: + module.set_versions(get_versions(module, source)) + for module in similar_modules: + module.set_versions(get_versions(module, source)) + + added_modules = remove_correct_added(added_modules) + the_modules = similar_modules + added_modules + print_version_updates(the_modules) + + if not dry: + printt("Auto-updating version numbers...") + update_versions(the_modules, source) + print("All auto-updates complete") + + printt("Detecting and auto-updating dependencies...") + update_dependencies(the_modules, source) + + printt("Deleting jdiff XML...") + xml_dir = os.path.abspath("./build/jdiff-xml") + print("XML successfully deleted" if del_dir(xml_dir) else "Failed to delete XML") + + print("\n--- Script completed successfully ---") + return 0 + +# Start off the script +if __name__ == "__main__": + sys.exit(main()) From 9ac0c310ea1af8c5f8d3eae6c00903319b0d9d64 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 19 Nov 2013 18:17:34 -0500 Subject: [PATCH 179/179] Normalization of line endings --- Core/manifest.mf | 20 +- ExifParser/manifest.mf | 12 +- HashDatabase/manifest.mf | 14 +- HashDatabase/nbproject/project.properties | 12 +- .../autopsy/keywordsearch/Bundle.properties | 182 +- .../docs/keywordsearch-about.html | 162 +- test/README.txt | 26 +- test/script/Emailer.py | 98 +- test/script/regression.py | 3708 ++++++++--------- test/script/srcupdater.py | 374 +- 10 files changed, 2304 insertions(+), 2304 deletions(-) diff --git a/Core/manifest.mf 
b/Core/manifest.mf index 31bfec73de..7aa34c46dc 100644 --- a/Core/manifest.mf +++ b/Core/manifest.mf @@ -1,10 +1,10 @@ -Manifest-Version: 1.0 -OpenIDE-Module: org.sleuthkit.autopsy.core/9 -OpenIDE-Module-Localizing-Bundle: org/sleuthkit/autopsy/core/Bundle.properties -OpenIDE-Module-Layer: org/sleuthkit/autopsy/core/layer.xml -OpenIDE-Module-Implementation-Version: 9 -OpenIDE-Module-Requires: org.openide.windows.WindowManager, org.netbeans.api.javahelp.Help -AutoUpdate-Show-In-Client: true -AutoUpdate-Essential-Module: true -OpenIDE-Module-Install: org/sleuthkit/autopsy/core/Installer.class - +Manifest-Version: 1.0 +OpenIDE-Module: org.sleuthkit.autopsy.core/9 +OpenIDE-Module-Localizing-Bundle: org/sleuthkit/autopsy/core/Bundle.properties +OpenIDE-Module-Layer: org/sleuthkit/autopsy/core/layer.xml +OpenIDE-Module-Implementation-Version: 9 +OpenIDE-Module-Requires: org.openide.windows.WindowManager, org.netbeans.api.javahelp.Help +AutoUpdate-Show-In-Client: true +AutoUpdate-Essential-Module: true +OpenIDE-Module-Install: org/sleuthkit/autopsy/core/Installer.class + diff --git a/ExifParser/manifest.mf b/ExifParser/manifest.mf index dbf05fee2f..44ad288f51 100644 --- a/ExifParser/manifest.mf +++ b/ExifParser/manifest.mf @@ -1,6 +1,6 @@ -Manifest-Version: 1.0 -AutoUpdate-Show-In-Client: true -OpenIDE-Module: org.sleuthkit.autopsy.exifparser/3 -OpenIDE-Module-Implementation-Version: 9 -OpenIDE-Module-Layer: org/sleuthkit/autopsy/exifparser/layer.xml -OpenIDE-Module-Localizing-Bundle: org/sleuthkit/autopsy/exifparser/Bundle.properties +Manifest-Version: 1.0 +AutoUpdate-Show-In-Client: true +OpenIDE-Module: org.sleuthkit.autopsy.exifparser/3 +OpenIDE-Module-Implementation-Version: 9 +OpenIDE-Module-Layer: org/sleuthkit/autopsy/exifparser/layer.xml +OpenIDE-Module-Localizing-Bundle: org/sleuthkit/autopsy/exifparser/Bundle.properties diff --git a/HashDatabase/manifest.mf b/HashDatabase/manifest.mf index ba201a294c..b8c105413c 100644 --- a/HashDatabase/manifest.mf +++ 
b/HashDatabase/manifest.mf @@ -1,7 +1,7 @@ -Manifest-Version: 1.0 -AutoUpdate-Show-In-Client: true -OpenIDE-Module: org.sleuthkit.autopsy.hashdatabase/3 -OpenIDE-Module-Implementation-Version: 9 -OpenIDE-Module-Layer: org/sleuthkit/autopsy/hashdatabase/layer.xml -OpenIDE-Module-Localizing-Bundle: org/sleuthkit/autopsy/hashdatabase/Bundle.properties - +Manifest-Version: 1.0 +AutoUpdate-Show-In-Client: true +OpenIDE-Module: org.sleuthkit.autopsy.hashdatabase/3 +OpenIDE-Module-Implementation-Version: 9 +OpenIDE-Module-Layer: org/sleuthkit/autopsy/hashdatabase/layer.xml +OpenIDE-Module-Localizing-Bundle: org/sleuthkit/autopsy/hashdatabase/Bundle.properties + diff --git a/HashDatabase/nbproject/project.properties b/HashDatabase/nbproject/project.properties index 88f75855cb..e633758f8a 100644 --- a/HashDatabase/nbproject/project.properties +++ b/HashDatabase/nbproject/project.properties @@ -1,6 +1,6 @@ -javac.source=1.7 -javac.compilerargs=-Xlint -Xlint:-serial -license.file=../LICENSE-2.0.txt -nbm.homepage=http://www.sleuthkit.org/autopsy/ -nbm.needs.restart=true -spec.version.base=1.3 +javac.source=1.7 +javac.compilerargs=-Xlint -Xlint:-serial +license.file=../LICENSE-2.0.txt +nbm.homepage=http://www.sleuthkit.org/autopsy/ +nbm.needs.restart=true +spec.version.base=1.3 diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties index 65dbef957c..2d260d1242 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties @@ -1,91 +1,91 @@ -OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=\ - Keyword Search ingest module.\n\n\ - The module indexes files found in the disk image at ingest time. 
\ - It then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\n\ - The module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword seach bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. -OpenIDE-Module-Name=KeywordSearch -ListBundleName=Keyword Lists -ListBundleConfig=Keyword List Configuration -IndexProgressPanel.statusText.text=Status text -IndexProgressPanel.cancelButton.text=Cancel -ExtractedContentPanel.hitLabel.text=Matches on page: -ExtractedContentPanel.hitCountLabel.text=- -ExtractedContentPanel.hitOfLabel.text=of -ExtractedContentPanel.hitTotalLabel.text=- -ExtractedContentPanel.hitButtonsLabel.text=Match -ExtractedContentPanel.hitPreviousButton.text= -ExtractedContentPanel.hitNextButton.text= -ExtractedContentPanel.copyMenuItem.text=Copy -ExtractedContentPanel.selectAllMenuItem.text=Select All -KeywordSearchEditListPanel.saveListButton.text=Copy List -KeywordSearchEditListPanel.addWordField.text= -KeywordSearchEditListPanel.addWordButton.text=Add -KeywordSearchEditListPanel.chRegex.text=Regular Expression -KeywordSearchEditListPanel.deleteWordButton.text=Remove Selected -KeywordSearchEditListPanel.cutMenuItem.text=Cut -KeywordSearchEditListPanel.selectAllMenuItem.text=Select All -KeywordSearchEditListPanel.pasteMenuItem.text=Paste -KeywordSearchEditListPanel.copyMenuItem.text=Copy -KeywordSearchEditListPanel.exportButton.text=Export List -KeywordSearchEditListPanel.deleteListButton.text=Delete List -KeywordSearchListsManagementPanel.newListButton.text=New List -KeywordSearchEditListPanel.useForIngestCheckbox.text=Enable for ingest -KeywordSearchListsManagementPanel.importButton.text=Import List -KeywordSearchPanel.searchBox.text=Search... 
-KeywordSearchPanel.regExCheckboxMenuItem.text=Use Regular Expressions -KeywordSearchPanel.settingsLabel.text= -KeywordSearchListsViewerPanel.searchAddButton.text=Search -KeywordSearchListsViewerPanel.manageListsButton.text=Manage Lists -KeywordSearchListsViewerPanel.ingestIndexLabel.text=Files Indexed: -KeywordSearchEditListPanel.selectorsCombo.toolTipText=Regular Expression selector type (optional) -KeywordSearchPanel.searchButton.text= -KeywordSearchPanel.cutMenuItem.text=Cut -KeywordSearchPanel.copyMenuItem.text=Copy -KeywordSearchPanel.pasteMenuItem.text=Paste -KeywordSearchPanel.selectAllMenuItem.text=Select All -ExtractedContentPanel.pageButtonsLabel.text=Page -ExtractedContentPanel.pageNextButton.text= -ExtractedContentPanel.pagePreviousButton.actionCommand=pagePreviousButton -ExtractedContentPanel.pagePreviousButton.text= -ExtractedContentPanel.pagesLabel.text=Page: -ExtractedContentPanel.pageOfLabel.text=of -ExtractedContentPanel.pageCurLabel.text=- -ExtractedContentPanel.pageTotalLabel.text=- -ExtractedContentPanel.hitLabel.toolTipText= -KeywordSearchEditListPanel.ingestMessagesCheckbox.text=Enable sending messages to inbox during ingest -KeywordSearchEditListPanel.ingestMessagesCheckbox.toolTipText=Send messages during triage / ingest when hits on keyword from this list occur -KeywordSearchConfigurationPanel2.skipNSRLCheckBox.text=Do not add files in NSRL (known files) to keyword index during ingest -KeywordSearchConfigurationPanel2.skipNSRLCheckBox.toolTipText=Requires Hash DB service to had run previously, or be selected for next ingest. 
-KeywordSearchConfigurationPanel2.filesIndexedValue.text=- -KeywordSearchConfigurationPanel2.filesIndexedLabel.text=Files in keyword index: -KeywordSearchIngestSimplePanel.languagesLabel.text=Scripts enabled for string extraction from unknown file types: -KeywordSearchIngestSimplePanel.languagesValLabel.text=- -KeywordSearchIngestSimplePanel.languagesLabel.toolTipText=Scripts enabled for string extraction from unknown file types. Changes can be done in Advanced Settings. -KeywordSearchIngestSimplePanel.languagesValLabel.toolTipText= -KeywordSearchConfigurationPanel3.languagesLabel.text=Enabled scripts (languages): -KeywordSearchConfigurationPanel2.chunksLabel.text=Chunks in keyword index: -KeywordSearchConfigurationPanel2.chunksValLabel.text=- -KeywordSearchConfigurationPanel3.enableUTF8Checkbox.text=Enable UTF8 text extraction -KeywordSearchConfigurationPanel3.enableUTF16Checkbox.text=Enable UTF16LE and UTF16BE string extraction -KeywordSearchEditListPanel.keywordOptionsLabel.text=Keyword Options -KeywordSearchEditListPanel.listOptionsLabel.text=List Options -KeywordSearchConfigurationPanel3.ingestSettingsLabel.text=Ingest settings for string extraction from unknown file types (changes effective on next ingest): -KeywordSearchConfigurationPanel2.settingsLabel.text=Settings -KeywordSearchConfigurationPanel2.informationLabel.text=Information -KeywordSearchListsManagementPanel.keywordListsLabel.text=Keyword Lists: -KeywordSearchEditListPanel.keywordsLabel.text=Keywords: -KeywordSearchConfigurationPanel2.timeRadioButton1.toolTipText=20 mins. 
(fastest ingest time) -KeywordSearchConfigurationPanel2.timeRadioButton1.text=20 minutes (slowest feedback, fastest ingest) -KeywordSearchConfigurationPanel2.timeRadioButton2.toolTipText=10 minutes (faster overall ingest time than default) -KeywordSearchConfigurationPanel2.timeRadioButton2.text=10 minutes (slower feedback, faster ingest) -KeywordSearchConfigurationPanel2.timeRadioButton3.toolTipText=5 minutes (overall ingest time will be longer) -KeywordSearchConfigurationPanel2.timeRadioButton3.text=5 minutes (default) -KeywordSearchIngestSimplePanel.encodingsLabel.text=Encodings: -KeywordSearchIngestSimplePanel.keywordSearchEncodings.text=- -KeywordSearchIngestSimplePanel.titleLabel.text=Select keyword lists to enable during ingest: -OpenIDE-Module-Short-Description=Keyword Search ingest module, extracted text viewer and keyword search tools -KeywordSearchListsViewerPanel.manageListsButton.toolTipText=Manage keyword lists, their settings and associated keywords. The settings are shared among all cases. -KeywordSearchConfigurationPanel2.frequencyLabel.text=Results update frequency during ingest: -KeywordSearchConfigurationPanel2.timeRadioButton4.text_1=1 minute (faster feedback, longest ingest) -KeywordSearchConfigurationPanel2.timeRadioButton4.toolTipText=1 minute (overall ingest time will be longest) +OpenIDE-Module-Display-Category=Ingest Module +OpenIDE-Module-Long-Description=\ + Keyword Search ingest module.\n\n\ + The module indexes files found in the disk image at ingest time. \ + It then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\n\ + The module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword seach bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. 
+OpenIDE-Module-Name=KeywordSearch +ListBundleName=Keyword Lists +ListBundleConfig=Keyword List Configuration +IndexProgressPanel.statusText.text=Status text +IndexProgressPanel.cancelButton.text=Cancel +ExtractedContentPanel.hitLabel.text=Matches on page: +ExtractedContentPanel.hitCountLabel.text=- +ExtractedContentPanel.hitOfLabel.text=of +ExtractedContentPanel.hitTotalLabel.text=- +ExtractedContentPanel.hitButtonsLabel.text=Match +ExtractedContentPanel.hitPreviousButton.text= +ExtractedContentPanel.hitNextButton.text= +ExtractedContentPanel.copyMenuItem.text=Copy +ExtractedContentPanel.selectAllMenuItem.text=Select All +KeywordSearchEditListPanel.saveListButton.text=Copy List +KeywordSearchEditListPanel.addWordField.text= +KeywordSearchEditListPanel.addWordButton.text=Add +KeywordSearchEditListPanel.chRegex.text=Regular Expression +KeywordSearchEditListPanel.deleteWordButton.text=Remove Selected +KeywordSearchEditListPanel.cutMenuItem.text=Cut +KeywordSearchEditListPanel.selectAllMenuItem.text=Select All +KeywordSearchEditListPanel.pasteMenuItem.text=Paste +KeywordSearchEditListPanel.copyMenuItem.text=Copy +KeywordSearchEditListPanel.exportButton.text=Export List +KeywordSearchEditListPanel.deleteListButton.text=Delete List +KeywordSearchListsManagementPanel.newListButton.text=New List +KeywordSearchEditListPanel.useForIngestCheckbox.text=Enable for ingest +KeywordSearchListsManagementPanel.importButton.text=Import List +KeywordSearchPanel.searchBox.text=Search... 
+KeywordSearchPanel.regExCheckboxMenuItem.text=Use Regular Expressions +KeywordSearchPanel.settingsLabel.text= +KeywordSearchListsViewerPanel.searchAddButton.text=Search +KeywordSearchListsViewerPanel.manageListsButton.text=Manage Lists +KeywordSearchListsViewerPanel.ingestIndexLabel.text=Files Indexed: +KeywordSearchEditListPanel.selectorsCombo.toolTipText=Regular Expression selector type (optional) +KeywordSearchPanel.searchButton.text= +KeywordSearchPanel.cutMenuItem.text=Cut +KeywordSearchPanel.copyMenuItem.text=Copy +KeywordSearchPanel.pasteMenuItem.text=Paste +KeywordSearchPanel.selectAllMenuItem.text=Select All +ExtractedContentPanel.pageButtonsLabel.text=Page +ExtractedContentPanel.pageNextButton.text= +ExtractedContentPanel.pagePreviousButton.actionCommand=pagePreviousButton +ExtractedContentPanel.pagePreviousButton.text= +ExtractedContentPanel.pagesLabel.text=Page: +ExtractedContentPanel.pageOfLabel.text=of +ExtractedContentPanel.pageCurLabel.text=- +ExtractedContentPanel.pageTotalLabel.text=- +ExtractedContentPanel.hitLabel.toolTipText= +KeywordSearchEditListPanel.ingestMessagesCheckbox.text=Enable sending messages to inbox during ingest +KeywordSearchEditListPanel.ingestMessagesCheckbox.toolTipText=Send messages during triage / ingest when hits on keyword from this list occur +KeywordSearchConfigurationPanel2.skipNSRLCheckBox.text=Do not add files in NSRL (known files) to keyword index during ingest +KeywordSearchConfigurationPanel2.skipNSRLCheckBox.toolTipText=Requires Hash DB service to had run previously, or be selected for next ingest. 
+KeywordSearchConfigurationPanel2.filesIndexedValue.text=- +KeywordSearchConfigurationPanel2.filesIndexedLabel.text=Files in keyword index: +KeywordSearchIngestSimplePanel.languagesLabel.text=Scripts enabled for string extraction from unknown file types: +KeywordSearchIngestSimplePanel.languagesValLabel.text=- +KeywordSearchIngestSimplePanel.languagesLabel.toolTipText=Scripts enabled for string extraction from unknown file types. Changes can be done in Advanced Settings. +KeywordSearchIngestSimplePanel.languagesValLabel.toolTipText= +KeywordSearchConfigurationPanel3.languagesLabel.text=Enabled scripts (languages): +KeywordSearchConfigurationPanel2.chunksLabel.text=Chunks in keyword index: +KeywordSearchConfigurationPanel2.chunksValLabel.text=- +KeywordSearchConfigurationPanel3.enableUTF8Checkbox.text=Enable UTF8 text extraction +KeywordSearchConfigurationPanel3.enableUTF16Checkbox.text=Enable UTF16LE and UTF16BE string extraction +KeywordSearchEditListPanel.keywordOptionsLabel.text=Keyword Options +KeywordSearchEditListPanel.listOptionsLabel.text=List Options +KeywordSearchConfigurationPanel3.ingestSettingsLabel.text=Ingest settings for string extraction from unknown file types (changes effective on next ingest): +KeywordSearchConfigurationPanel2.settingsLabel.text=Settings +KeywordSearchConfigurationPanel2.informationLabel.text=Information +KeywordSearchListsManagementPanel.keywordListsLabel.text=Keyword Lists: +KeywordSearchEditListPanel.keywordsLabel.text=Keywords: +KeywordSearchConfigurationPanel2.timeRadioButton1.toolTipText=20 mins. 
(fastest ingest time) +KeywordSearchConfigurationPanel2.timeRadioButton1.text=20 minutes (slowest feedback, fastest ingest) +KeywordSearchConfigurationPanel2.timeRadioButton2.toolTipText=10 minutes (faster overall ingest time than default) +KeywordSearchConfigurationPanel2.timeRadioButton2.text=10 minutes (slower feedback, faster ingest) +KeywordSearchConfigurationPanel2.timeRadioButton3.toolTipText=5 minutes (overall ingest time will be longer) +KeywordSearchConfigurationPanel2.timeRadioButton3.text=5 minutes (default) +KeywordSearchIngestSimplePanel.encodingsLabel.text=Encodings: +KeywordSearchIngestSimplePanel.keywordSearchEncodings.text=- +KeywordSearchIngestSimplePanel.titleLabel.text=Select keyword lists to enable during ingest: +OpenIDE-Module-Short-Description=Keyword Search ingest module, extracted text viewer and keyword search tools +KeywordSearchListsViewerPanel.manageListsButton.toolTipText=Manage keyword lists, their settings and associated keywords. The settings are shared among all cases. +KeywordSearchConfigurationPanel2.frequencyLabel.text=Results update frequency during ingest: +KeywordSearchConfigurationPanel2.timeRadioButton4.text_1=1 minute (faster feedback, longest ingest) +KeywordSearchConfigurationPanel2.timeRadioButton4.toolTipText=1 minute (overall ingest time will be longest) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/docs/keywordsearch-about.html b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/docs/keywordsearch-about.html index ec2a45f056..b4290c98de 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/docs/keywordsearch-about.html +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/docs/keywordsearch-about.html @@ -1,81 +1,81 @@ - - - - - Keyword Search - - - - -

      Keyword Search

      -

      - Autopsy ships a keyword search module, which provides the ingest capability - and also supports a manual text search mode. -

      -

      The keyword search ingest module extracts text from the files on the image being ingested and adds them to the index that can then be searched.

      -

      - Autopsy tries its best to extract maximum amount of text from the files being indexed. - First, the indexing will try to extract text from supported file formats, such as pure text file format, MS Office Documents, PDF files, Email files, and many others. - If the file is not supported by the standard text extractor, Autopsy will fallback to string extraction algorithm. - String extraction on unknown file formats or arbitrary binary files can often still extract a good amount of text from the file, often good enough to provide additional clues. - However, string extraction will not be able to extract text strings from binary files that have been encrypted. -

      -

      - Autopsy ships with some built-in lists that define regular expressions and enable user to search for Phone Numbers, IP addresses, URLs and E-mail addresses. - However, enabling some of these very general lists can produce a very large number of hits, many of them can be false-positives. -

      -

      - Once files are in the index, they can be searched quickly for specific keywords, regular expressions, - or using keyword search lists that can contain a mixture of keywords and regular expressions. - Search queries can be executed automatically by the ingest during the ingest run, or at the end of the ingest, depending on the current settings and the time it takes to ingest the image. -

      -

      Search queries can also be executed manually by the user at any time, as long as there are some files already indexed and ready to be searched.

      -

      - Keyword search module will save the search results regardless whether the search is performed by the ingest process, or manually by the user. - The saved results are available in the Directory Tree in the left hand side panel. -

      -

      - To see keyword search results in real-time while ingest is running, add keyword lists using the - Keyword Search Configuration Dialog - and select the "Use during ingest" check box. - You can select "Enable sending messages to inbox during ingest" per list, if the hits on that list should be reported in the Inbox, which is recommended for very specific searches. -

      -

      - See (Ingest) - for more information on ingest in general. -

      -

      - Once there are files in the index, the Keyword Search Bar - will be available for use to manually search at any time. -

      - - - + + + + + Keyword Search + + + + +

      Keyword Search

      +

      + Autopsy ships a keyword search module, which provides the ingest capability + and also supports a manual text search mode. +

      +

      The keyword search ingest module extracts text from the files on the image being ingested and adds them to the index that can then be searched.

      +

      + Autopsy tries its best to extract maximum amount of text from the files being indexed. + First, the indexing will try to extract text from supported file formats, such as pure text file format, MS Office Documents, PDF files, Email files, and many others. + If the file is not supported by the standard text extractor, Autopsy will fallback to string extraction algorithm. + String extraction on unknown file formats or arbitrary binary files can often still extract a good amount of text from the file, often good enough to provide additional clues. + However, string extraction will not be able to extract text strings from binary files that have been encrypted. +

      +

      + Autopsy ships with some built-in lists that define regular expressions and enable user to search for Phone Numbers, IP addresses, URLs and E-mail addresses. + However, enabling some of these very general lists can produce a very large number of hits, many of them can be false-positives. +

      +

      + Once files are in the index, they can be searched quickly for specific keywords, regular expressions, + or using keyword search lists that can contain a mixture of keywords and regular expressions. + Search queries can be executed automatically by the ingest during the ingest run, or at the end of the ingest, depending on the current settings and the time it takes to ingest the image. +

      +

      Search queries can also be executed manually by the user at any time, as long as there are some files already indexed and ready to be searched.

      +

      + Keyword search module will save the search results regardless whether the search is performed by the ingest process, or manually by the user. + The saved results are available in the Directory Tree in the left hand side panel. +

      +

      + To see keyword search results in real-time while ingest is running, add keyword lists using the + Keyword Search Configuration Dialog + and select the "Use during ingest" check box. + You can select "Enable sending messages to inbox during ingest" per list, if the hits on that list should be reported in the Inbox, which is recommended for very specific searches. +

      +

      + See (Ingest) + for more information on ingest in general. +

      +

      + Once there are files in the index, the Keyword Search Bar + will be available for use to manually search at any time. +

      + + + diff --git a/test/README.txt b/test/README.txt index d0064b4f95..854f5e1a33 100644 --- a/test/README.txt +++ b/test/README.txt @@ -1,13 +1,13 @@ -This folder contains the data and scripts required to run regression tests -for Autopsy. There is a 'Testing' folder in the root directory that contains -the Java code that drives Autopsy to perform the tests. - -To run these tests: -- You will need python3. We run this from within Cygwin. -- Download the input images by typing 'ant test-download-imgs' in the root Autopsy folder. - This will place images in 'test/input'. -- Run 'python3 regression.py' from inside of the 'test/scripts' folder. -- Alternatively, run 'python3 regression.py -l [CONFIGFILE] to run the tests on a specified - list of images using a configuration file. See config.xml in the 'test/scripts' folder to - see configuration file formatting. -- Run 'python3 regression.py -h' to see other options. +This folder contains the data and scripts required to run regression tests +for Autopsy. There is a 'Testing' folder in the root directory that contains +the Java code that drives Autopsy to perform the tests. + +To run these tests: +- You will need python3. We run this from within Cygwin. +- Download the input images by typing 'ant test-download-imgs' in the root Autopsy folder. + This will place images in 'test/input'. +- Run 'python3 regression.py' from inside of the 'test/scripts' folder. +- Alternatively, run 'python3 regression.py -l [CONFIGFILE] to run the tests on a specified + list of images using a configuration file. See config.xml in the 'test/scripts' folder to + see configuration file formatting. +- Run 'python3 regression.py -h' to see other options. 
diff --git a/test/script/Emailer.py b/test/script/Emailer.py index 5d12e6afa3..7e661e12ea 100644 --- a/test/script/Emailer.py +++ b/test/script/Emailer.py @@ -1,49 +1,49 @@ -import smtplib -from email.mime.image import MIMEImage -from email.mime.multipart import MIMEMultipart -from email.mime.text import MIMEText -from email.mime.base import MIMEBase -from email import encoders -import xml -from xml.dom.minidom import parse, parseString - -def send_email(to, server, subj, body, attachments): - """Send an email with the given information. - - Args: - to: a String, the email address to send the email to - server: a String, the mail server to send from - subj: a String, the subject line of the message - body: a String, the body of the message - attachments: a listof_pathto_File, the attachements to include - """ - msg = MIMEMultipart() - msg['Subject'] = subj - # me == the sender's email address - # family = the list of all recipients' email addresses - msg['From'] = 'AutopsyTest' - msg['To'] = to - msg.preamble = 'This is a test' - container = MIMEText(body, 'plain') - msg.attach(container) - Build_email(msg, attachments) - s = smtplib.SMTP(server) - try: - print('Sending Email') - s.sendmail(msg['From'], msg['To'], msg.as_string()) - except Exception as e: - print(str(e)) - s.quit() - -def Build_email(msg, attachments): - for file in attachments: - part = MIMEBase('application', "octet-stream") - atach = open(file, "rb") - attch = atach.read() - noml = file.split("\\") - nom = noml[len(noml)-1] - part.set_payload(attch) - encoders.encode_base64(part) - part.add_header('Content-Disposition', 'attachment; filename="' + nom + '"') - msg.attach(part) - +import smtplib +from email.mime.image import MIMEImage +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText +from email.mime.base import MIMEBase +from email import encoders +import xml +from xml.dom.minidom import parse, parseString + +def send_email(to, server, subj, body, attachments): 
+ """Send an email with the given information. + + Args: + to: a String, the email address to send the email to + server: a String, the mail server to send from + subj: a String, the subject line of the message + body: a String, the body of the message + attachments: a listof_pathto_File, the attachements to include + """ + msg = MIMEMultipart() + msg['Subject'] = subj + # me == the sender's email address + # family = the list of all recipients' email addresses + msg['From'] = 'AutopsyTest' + msg['To'] = to + msg.preamble = 'This is a test' + container = MIMEText(body, 'plain') + msg.attach(container) + Build_email(msg, attachments) + s = smtplib.SMTP(server) + try: + print('Sending Email') + s.sendmail(msg['From'], msg['To'], msg.as_string()) + except Exception as e: + print(str(e)) + s.quit() + +def Build_email(msg, attachments): + for file in attachments: + part = MIMEBase('application', "octet-stream") + atach = open(file, "rb") + attch = atach.read() + noml = file.split("\\") + nom = noml[len(noml)-1] + part.set_payload(attch) + encoders.encode_base64(part) + part.add_header('Content-Disposition', 'attachment; filename="' + nom + '"') + msg.attach(part) + diff --git a/test/script/regression.py b/test/script/regression.py index b2ad319963..6c640823ed 100644 --- a/test/script/regression.py +++ b/test/script/regression.py @@ -1,1854 +1,1854 @@ -#!/usr/bin/python -# -*- coding: utf_8 -*- - - # Autopsy Forensic Browser - # - # Copyright 2013 Basis Technology Corp. - # - # Licensed under the Apache License, Version 2.0 (the "License"); - # you may not use this file except in compliance with the License. - # You may obtain a copy of the License at - # - # http://www.apache.org/licenses/LICENSE-2.0 - # - # Unless required by applicable law or agreed to in writing, software - # distributed under the License is distributed on an "AS IS" BASIS, - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- # See the License for the specific language governing permissions and - # limitations under the License. -from tskdbdiff import TskDbDiff, TskDbDiffException -import codecs -import datetime -import logging -import os -import re -import shutil -import socket -import sqlite3 -import subprocess -import sys -from sys import platform as _platform -import time -import traceback -import xml -from time import localtime, strftime -from xml.dom.minidom import parse, parseString -import smtplib -from email.mime.image import MIMEImage -from email.mime.multipart import MIMEMultipart -from email.mime.text import MIMEText -import re -import zipfile -import zlib -import Emailer -import srcupdater -from regression_utils import * - -# -# Please read me... -# -# This is the regression testing Python script. -# It uses an ant command to run build.xml for RegressionTest.java -# -# The code is cleanly sectioned and commented. -# Please follow the current formatting. -# It is a long and potentially confusing script. -# -# Variable, function, and class names are written in Python conventions: -# this_is_a_variable this_is_a_function() ThisIsAClass -# -# - - -# Data Definitions: -# -# pathto_X: A path to type X. 
-# ConfigFile: An XML file formatted according to the template in myconfig.xml -# ParsedConfig: A dom object that represents a ConfigFile -# SQLCursor: A cursor recieved from a connection to an SQL database -# Nat: A Natural Number -# Image: An image -# - -# Enumeration of database types used for the simplification of generating database paths -DBType = enum('OUTPUT', 'GOLD', 'BACKUP') - -# Common filename of the output and gold databases (although they are in different directories -DB_FILENAME = "autopsy.db" - -# Backup database filename -BACKUP_DB_FILENAME = "autopsy_backup.db" - -# TODO: Double check this purpose statement -# Folder name for gold standard database testing -AUTOPSY_TEST_CASE = "AutopsyTestCase" - -# TODO: Double check this purpose statement -# The filename of the log to store error messages -COMMON_LOG = "AutopsyErrors.txt" - -Day = 0 - -#----------------------# -# Main # -#----------------------# -def main(): - """Parse the command-line arguments, create the configuration, and run the tests.""" - args = Args() - parse_result = args.parse() - test_config = TestConfiguration(args) - # The arguments were given wrong: - if not parse_result: - return - if(not args.fr): - antin = ["ant"] - antin.append("-f") - antin.append(os.path.join("..","..","build.xml")) - antin.append("test-download-imgs") - if SYS is OS.CYGWIN: - subprocess.call(antin) - elif SYS is OS.WIN: - theproc = subprocess.Popen(antin, shell = True, stdout=subprocess.PIPE) - theproc.communicate() - # Otherwise test away! - TestRunner.run_tests(test_config) - - -class TestRunner(object): - """A collection of functions to run the regression tests.""" - - def run_tests(test_config): - """Run the tests specified by the main TestConfiguration. 
- - Executes the AutopsyIngest for each image and dispatches the results based on - the mode (rebuild or testing) - """ - test_data_list = [ TestData(image, test_config) for image in test_config.images ] - - Reports.html_add_images(test_config.html_log, test_config.images) - - logres =[] - for test_data in test_data_list: - Errors.clear_print_logs() - Errors.set_testing_phase(test_data.image) - if not (test_config.args.rebuild or os.path.exists(test_data.gold_archive)): - msg = "Gold standard doesn't exist, skipping image:" - Errors.print_error(msg) - Errors.print_error(test_data.gold_archive) - continue - TestRunner._run_autopsy_ingest(test_data) - - if test_config.args.rebuild: - TestRunner.rebuild(test_data) - else: - logres.append(TestRunner._run_test(test_data)) - test_data.printout = Errors.printout - test_data.printerror = Errors.printerror - - Reports.write_html_foot(test_config.html_log) - # TODO: move this elsewhere - if (len(logres)>0): - for lm in logres: - for ln in lm: - Errors.add_email_msg(ln) - - # TODO: possibly worth putting this in a sub method - if all([ test_data.overall_passed for test_data in test_data_list ]): - Errors.add_email_msg("All images passed.\n") - else: - msg = "The following images failed:\n" - for test_data in test_data_list: - if not test_data.overall_passed: - msg += "\t" + test_data.image + "\n" - Errors.add_email_msg(msg) - html = open(test_config.html_log) - Errors.add_email_attachment(html.name) - html.close() - - if test_config.email_enabled: - Emailer.send_email(test_config.mail_to, test_config.mail_server, - test_config.mail_subject, Errors.email_body, Errors.email_attachs) - - def _run_autopsy_ingest(test_data): - """Run Autopsy ingest for the image in the given TestData. - - Also generates the necessary logs for rebuilding or diff. - - Args: - test_data: the TestData to run the ingest on. 
- """ - if image_type(test_data.image_file) == IMGTYPE.UNKNOWN: - Errors.print_error("Error: Image type is unrecognized:") - Errors.print_error(test_data.image_file + "\n") - return - - logging.debug("--------------------") - logging.debug(test_data.image_name) - logging.debug("--------------------") - TestRunner._run_ant(test_data) - time.sleep(2) # Give everything a second to process - - try: - # Dump the database before we diff or use it for rebuild - TskDbDiff.dump_output_db(test_data.get_db_path(DBType.OUTPUT), test_data.get_db_dump_path(DBType.OUTPUT), - test_data.get_sorted_data_path(DBType.OUTPUT)) - except sqlite3.OperationalError as e: - print("Ingest did not run properly.", - "Make sure no other instances of Autopsy are open and try again.") - sys.exit() - - # merges logs into a single log for later diff / rebuild - copy_logs(test_data) - Logs.generate_log_data(test_data) - - TestRunner._handle_solr(test_data) - TestRunner._handle_exception(test_data) - - #TODO: figure out return type of _run_test (logres) - def _run_test(test_data): - """Compare the results of the output to the gold standard. - - Args: - test_data: the TestData - - Returns: - logres? 
- """ - TestRunner._extract_gold(test_data) - - # Look for core exceptions - # @@@ Should be moved to TestResultsDiffer, but it didn't know about logres -- need to look into that - logres = Logs.search_common_log("TskCoreException", test_data) - - TestResultsDiffer.run_diff(test_data) - test_data.overall_passed = (test_data.html_report_passed and - test_data.errors_diff_passed and test_data.db_diff_passed) - - Reports.generate_reports(test_data) - if(not test_data.overall_passed): - Errors.add_email_attachment(test_data.common_log_path) - return logres - - def _extract_gold(test_data): - """Extract gold archive file to output/gold/tmp/ - - Args: - test_data: the TestData - """ - extrctr = zipfile.ZipFile(test_data.gold_archive, 'r', compression=zipfile.ZIP_DEFLATED) - extrctr.extractall(test_data.main_config.gold) - extrctr.close - time.sleep(2) - - def _handle_solr(test_data): - """Clean up SOLR index if in keep mode (-k). - - Args: - test_data: the TestData - """ - if not test_data.main_config.args.keep: - if clear_dir(test_data.solr_index): - print_report([], "DELETE SOLR INDEX", "Solr index deleted.") - else: - print_report([], "KEEP SOLR INDEX", "Solr index has been kept.") - - def _handle_exception(test_data): - """If running in exception mode, print exceptions to log. - - Args: - test_data: the TestData - """ - if test_data.main_config.args.exception: - exceptions = search_logs(test_data.main_config.args.exception_string, test_data) - okay = ("No warnings or exceptions found containing text '" + - test_data.main_config.args.exception_string + "'.") - print_report(exceptions, "EXCEPTION", okay) - - def rebuild(test_data): - """Rebuild the gold standard with the given TestData. - - Copies the test-generated database and html report files into the gold directory. 
- """ - test_config = test_data.main_config - # Errors to print - errors = [] - # Delete the current gold standards - gold_dir = test_config.img_gold - clear_dir(test_config.img_gold) - tmpdir = make_path(gold_dir, test_data.image_name) - dbinpth = test_data.get_db_path(DBType.OUTPUT) - dboutpth = make_path(tmpdir, DB_FILENAME) - dataoutpth = make_path(tmpdir, test_data.image_name + "SortedData.txt") - dbdumpinpth = test_data.get_db_dump_path(DBType.OUTPUT) - dbdumpoutpth = make_path(tmpdir, test_data.image_name + "DBDump.txt") - if not os.path.exists(test_config.img_gold): - os.makedirs(test_config.img_gold) - if not os.path.exists(tmpdir): - os.makedirs(tmpdir) - try: - shutil.copy(dbinpth, dboutpth) - if file_exists(test_data.get_sorted_data_path(DBType.OUTPUT)): - shutil.copy(test_data.get_sorted_data_path(DBType.OUTPUT), dataoutpth) - shutil.copy(dbdumpinpth, dbdumpoutpth) - error_pth = make_path(tmpdir, test_data.image_name+"SortedErrors.txt") - shutil.copy(test_data.sorted_log, error_pth) - except IOError as e: - Errors.print_error(str(e)) - Errors.add_email_message("Not rebuilt properly") - print(str(e)) - print(traceback.format_exc()) - # Rebuild the HTML report - output_html_report_dir = test_data.get_html_report_path(DBType.OUTPUT) - gold_html_report_dir = make_path(tmpdir, "Report") - - try: - shutil.copytree(output_html_report_dir, gold_html_report_dir) - except OSError as e: - errors.append(e.error()) - except Exception as e: - errors.append("Error: Unknown fatal error when rebuilding the gold html report.") - errors.append(str(e) + "\n") - print(traceback.format_exc()) - oldcwd = os.getcwd() - zpdir = gold_dir - os.chdir(zpdir) - os.chdir("..") - img_gold = "tmp" - img_archive = make_path(test_data.image_name+"-archive.zip") - comprssr = zipfile.ZipFile(img_archive, 'w',compression=zipfile.ZIP_DEFLATED) - TestRunner.zipdir(img_gold, comprssr) - comprssr.close() - os.chdir(oldcwd) - del_dir(test_config.img_gold) - okay = "Sucessfully rebuilt all gold 
standards." - print_report(errors, "REBUILDING", okay) - - def zipdir(path, zip): - for root, dirs, files in os.walk(path): - for file in files: - zip.write(os.path.join(root, file)) - - def _run_ant(test_data): - """Construct and run the ant build command for the given TestData. - - Tests Autopsy by calling RegressionTest.java via the ant build file. - - Args: - test_data: the TestData - """ - test_config = test_data.main_config - # Set up the directories - if dir_exists(test_data.output_path): - shutil.rmtree(test_data.output_path) - os.makedirs(test_data.output_path) - test_data.ant = ["ant"] - test_data.ant.append("-v") - test_data.ant.append("-f") - # case.ant.append(case.build_path) - test_data.ant.append(os.path.join("..","..","Testing","build.xml")) - test_data.ant.append("regression-test") - test_data.ant.append("-l") - test_data.ant.append(test_data.antlog_dir) - test_data.ant.append("-Dimg_path=" + test_data.image_file) - test_data.ant.append("-Dknown_bad_path=" + test_config.known_bad_path) - test_data.ant.append("-Dkeyword_path=" + test_config.keyword_path) - test_data.ant.append("-Dnsrl_path=" + test_config.nsrl_path) - test_data.ant.append("-Dgold_path=" + test_config.gold) - test_data.ant.append("-Dout_path=" + - make_local_path(test_data.output_path)) - test_data.ant.append("-Dignore_unalloc=" + "%s" % test_config.args.unallocated) - test_data.ant.append("-Dtest.timeout=" + str(test_config.timeout)) - - Errors.print_out("Ingesting Image:\n" + test_data.image_file + "\n") - Errors.print_out("CMD: " + " ".join(test_data.ant)) - Errors.print_out("Starting test...\n") - antoutpth = make_local_path(test_data.main_config.output_dir, "antRunOutput.txt") - antout = open(antoutpth, "a") - if SYS is OS.CYGWIN: - subprocess.call(test_data.ant, stdout=subprocess.PIPE) - elif SYS is OS.WIN: - theproc = subprocess.Popen(test_data.ant, shell = True, stdout=subprocess.PIPE) - theproc.communicate() - antout.close() - - -class TestData(object): - """Container for 
the input and output of a single image. - - Represents data for the test of a single image, including path to the image, - database paths, etc. - - Attributes: - main_config: the global TestConfiguration - ant: a listof_String, the ant command for this TestData - image_file: a pathto_Image, the image for this TestData - image: a String, the image file's name - image_name: a String, the image file's name with a trailing (0) - output_path: pathto_Dir, the output directory for this TestData - autopsy_data_file: a pathto_File, the IMAGE_NAMEAutopsy_data.txt file - warning_log: a pathto_File, the AutopsyLogs.txt file - antlog_dir: a pathto_File, the antlog.txt file - test_dbdump: a pathto_File, the database dump, IMAGENAMEDump.txt - common_log_path: a pathto_File, the IMAGE_NAMECOMMON_LOG file - sorted_log: a pathto_File, the IMAGENAMESortedErrors.txt file - reports_dir: a pathto_Dir, the AutopsyTestCase/Reports folder - gold_data_dir: a pathto_Dir, the gold standard directory - gold_archive: a pathto_File, the gold standard archive - logs_dir: a pathto_Dir, the location where autopsy logs are stored - solr_index: a pathto_Dir, the locatino of the solr index - html_report_passed: a boolean, did the HTML report diff pass? - errors_diff_passed: a boolean, did the error diff pass? - db_diff_passed: a boolean, did the db diff pass? - overall_passed: a boolean, did the test pass? 
- total_test_time: a String representation of the test duration - start_date: a String representation of this TestData's start date - end_date: a String representation of the TestData's end date - total_ingest_time: a String representation of the total ingest time - artifact_count: a Nat, the number of artifacts - artifact_fail: a Nat, the number of artifact failures - heap_space: a String representation of TODO - service_times: a String representation of TODO - autopsy_version: a String, the version of autopsy that was run - ingest_messages: a Nat, the number of ingest messages - indexed_files: a Nat, the number of files indexed during the ingest - indexed_chunks: a Nat, the number of chunks indexed during the ingest - printerror: a listof_String, the error messages printed during this TestData's test - printout: a listof_String, the messages pritned during this TestData's test - """ - - def __init__(self, image, main_config): - """Init this TestData with it's image and the test configuration. - - Args: - image: the Image to be tested. - main_config: the global TestConfiguration. - """ - # Configuration Data - self.main_config = main_config - self.ant = [] - self.image_file = str(image) - # TODO: This 0 should be be refactored out, but it will require rebuilding and changing of outputs. 
- self.image = get_image_name(self.image_file) - self.image_name = self.image + "(0)" - # Directory structure and files - self.output_path = make_path(self.main_config.output_dir, self.image_name) - self.autopsy_data_file = make_path(self.output_path, self.image_name + "Autopsy_data.txt") - self.warning_log = make_local_path(self.output_path, "AutopsyLogs.txt") - self.antlog_dir = make_local_path(self.output_path, "antlog.txt") - self.test_dbdump = make_path(self.output_path, self.image_name + - "DBDump.txt") - self.common_log_path = make_local_path(self.output_path, self.image_name + COMMON_LOG) - self.sorted_log = make_local_path(self.output_path, self.image_name + "SortedErrors.txt") - self.reports_dir = make_path(self.output_path, AUTOPSY_TEST_CASE, "Reports") - self.gold_data_dir = make_path(self.main_config.img_gold, self.image_name) - self.gold_archive = make_path(self.main_config.gold, - self.image_name + "-archive.zip") - self.logs_dir = make_path(self.output_path, "logs") - self.solr_index = make_path(self.output_path, AUTOPSY_TEST_CASE, - "ModuleOutput", "KeywordSearch") - # Results and Info - self.html_report_passed = False - self.errors_diff_passed = False - self.db_diff_passed = False - self.overall_passed = False - # Ingest info - self.total_test_time = "" - self.start_date = "" - self.end_date = "" - self.total_ingest_time = "" - self.artifact_count = 0 - self.artifact_fail = 0 - self.heap_space = "" - self.service_times = "" - self.autopsy_version = "" - self.ingest_messages = 0 - self.indexed_files = 0 - self.indexed_chunks = 0 - # Error tracking - self.printerror = [] - self.printout = [] - - def ant_to_string(self): - string = "" - for arg in self.ant: - string += (arg + " ") - return string - - def get_db_path(self, db_type): - """Get the path to the database file that corresponds to the given DBType. - - Args: - DBType: the DBType of the path to be generated. 
- """ - if(db_type == DBType.GOLD): - db_path = make_path(self.gold_data_dir, DB_FILENAME) - elif(db_type == DBType.OUTPUT): - db_path = make_path(self.main_config.output_dir, self.image_name, AUTOPSY_TEST_CASE, DB_FILENAME) - else: - db_path = make_path(self.main_config.output_dir, self.image_name, AUTOPSY_TEST_CASE, BACKUP_DB_FILENAME) - return db_path - - def get_html_report_path(self, html_type): - """Get the path to the HTML Report folder that corresponds to the given DBType. - - Args: - DBType: the DBType of the path to be generated. - """ - if(html_type == DBType.GOLD): - return make_path(self.gold_data_dir, "Report") - else: - # Autopsy creates an HTML report folder in the form AutopsyTestCase DATE-TIME - # It's impossible to get the exact time the folder was created, but the folder - # we are looking for is the only one in the self.reports_dir folder - html_path = "" - for fs in os.listdir(self.reports_dir): - html_path = make_path(self.reports_dir, fs) - if os.path.isdir(html_path): - break - return make_path(html_path, os.listdir(html_path)[0]) - - def get_sorted_data_path(self, file_type): - """Get the path to the SortedData file that corresponds to the given DBType. - - Args: - file_type: the DBType of the path to be generated - """ - return self._get_path_to_file(file_type, "SortedData.txt") - - def get_sorted_errors_path(self, file_type): - """Get the path to the SortedErrors file that correspodns to the given - DBType. - - Args: - file_type: the DBType of the path to be generated - """ - return self._get_path_to_file(file_type, "SortedErrors.txt") - - def get_db_dump_path(self, file_type): - """Get the path to the DBDump file that corresponds to the given DBType. - - Args: - file_type: the DBType of the path to be generated - """ - return self._get_path_to_file(file_type, "DBDump.txt") - - def _get_path_to_file(self, file_type, file_name): - """Get the path to the specified file with the specified type. 
- - Args: - file_type: the DBType of the path to be generated - file_name: a String, the filename of the path to be generated - """ - full_filename = self.image_name + file_name - if(file_type == DBType.GOLD): - return make_path(self.gold_data_dir, full_filename) - else: - return make_path(self.output_path, full_filename) - - -class TestConfiguration(object): - """Container for test configuration data. - - The Master Test Configuration. Encapsulates consolidated high level input from - config XML file and command-line arguments. - - Attributes: - args: an Args, the command line arguments - output_dir: a pathto_Dir, the output directory - input_dir: a pathto_Dir, the input directory - gold: a pathto_Dir, the gold directory - img_gold: a pathto_Dir, the temp directory where gold images are unzipped to - csv: a pathto_File, the local csv file - global_csv: a pathto_File, the global csv file - html_log: a pathto_File - known_bad_path: - keyword_path: - nsrl_path: - build_path: a pathto_File, the ant build file which runs the tests - autopsy_version: - ingest_messages: a Nat, number of ingest messages - indexed_files: a Nat, the number of indexed files - indexed_chunks: a Nat, the number of indexed chunks - timer: - images: a listof_Image, the images to be tested - timeout: a Nat, the amount of time before killing the test - ant: a listof_String, the ant command to run the tests - """ - - def __init__(self, args): - """Inits TestConfiguration and loads a config file if available. - - Args: - args: an Args, the command line arguments. 
- """ - self.args = args - # Paths: - self.output_dir = "" - self.input_dir = make_local_path("..","input") - self.gold = make_path("..", "output", "gold") - self.img_gold = make_path(self.gold, 'tmp') - # Logs: - self.csv = "" - self.global_csv = "" - self.html_log = "" - # Ant info: - self.known_bad_path = make_path(self.input_dir, "notablehashes.txt-md5.idx") - self.keyword_path = make_path(self.input_dir, "notablekeywords.xml") - self.nsrl_path = make_path(self.input_dir, "nsrl.txt-md5.idx") - self.build_path = make_path("..", "build.xml") - # Infinite Testing info - timer = 0 - self.images = [] - # Email info - self.email_enabled = args.email_enabled - self.mail_server = "" - self.mail_to = "" - self.mail_subject = "" - # Set the timeout to something huge - # The entire tester should not timeout before this number in ms - # However it only seems to take about half this time - # And it's very buggy, so we're being careful - self.timeout = 24 * 60 * 60 * 1000 * 1000 - - if not self.args.single: - self._load_config_file(self.args.config_file) - else: - self.images.append(self.args.single_file) - self._init_logs() - #self._init_imgs() - #self._init_build_info() - - - def _load_config_file(self, config_file): - """Updates this TestConfiguration's attributes from the config file. - - Initializes this TestConfiguration by iterating through the XML config file - command-line argument. 
Populates self.images and optional email configuration - - Args: - config_file: ConfigFile - the configuration file to load - """ - try: - count = 0 - parsed_config = parse(config_file) - logres = [] - counts = {} - if parsed_config.getElementsByTagName("indir"): - self.input_dir = parsed_config.getElementsByTagName("indir")[0].getAttribute("value").encode().decode("utf_8") - if parsed_config.getElementsByTagName("global_csv"): - self.global_csv = parsed_config.getElementsByTagName("global_csv")[0].getAttribute("value").encode().decode("utf_8") - self.global_csv = make_local_path(self.global_csv) - if parsed_config.getElementsByTagName("golddir"): - self.gold = parsed_config.getElementsByTagName("golddir")[0].getAttribute("value").encode().decode("utf_8") - self.img_gold = make_path(self.gold, 'tmp') - - self._init_imgs(parsed_config) - self._init_build_info(parsed_config) - self._init_email_info(parsed_config) - - except IOError as e: - msg = "There was an error loading the configuration file.\n" - msg += "\t" + str(e) - Errors.add_email_msg(msg) - logging.critical(traceback.format_exc()) - print(traceback.format_exc()) - - def _init_logs(self): - """Setup output folder, logs, and reporting infrastructure.""" - if(not dir_exists(make_path("..", "output", "results"))): - os.makedirs(make_path("..", "output", "results",)) - self.output_dir = make_path("..", "output", "results", time.strftime("%Y.%m.%d-%H.%M.%S")) - os.makedirs(self.output_dir) - self.csv = make_local_path(self.output_dir, "CSV.txt") - self.html_log = make_path(self.output_dir, "AutopsyTestCase.html") - log_name = self.output_dir + "\\regression.log" - logging.basicConfig(filename=log_name, level=logging.DEBUG) - - def _init_build_info(self, parsed_config): - """Initializes paths that point to information necessary to run the AutopsyIngest.""" - build_elements = parsed_config.getElementsByTagName("build") - if build_elements: - build_element = build_elements[0] - build_path = 
build_element.getAttribute("value").encode().decode("utf_8") - self.build_path = build_path - - def _init_imgs(self, parsed_config): - """Initialize the list of images to run tests on.""" - for element in parsed_config.getElementsByTagName("image"): - value = element.getAttribute("value").encode().decode("utf_8") - print ("Image in Config File: " + value) - if file_exists(value): - self.images.append(value) - else: - msg = "File: " + value + " doesn't exist" - Errors.print_error(msg) - Errors.add_email_msg(msg) - image_count = len(self.images) - - # Sanity check to see if there are obvious gold images that we are not testing - gold_count = 0 - for file in os.listdir(self.gold): - if not(file == 'tmp'): - gold_count+=1 - - if (image_count > gold_count): - print("******Alert: There are more input images than gold standards, some images will not be properly tested.\n") - elif (image_count < gold_count): - print("******Alert: There are more gold standards than input images, this will not check all gold Standards.\n") - - def _init_email_info(self, parsed_config): - """Initializes email information dictionary""" - email_elements = parsed_config.getElementsByTagName("email") - if email_elements: - mail_to = email_elements[0] - self.mail_to = mail_to.getAttribute("value").encode().decode("utf_8") - mail_server_elements = parsed_config.getElementsByTagName("mail_server") - if mail_server_elements: - mail_from = mail_server_elements[0] - self.mail_server = mail_from.getAttribute("value").encode().decode("utf_8") - subject_elements = parsed_config.getElementsByTagName("subject") - if subject_elements: - subject = subject_elements[0] - self.mail_subject = subject.getAttribute("value").encode().decode("utf_8") - if self.mail_server and self.mail_to and self.args.email_enabled: - self.email_enabled = True - print("Email will be sent to ", self.mail_to) - else: - print("No email will be sent.") - - -#-------------------------------------------------# -# Functions relating to 
comparing outputs # -#-------------------------------------------------# -class TestResultsDiffer(object): - """Compares results for a single test.""" - - def run_diff(test_data): - """Compares results for a single test. - - Args: - test_data: the TestData to use. - databaseDiff: TskDbDiff object created based off test_data - """ - try: - output_db = test_data.get_db_path(DBType.OUTPUT) - gold_db = test_data.get_db_path(DBType.GOLD) - output_dir = test_data.output_path - gold_bb_dump = test_data.get_sorted_data_path(DBType.GOLD) - gold_dump = test_data.get_db_dump_path(DBType.GOLD) - test_data.db_diff_pass = all(TskDbDiff(output_db, gold_db, output_dir=output_dir, gold_bb_dump=gold_bb_dump, - gold_dump=gold_dump).run_diff()) - - # Compare Exceptions - # replace is a fucntion that replaces strings of digits with 'd' - # this is needed so dates and times will not cause the diff to fail - replace = lambda file: re.sub(re.compile("\d"), "d", file) - output_errors = test_data.get_sorted_errors_path(DBType.OUTPUT) - gold_errors = test_data.get_sorted_errors_path(DBType.GOLD) - passed = TestResultsDiffer._compare_text(output_errors, gold_errors, - replace) - test_data.errors_diff_passed = passed - - # Compare html output - gold_report_path = test_data.get_html_report_path(DBType.GOLD) - output_report_path = test_data.get_html_report_path(DBType.OUTPUT) - passed = TestResultsDiffer._html_report_diff(gold_report_path, - output_report_path) - test_data.html_report_passed = passed - - # Clean up tmp folder - del_dir(test_data.gold_data_dir) - - except sqlite3.OperationalError as e: - Errors.print_error("Tests failed while running the diff:\n") - Errors.print_error(str(e)) - except TskDbDiffException as e: - Errors.print_error(str(e)) - except Exception as e: - Errors.print_error("Tests failed due to an error, try rebuilding or creating gold standards.\n") - Errors.print_error(str(e) + "\n") - print(traceback.format_exc()) - - def _compare_text(output_file, gold_file, 
process=None): - """Compare two text files. - - Args: - output_file: a pathto_File, the output text file - gold_file: a pathto_File, the input text file - pre-process: (optional) a function of String -> String that will be - called on each input file before the diff, if specified. - """ - if(not file_exists(output_file)): - return False - output_data = codecs.open(output_file, "r", "utf_8").read() - gold_data = codecs.open(gold_file, "r", "utf_8").read() - - if process is not None: - output_data = process(output_data) - gold_data = process(gold_data) - - if (not(gold_data == output_data)): - diff_path = os.path.splitext(os.path.basename(output_file))[0] - diff_path += "-Diff.txt" - diff_file = codecs.open(diff_path, "wb", "utf_8") - dffcmdlst = ["diff", output_file, gold_file] - subprocess.call(dffcmdlst, stdout = diff_file) - Errors.add_email_attachment(diff_path) - msg = "There was a difference in " - msg += os.path.basename(output_file) + ".\n" - Errors.add_email_msg(msg) - Errors.print_error(msg) - return False - else: - return True - - def _html_report_diff(gold_report_path, output_report_path): - """Compare the output and gold html reports. - - Args: - gold_report_path: a pathto_Dir, the gold HTML report directory - output_report_path: a pathto_Dir, the output HTML report directory - - Returns: - true, if the reports match, false otherwise. - """ - try: - gold_html_files = get_files_by_ext(gold_report_path, ".html") - output_html_files = get_files_by_ext(output_report_path, ".html") - - #ensure both reports have the same number of files and are in the same order - if(len(gold_html_files) != len(output_html_files)): - msg = "The reports did not have the same number or files." - msg += "One of the reports may have been corrupted." 
- Errors.print_error(msg) - else: - gold_html_files.sort() - output_html_files.sort() - - total = {"Gold": 0, "New": 0} - for gold, output in zip(gold_html_files, output_html_files): - count = TestResultsDiffer._compare_report_files(gold, output) - total["Gold"] += count[0] - total["New"] += count[1] - - okay = "The test report matches the gold report." - errors=["Gold report had " + str(total["Gold"]) +" errors", "New report had " + str(total["New"]) + " errors."] - print_report(errors, "REPORT COMPARISON", okay) - - if total["Gold"] == total["New"]: - return True - else: - Errors.print_error("The reports did not match each other.\n " + errors[0] +" and the " + errors[1]) - return False - except OSError as e: - e.print_error() - return False - except Exception as e: - Errors.print_error("Error: Unknown fatal error comparing reports.") - Errors.print_error(str(e) + "\n") - logging.critical(traceback.format_exc()) - return False - - def _compare_report_files(a_path, b_path): - """Compares the two specified report html files. - - Args: - a_path: a pathto_File, the first html report file - b_path: a pathto_File, the second html report file - - Returns: - a tuple of (Nat, Nat), which represent the length of each - unordered list in the html report files, or (0, 0) if the - lenghts are the same. - """ - a_file = open(a_path) - b_file = open(b_path) - a = a_file.read() - b = b_file.read() - a = a[a.find("
        "):] - b = b[b.find("
          "):] - - a_list = TestResultsDiffer._split(a, 50) - b_list = TestResultsDiffer._split(b, 50) - if not len(a_list) == len(b_list): - ex = (len(a_list), len(b_list)) - return ex - else: - return (0, 0) - - # Split a string into an array of string of the given size - def _split(input, size): - return [input[start:start+size] for start in range(0, len(input), size)] - - -class Reports(object): - def generate_reports(test_data): - """Generate the reports for a single test - - Args: - test_data: the TestData - """ - Reports._generate_html(test_data) - if test_data.main_config.global_csv: - Reports._generate_csv(test_data.main_config.global_csv, test_data) - else: - Reports._generate_csv(test_data.main_config.csv, test_data) - - def _generate_html(test_data): - """Generate the HTML log file.""" - # If the file doesn't exist yet, this is the first test_config to run for - # this test, so we need to make the start of the html log - html_log = test_data.main_config.html_log - if not file_exists(html_log): - Reports.write_html_head() - with open(html_log, "a") as html: - # The image title - title = "

          " + test_data.image_name + " \ - tested on " + socket.gethostname() + "

          \ -

          \ - Errors and Warnings |\ - Information |\ - General Output |\ - Logs\ -

          " - # The script errors found - if not test_data.overall_passed: - ids = 'errors1' - else: - ids = 'errors' - errors = "
          \ -

          Errors and Warnings

          \ -
          " - # For each error we have logged in the test_config - for error in test_data.printerror: - # Replace < and > to avoid any html display errors - errors += "

          " + error.replace("<", "<").replace(">", ">") + "

          " - # If there is a \n, we probably want a
          in the html - if "\n" in error: - errors += "
          " - errors += "
          " - - # Links to the logs - logs = "
          \ -

          Logs

          \ -
          " - logs_path = test_data.logs_dir - for file in os.listdir(logs_path): - logs += "

          " + file + "

          " - logs += "
          " - - # All the testing information - info = "
          \ -

          Information

          \ -
          \ - " - # The individual elements - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" - info += "" -# info += "" -# info += "" -# info += "" -# info += "" -# info += "" -# info += "" - info += "
          Image Path:" + test_data.image_file + "
          Image Name:" + test_data.image_name + "
          test_config Output Directory:" + test_data.main_config.output_dir + "
          Autopsy Version:" + test_data.autopsy_version + "
          Heap Space:" + test_data.heap_space + "
          Test Start Date:" + test_data.start_date + "
          Test End Date:" + test_data.end_date + "
          Total Test Time:" + test_data.total_test_time + "
          Total Ingest Time:" + test_data.total_ingest_time + "
          Exceptions Count:" + str(len(get_exceptions(test_data))) + "
          Autopsy OutOfMemoryExceptions:" + str(len(search_logs("OutOfMemoryException", test_data))) + "
          Autopsy OutOfMemoryErrors:" + str(len(search_logs("OutOfMemoryError", test_data))) + "
          Tika OutOfMemoryErrors/Exceptions:" + str(Reports._get_num_memory_errors("tika", test_data)) + "
          Solr OutOfMemoryErrors/Exceptions:" + str(Reports._get_num_memory_errors("solr", test_data)) + "
          TskCoreExceptions:" + str(len(search_log_set("autopsy", "TskCoreException", test_data))) + "
          TskDataExceptions:" + str(len(search_log_set("autopsy", "TskDataException", test_data))) + "
          Ingest Messages Count:" + str(test_data.ingest_messages) + "
          Indexed Files Count:" + str(test_data.indexed_files) + "
          Indexed File Chunks Count:" + str(test_data.indexed_chunks) + "
          Out Of Disk Space:\ -

          (will skew other test results)

          " + str(len(search_log_set("autopsy", "Stopping ingest due to low disk space on disk", test_data))) + "
          TSK Objects Count:" + str(test_data.db_diff_results.output_objs) + "
          Artifacts Count:" + str(test_data.db_diff_results.output_artifacts)+ "
          Attributes Count:" + str(test_data.db_diff_results.output_attrs) + "
          \ -
          " - # For all the general print statements in the test_config - output = "
          \ -

          General Output

          \ -
          " - # For each printout in the test_config's list - for out in test_data.printout: - output += "

          " + out + "

          " - # If there was a \n it probably means we want a
          in the html - if "\n" in out: - output += "
          " - output += "
          " - - html.write(title) - html.write(errors) - html.write(info) - html.write(logs) - html.write(output) - - def write_html_head(html_log): - """Write the top of the HTML log file. - - Args: - html_log: a pathto_File, the global HTML log - """ - with open(str(html_log), "a") as html: - head = "\ - \ - AutopsyTesttest_config Output\ - \ - \ - " - html.write(head) - - def write_html_foot(html_log): - """Write the bottom of the HTML log file. - - Args: - html_log: a pathto_File, the global HTML log - """ - with open(html_log, "a") as html: - head = "" - html.write(head) - - def html_add_images(html_log, full_image_names): - """Add all the image names to the HTML log. - - Args: - full_image_names: a listof_String, each representing an image name - html_log: a pathto_File, the global HTML log - """ - # If the file doesn't exist yet, this is the first test_config to run for - # this test, so we need to make the start of the html log - if not file_exists(html_log): - Reports.write_html_head(html_log) - with open(html_log, "a") as html: - links = [] - for full_name in full_image_names: - name = get_image_name(full_name) - links.append("" + name + "") - html.write("

          " + (" | ".join(links)) + "

          ") - - def _generate_csv(csv_path, test_data): - """Generate the CSV log file""" - # If the CSV file hasn't already been generated, this is the - # first run, and we need to add the column names - if not file_exists(csv_path): - Reports.csv_header(csv_path) - # Now add on the fields to a new row - with open(csv_path, "a") as csv: - # Variables that need to be written - vars = [] - vars.append( test_data.image_file ) - vars.append( test_data.image_name ) - vars.append( test_data.main_config.output_dir ) - vars.append( socket.gethostname() ) - vars.append( test_data.autopsy_version ) - vars.append( test_data.heap_space ) - vars.append( test_data.start_date ) - vars.append( test_data.end_date ) - vars.append( test_data.total_test_time ) - vars.append( test_data.total_ingest_time ) - vars.append( test_data.service_times ) - vars.append( str(len(get_exceptions(test_data))) ) - vars.append( str(Reports._get_num_memory_errors("autopsy", test_data)) ) - vars.append( str(Reports._get_num_memory_errors("tika", test_data)) ) - vars.append( str(Reports._get_num_memory_errors("solr", test_data)) ) - vars.append( str(len(search_log_set("autopsy", "TskCoreException", test_data))) ) - vars.append( str(len(search_log_set("autopsy", "TskDataException", test_data))) ) - vars.append( str(test_data.ingest_messages) ) - vars.append( str(test_data.indexed_files) ) - vars.append( str(test_data.indexed_chunks) ) - vars.append( str(len(search_log_set("autopsy", "Stopping ingest due to low disk space on disk", test_data))) ) -# vars.append( str(test_data.db_diff_results.output_objs) ) -# vars.append( str(test_data.db_diff_results.output_artifacts) ) -# vars.append( str(test_data.db_diff_results.output_objs) ) - vars.append( make_local_path("gold", test_data.image_name, DB_FILENAME) ) -# vars.append( test_data.db_diff_results.get_artifact_comparison() ) -# vars.append( test_data.db_diff_results.get_attribute_comparison() ) - vars.append( make_local_path("gold", test_data.image_name, 
"standard.html") ) - vars.append( str(test_data.html_report_passed) ) - vars.append( test_data.ant_to_string() ) - # Join it together with a ", " - output = "|".join(vars) - output += "\n" - # Write to the log! - csv.write(output) - - def csv_header(csv_path): - """Generate the CSV column names.""" - with open(csv_path, "w") as csv: - titles = [] - titles.append("Image Path") - titles.append("Image Name") - titles.append("Output test_config Directory") - titles.append("Host Name") - titles.append("Autopsy Version") - titles.append("Heap Space Setting") - titles.append("Test Start Date") - titles.append("Test End Date") - titles.append("Total Test Time") - titles.append("Total Ingest Time") - titles.append("Service Times") - titles.append("Autopsy Exceptions") - titles.append("Autopsy OutOfMemoryErrors/Exceptions") - titles.append("Tika OutOfMemoryErrors/Exceptions") - titles.append("Solr OutOfMemoryErrors/Exceptions") - titles.append("TskCoreExceptions") - titles.append("TskDataExceptions") - titles.append("Ingest Messages Count") - titles.append("Indexed Files Count") - titles.append("Indexed File Chunks Count") - titles.append("Out Of Disk Space") -# titles.append("Tsk Objects Count") -# titles.append("Artifacts Count") -# titles.append("Attributes Count") - titles.append("Gold Database Name") -# titles.append("Artifacts Comparison") -# titles.append("Attributes Comparison") - titles.append("Gold Report Name") - titles.append("Report Comparison") - titles.append("Ant Command Line") - output = "|".join(titles) - output += "\n" - csv.write(output) - - def _get_num_memory_errors(type, test_data): - """Get the number of OutOfMemory errors and Exceptions. - - Args: - type: a String representing the type of log to check. - test_data: the TestData to examine. 
- """ - return (len(search_log_set(type, "OutOfMemoryError", test_data)) + - len(search_log_set(type, "OutOfMemoryException", test_data))) - -class Logs(object): - - def generate_log_data(test_data): - """Find and handle relevent data from the Autopsy logs. - - Args: - test_data: the TestData whose logs to examine - """ - Logs._generate_common_log(test_data) - try: - Logs._fill_ingest_data(test_data) - except Exception as e: - Errors.print_error("Error: Unknown fatal error when filling test_config data.") - Errors.print_error(str(e) + "\n") - logging.critical(traceback.format_exc()) - # If running in verbose mode (-v) - if test_data.main_config.args.verbose: - errors = Logs._report_all_errors() - okay = "No warnings or errors in any log files." - print_report(errors, "VERBOSE", okay) - - def _generate_common_log(test_data): - """Generate the common log, the log of all exceptions and warnings from - each log file generated by Autopsy. - - Args: - test_data: the TestData to generate a log for - """ - try: - logs_path = test_data.logs_dir - common_log = codecs.open(test_data.common_log_path, "w", "utf_8") - warning_log = codecs.open(test_data.warning_log, "w", "utf_8") - common_log.write("--------------------------------------------------\n") - common_log.write(test_data.image_name + "\n") - common_log.write("--------------------------------------------------\n") - rep_path = make_local_path(test_data.main_config.output_dir) - rep_path = rep_path.replace("\\\\", "\\") - for file in os.listdir(logs_path): - log = codecs.open(make_path(logs_path, file), "r", "utf_8") - for line in log: - line = line.replace(rep_path, "test_data") - if line.startswith("Exception"): - common_log.write(file +": " + line) - elif line.startswith("Error"): - common_log.write(file +": " + line) - elif line.startswith("SEVERE"): - common_log.write(file +":" + line) - else: - warning_log.write(file +": " + line) - log.close() - common_log.write("\n") - common_log.close() - 
print(test_data.sorted_log) - srtcmdlst = ["sort", test_data.common_log_path, "-o", test_data.sorted_log] - subprocess.call(srtcmdlst) - except (OSError, IOError) as e: - Errors.print_error("Error: Unable to generate the common log.") - Errors.print_error(str(e) + "\n") - Errors.print_error(traceback.format_exc()) - logging.critical(traceback.format_exc()) - - def _fill_ingest_data(test_data): - """Fill the TestDatas variables that require the log files. - - Args: - test_data: the TestData to modify - """ - try: - # Open autopsy.log.0 - log_path = make_path(test_data.logs_dir, "autopsy.log.0") - log = open(log_path) - - # Set the TestData start time based off the first line of autopsy.log.0 - # *** If logging time format ever changes this will break *** - test_data.start_date = log.readline().split(" org.")[0] - - # Set the test_data ending time based off the "create" time (when the file was copied) - test_data.end_date = time.ctime(os.path.getmtime(log_path)) - except IOError as e: - Errors.print_error("Error: Unable to open autopsy.log.0.") - Errors.print_error(str(e) + "\n") - logging.warning(traceback.format_exc()) - # Start date must look like: "Jul 16, 2012 12:57:53 PM" - # End date must look like: "Mon Jul 16 13:02:42 2012" - # *** If logging time format ever changes this will break *** - start = datetime.datetime.strptime(test_data.start_date, "%b %d, %Y %I:%M:%S %p") - end = datetime.datetime.strptime(test_data.end_date, "%a %b %d %H:%M:%S %Y") - test_data.total_test_time = str(end - start) - - try: - # Set Autopsy version, heap space, ingest time, and service times - - version_line = search_logs("INFO: Application name: Autopsy, version:", test_data)[0] - test_data.autopsy_version = get_word_at(version_line, 5).rstrip(",") - - test_data.heap_space = search_logs("Heap memory usage:", test_data)[0].rstrip().split(": ")[1] - - ingest_line = search_logs("Ingest (including enqueue)", test_data)[0] - test_data.total_ingest_time = get_word_at(ingest_line, 
6).rstrip() - - message_line = search_log_set("autopsy", "Ingest messages count:", test_data)[0] - test_data.ingest_messages = int(message_line.rstrip().split(": ")[2]) - - files_line = search_log_set("autopsy", "Indexed files count:", test_data)[0] - test_data.indexed_files = int(files_line.rstrip().split(": ")[2]) - - chunks_line = search_log_set("autopsy", "Indexed file chunks count:", test_data)[0] - test_data.indexed_chunks = int(chunks_line.rstrip().split(": ")[2]) - except (OSError, IOError) as e: - Errors.print_error("Error: Unable to find the required information to fill test_config data.") - Errors.print_error(str(e) + "\n") - logging.critical(traceback.format_exc()) - print(traceback.format_exc()) - try: - service_lines = search_log("autopsy.log.0", "to process()", test_data) - service_list = [] - for line in service_lines: - words = line.split(" ") - # Kind of forcing our way into getting this data - # If this format changes, the tester will break - i = words.index("secs.") - times = words[i-4] + " " - times += words[i-3] + " " - times += words[i-2] + " " - times += words[i-1] + " " - times += words[i] - service_list.append(times) - test_data.service_times = "; ".join(service_list) - except (OSError, IOError) as e: - Errors.print_error("Error: Unknown fatal error when finding service times.") - Errors.print_error(str(e) + "\n") - logging.critical(traceback.format_exc()) - - def _report_all_errors(): - """Generate a list of all the errors found in the common log. - - Returns: - a listof_String, the errors found in the common log - """ - try: - return get_warnings() + get_exceptions() - except (OSError, IOError) as e: - Errors.print_error("Error: Unknown fatal error when reporting all errors.") - Errors.print_error(str(e) + "\n") - logging.warning(traceback.format_exc()) - - def search_common_log(string, test_data): - """Search the common log for any instances of a given string. - - Args: - string: the String to search for. 
- test_data: the TestData that holds the log to search. - - Returns: - a listof_String, all the lines that the string is found on - """ - results = [] - log = codecs.open(test_data.common_log_path, "r", "utf_8") - for line in log: - if string in line: - results.append(line) - log.close() - return results - - -def print_report(errors, name, okay): - """Print a report with the specified information. - - Args: - errors: a listof_String, the errors to report. - name: a String, the name of the report. - okay: the String to print when there are no errors. - """ - if errors: - Errors.print_error("--------< " + name + " >----------") - for error in errors: - Errors.print_error(str(error)) - Errors.print_error("--------< / " + name + " >--------\n") - else: - Errors.print_out("-----------------------------------------------------------------") - Errors.print_out("< " + name + " - " + okay + " />") - Errors.print_out("-----------------------------------------------------------------\n") - - -def get_exceptions(test_data): - """Get a list of the exceptions in the autopsy logs. - - Args: - test_data: the TestData to use to find the exceptions. - Returns: - a listof_String, the exceptions found in the logs. - """ - exceptions = [] - logs_path = test_data.logs_dir - results = [] - for file in os.listdir(logs_path): - if "autopsy.log" in file: - log = codecs.open(make_path(logs_path, file), "r", "utf_8") - ex = re.compile("\SException") - er = re.compile("\SError") - for line in log: - if ex.search(line) or er.search(line): - exceptions.append(line) - log.close() - return exceptions - -def get_warnings(test_data): - """Get a list of the warnings listed in the common log. - - Args: - test_data: the TestData to use to find the warnings - - Returns: - listof_String, the warnings found. 
- """ - warnings = [] - common_log = codecs.open(test_data.warning_log, "r", "utf_8") - for line in common_log: - if "warning" in line.lower(): - warnings.append(line) - common_log.close() - return warnings - -def copy_logs(test_data): - """Copy the Autopsy generated logs to output directory. - - Args: - test_data: the TestData whose logs will be copied - """ - try: - log_dir = os.path.join("..", "..", "Testing","build","test","qa-functional","work","userdir0","var","log") - shutil.copytree(log_dir, test_data.logs_dir) - except OSError as e: - printerror(test_data,"Error: Failed to copy the logs.") - printerror(test_data,str(e) + "\n") - logging.warning(traceback.format_exc()) - -def setDay(): - global Day - Day = int(strftime("%d", localtime())) - -def getLastDay(): - return Day - -def getDay(): - return int(strftime("%d", localtime())) - -def newDay(): - return getLastDay() != getDay() - -#------------------------------------------------------------# -# Exception classes to manage "acceptable" thrown exceptions # -# versus unexpected and fatal exceptions # -#------------------------------------------------------------# - -class FileNotFoundException(Exception): - """ - If a file cannot be found by one of the helper functions, - they will throw a FileNotFoundException unless the purpose - is to return False. 
- """ - def __init__(self, file): - self.file = file - self.strerror = "FileNotFoundException: " + file - - def print_error(self): - Errors.print_error("Error: File could not be found at:") - Errors.print_error(self.file + "\n") - - def error(self): - error = "Error: File could not be found at:\n" + self.file + "\n" - return error - -class DirNotFoundException(Exception): - """ - If a directory cannot be found by a helper function, - it will throw this exception - """ - def __init__(self, dir): - self.dir = dir - self.strerror = "DirNotFoundException: " + dir - - def print_error(self): - Errors.print_error("Error: Directory could not be found at:") - Errors.print_error(self.dir + "\n") - - def error(self): - error = "Error: Directory could not be found at:\n" + self.dir + "\n" - return error - - -class Errors: - """A class used to manage error reporting. - - Attributes: - printout: a listof_String, the non-error messages that were printed - printerror: a listof_String, the error messages that were printed - email_body: a String, the body of the report email - email_msg_prefix: a String, the prefix for lines added to the email - email_attchs: a listof_pathto_File, the files to be attached to the - report email - """ - printout = [] - printerror = [] - email_body = "" - email_msg_prefix = "Configuration" - email_attachs = [] - - def set_testing_phase(image_name): - """Change the email message prefix to be the given testing phase. - - Args: - image_name: a String, representing the current image being tested - """ - Errors.email_msg_prefix = image_name - - def print_out(msg): - """Print out an informational message. - - Args: - msg: a String, the message to be printed - """ - print(msg) - Errors.printout.append(msg) - - def print_error(msg): - """Print out an error message. - - Args: - msg: a String, the error message to be printed. 
- """ - print(msg) - Errors.printerror.append(msg) - - def clear_print_logs(): - """Reset the image-specific attributes of the Errors class.""" - Errors.printout = [] - Errors.printerror = [] - - def add_email_msg(msg): - """Add the given message to the body of the report email. - - Args: - msg: a String, the message to be added to the email - """ - Errors.email_body += Errors.email_msg_prefix + ":" + msg - - def add_email_attachment(path): - """Add the given file to be an attachment for the report email - - Args: - file: a pathto_File, the file to add - """ - Errors.email_attachs.append(path) - - -class DiffResults(object): - """Container for the results of the database diff tests. - - Stores artifact, object, and attribute counts and comparisons generated by - TskDbDiff. - - Attributes: - gold_attrs: a Nat, the number of gold attributes - output_attrs: a Nat, the number of output attributes - gold_objs: a Nat, the number of gold objects - output_objs: a Nat, the number of output objects - artifact_comp: a listof_String, describing the differences - attribute_comp: a listof_String, describing the differences - passed: a boolean, did the diff pass? 
- """ - def __init__(self, tsk_diff): - """Inits a DiffResults - - Args: - tsk_diff: a TskDBDiff - """ - self.gold_attrs = tsk_diff.gold_attributes - self.output_attrs = tsk_diff.autopsy_attributes - self.gold_objs = tsk_diff.gold_objects - self.output_objs = tsk_diff.autopsy_objects - self.artifact_comp = tsk_diff.artifact_comparison - self.attribute_comp = tsk_diff.attribute_comparison - self.gold_artifacts = len(tsk_diff.gold_artifacts) - self.output_artifacts = len(tsk_diff.autopsy_artifacts) - self.passed = tsk_diff.passed - - def get_artifact_comparison(self): - if not self.artifact_comp: - return "All counts matched" - else: - return "; ".join(self.artifact_comp) - - def get_attribute_comparison(self): - if not self.attribute_comp: - return "All counts matched" - list = [] - for error in self.attribute_comp: - list.append(error) - return ";".join(list) - - -#-------------------------------------------------------------# -# Parses argv and stores booleans to match command line input # -#-------------------------------------------------------------# -class Args(object): - """A container for command line options and arguments. 
- - Attributes: - single: a boolean indicating whether to run in single file mode - single_file: an Image to run the test on - rebuild: a boolean indicating whether to run in rebuild mode - list: a boolean indicating a config file was specified - unallocated: a boolean indicating unallocated space should be ignored - ignore: a boolean indicating the input directory should be ingnored - keep: a boolean indicating whether to keep the SOLR index - verbose: a boolean indicating whether verbose output should be printed - exeception: a boolean indicating whether errors containing exception - exception_string should be printed - exception_sring: a String representing and exception name - fr: a boolean indicating whether gold standard images will be downloaded - """ - def __init__(self): - self.single = False - self.single_file = "" - self.rebuild = False - self.list = False - self.config_file = "" - self.unallocated = False - self.ignore = False - self.keep = False - self.verbose = False - self.exception = False - self.exception_string = "" - self.fr = False - self.email_enabled = False - - def parse(self): - """Get the command line arguments and parse them.""" - nxtproc = [] - nxtproc.append("python3") - nxtproc.append(sys.argv.pop(0)) - while sys.argv: - arg = sys.argv.pop(0) - nxtproc.append(arg) - if(arg == "-f"): - #try: @@@ Commented out until a more specific except statement is added - arg = sys.argv.pop(0) - print("Running on a single file:") - print(path_fix(arg) + "\n") - self.single = True - self.single_file = path_fix(arg) - #except: - # print("Error: No single file given.\n") - # return False - elif(arg == "-r" or arg == "--rebuild"): - print("Running in rebuild mode.\n") - self.rebuild = True - elif(arg == "-l" or arg == "--list"): - try: - arg = sys.argv.pop(0) - nxtproc.append(arg) - print("Running from configuration file:") - print(arg + "\n") - self.list = True - self.config_file = arg - except: - print("Error: No configuration file given.\n") - return 
False - elif(arg == "-u" or arg == "--unallocated"): - print("Ignoring unallocated space.\n") - self.unallocated = True - elif(arg == "-k" or arg == "--keep"): - print("Keeping the Solr index.\n") - self.keep = True - elif(arg == "-v" or arg == "--verbose"): - print("Running in verbose mode:") - print("Printing all thrown exceptions.\n") - self.verbose = True - elif(arg == "-e" or arg == "--exception"): - try: - arg = sys.argv.pop(0) - nxtproc.append(arg) - print("Running in exception mode: ") - print("Printing all exceptions with the string '" + arg + "'\n") - self.exception = True - self.exception_string = arg - except: - print("Error: No exception string given.") - elif arg == "-h" or arg == "--help": - print(usage()) - return False - elif arg == "-fr" or arg == "--forcerun": - print("Not downloading new images") - self.fr = True - elif arg == "-e" or arg == "-email": - self.email_enabled = True - else: - print(usage()) - return False - # Return the args were sucessfully parsed - return self._sanity_check() - - def _sanity_check(self): - """Check to make sure there are no conflicting arguments and the - specified files exist. 
- - Returns: - False if there are conflicting arguments or a specified file does - not exist, True otherwise - """ - if self.single and self.list: - print("Cannot run both from config file and on a single file.") - return False - if self.list: - if not file_exists(self.config_file): - print("Configuration file does not exist at:", - self.config_file) - return False - elif self.single: - if not file_exists(self.single_file): - msg = "Image file does not exist at: " + self.single_file - return False - if (not self.single) and (not self.ignore) and (not self.list): - self.config_file = "config.xml" - if not file_exists(self.config_file): - msg = "Configuration file does not exist at: " + self.config_file - return False - - return True - -#### -# Helper Functions -#### -def search_logs(string, test_data): - """Search through all the known log files for a given string. - - Args: - string: the String to search for. - test_data: the TestData that holds the logs to search. - - Returns: - a listof_String, the lines that contained the given String. - """ - logs_path = test_data.logs_dir - results = [] - for file in os.listdir(logs_path): - log = codecs.open(make_path(logs_path, file), "r", "utf_8") - for line in log: - if string in line: - results.append(line) - log.close() - return results - -def search_log(log, string, test_data): - """Search the given log for any instances of a given string. - - Args: - log: a pathto_File, the log to search in - string: the String to search for. - test_data: the TestData that holds the log to search. 
- - Returns: - a listof_String, all the lines that the string is found on - """ - logs_path = make_path(test_data.logs_dir, log) - try: - results = [] - log = codecs.open(logs_path, "r", "utf_8") - for line in log: - if string in line: - results.append(line) - log.close() - if results: - return results - except: - raise FileNotFoundException(logs_path) - -# Search through all the the logs of the given type -# Types include autopsy, tika, and solr -def search_log_set(type, string, test_data): - """Search through all logs to the given type for the given string. - - Args: - type: the type of log to search in. - string: the String to search for. - test_data: the TestData containing the logs to search. - - Returns: - a listof_String, the lines on which the String was found. - """ - logs_path = test_data.logs_dir - results = [] - for file in os.listdir(logs_path): - if type in file: - log = codecs.open(make_path(logs_path, file), "r", "utf_8") - for line in log: - if string in line: - results.append(line) - log.close() - return results - - -def clear_dir(dir): - """Clears all files from a directory and remakes it. - - Args: - dir: a pathto_Dir, the directory to clear - """ - try: - if dir_exists(dir): - shutil.rmtree(dir) - os.makedirs(dir) - return True; - except OSError as e: - printerror(test_data,"Error: Cannot clear the given directory:") - printerror(test_data,dir + "\n") - print(str(e)) - return False; - -def del_dir(dir): - """Delete the given directory. - - Args: - dir: a pathto_Dir, the directory to delete - """ - try: - if dir_exists(dir): - shutil.rmtree(dir) - return True; - except: - printerror(test_data,"Error: Cannot delete the given directory:") - printerror(test_data,dir + "\n") - return False; - -def get_file_in_dir(dir, ext): - """Returns the first file in the given directory with the given extension. 
- - Args: - dir: a pathto_Dir, the directory to search - ext: a String, the extension to search for - - Returns: - pathto_File, the file that was found - """ - try: - for file in os.listdir(dir): - if file.endswith(ext): - return make_path(dir, file) - # If nothing has been found, raise an exception - raise FileNotFoundException(dir) - except: - raise DirNotFoundException(dir) - -def find_file_in_dir(dir, name, ext): - """Find the file with the given name in the given directory. - - Args: - dir: a pathto_Dir, the directory to search - name: a String, the basename of the file to search for - ext: a String, the extension of the file to search for - """ - try: - for file in os.listdir(dir): - if file.startswith(name): - if file.endswith(ext): - return make_path(dir, file) - raise FileNotFoundException(dir) - except: - raise DirNotFoundException(dir) - - -class OS: - LINUX, MAC, WIN, CYGWIN = range(4) - - -if __name__ == "__main__": - global SYS - if _platform == "linux" or _platform == "linux2": - SYS = OS.LINUX - elif _platform == "darwin": - SYS = OS.MAC - elif _platform == "win32": - SYS = OS.WIN - elif _platform == "cygwin": - SYS = OS.CYGWIN - - if SYS is OS.WIN or SYS is OS.CYGWIN: - main() - else: - print("We only support Windows and Cygwin at this time.") +#!/usr/bin/python +# -*- coding: utf_8 -*- + + # Autopsy Forensic Browser + # + # Copyright 2013 Basis Technology Corp. + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. 
+from tskdbdiff import TskDbDiff, TskDbDiffException +import codecs +import datetime +import logging +import os +import re +import shutil +import socket +import sqlite3 +import subprocess +import sys +from sys import platform as _platform +import time +import traceback +import xml +from time import localtime, strftime +from xml.dom.minidom import parse, parseString +import smtplib +from email.mime.image import MIMEImage +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText +import re +import zipfile +import zlib +import Emailer +import srcupdater +from regression_utils import * + +# +# Please read me... +# +# This is the regression testing Python script. +# It uses an ant command to run build.xml for RegressionTest.java +# +# The code is cleanly sectioned and commented. +# Please follow the current formatting. +# It is a long and potentially confusing script. +# +# Variable, function, and class names are written in Python conventions: +# this_is_a_variable this_is_a_function() ThisIsAClass +# +# + + +# Data Definitions: +# +# pathto_X: A path to type X. 
+# ConfigFile: An XML file formatted according to the template in myconfig.xml +# ParsedConfig: A dom object that represents a ConfigFile +# SQLCursor: A cursor recieved from a connection to an SQL database +# Nat: A Natural Number +# Image: An image +# + +# Enumeration of database types used for the simplification of generating database paths +DBType = enum('OUTPUT', 'GOLD', 'BACKUP') + +# Common filename of the output and gold databases (although they are in different directories +DB_FILENAME = "autopsy.db" + +# Backup database filename +BACKUP_DB_FILENAME = "autopsy_backup.db" + +# TODO: Double check this purpose statement +# Folder name for gold standard database testing +AUTOPSY_TEST_CASE = "AutopsyTestCase" + +# TODO: Double check this purpose statement +# The filename of the log to store error messages +COMMON_LOG = "AutopsyErrors.txt" + +Day = 0 + +#----------------------# +# Main # +#----------------------# +def main(): + """Parse the command-line arguments, create the configuration, and run the tests.""" + args = Args() + parse_result = args.parse() + test_config = TestConfiguration(args) + # The arguments were given wrong: + if not parse_result: + return + if(not args.fr): + antin = ["ant"] + antin.append("-f") + antin.append(os.path.join("..","..","build.xml")) + antin.append("test-download-imgs") + if SYS is OS.CYGWIN: + subprocess.call(antin) + elif SYS is OS.WIN: + theproc = subprocess.Popen(antin, shell = True, stdout=subprocess.PIPE) + theproc.communicate() + # Otherwise test away! + TestRunner.run_tests(test_config) + + +class TestRunner(object): + """A collection of functions to run the regression tests.""" + + def run_tests(test_config): + """Run the tests specified by the main TestConfiguration. 
+ + Executes the AutopsyIngest for each image and dispatches the results based on + the mode (rebuild or testing) + """ + test_data_list = [ TestData(image, test_config) for image in test_config.images ] + + Reports.html_add_images(test_config.html_log, test_config.images) + + logres =[] + for test_data in test_data_list: + Errors.clear_print_logs() + Errors.set_testing_phase(test_data.image) + if not (test_config.args.rebuild or os.path.exists(test_data.gold_archive)): + msg = "Gold standard doesn't exist, skipping image:" + Errors.print_error(msg) + Errors.print_error(test_data.gold_archive) + continue + TestRunner._run_autopsy_ingest(test_data) + + if test_config.args.rebuild: + TestRunner.rebuild(test_data) + else: + logres.append(TestRunner._run_test(test_data)) + test_data.printout = Errors.printout + test_data.printerror = Errors.printerror + + Reports.write_html_foot(test_config.html_log) + # TODO: move this elsewhere + if (len(logres)>0): + for lm in logres: + for ln in lm: + Errors.add_email_msg(ln) + + # TODO: possibly worth putting this in a sub method + if all([ test_data.overall_passed for test_data in test_data_list ]): + Errors.add_email_msg("All images passed.\n") + else: + msg = "The following images failed:\n" + for test_data in test_data_list: + if not test_data.overall_passed: + msg += "\t" + test_data.image + "\n" + Errors.add_email_msg(msg) + html = open(test_config.html_log) + Errors.add_email_attachment(html.name) + html.close() + + if test_config.email_enabled: + Emailer.send_email(test_config.mail_to, test_config.mail_server, + test_config.mail_subject, Errors.email_body, Errors.email_attachs) + + def _run_autopsy_ingest(test_data): + """Run Autopsy ingest for the image in the given TestData. + + Also generates the necessary logs for rebuilding or diff. + + Args: + test_data: the TestData to run the ingest on. 
+ """ + if image_type(test_data.image_file) == IMGTYPE.UNKNOWN: + Errors.print_error("Error: Image type is unrecognized:") + Errors.print_error(test_data.image_file + "\n") + return + + logging.debug("--------------------") + logging.debug(test_data.image_name) + logging.debug("--------------------") + TestRunner._run_ant(test_data) + time.sleep(2) # Give everything a second to process + + try: + # Dump the database before we diff or use it for rebuild + TskDbDiff.dump_output_db(test_data.get_db_path(DBType.OUTPUT), test_data.get_db_dump_path(DBType.OUTPUT), + test_data.get_sorted_data_path(DBType.OUTPUT)) + except sqlite3.OperationalError as e: + print("Ingest did not run properly.", + "Make sure no other instances of Autopsy are open and try again.") + sys.exit() + + # merges logs into a single log for later diff / rebuild + copy_logs(test_data) + Logs.generate_log_data(test_data) + + TestRunner._handle_solr(test_data) + TestRunner._handle_exception(test_data) + + #TODO: figure out return type of _run_test (logres) + def _run_test(test_data): + """Compare the results of the output to the gold standard. + + Args: + test_data: the TestData + + Returns: + logres? 
+ """ + TestRunner._extract_gold(test_data) + + # Look for core exceptions + # @@@ Should be moved to TestResultsDiffer, but it didn't know about logres -- need to look into that + logres = Logs.search_common_log("TskCoreException", test_data) + + TestResultsDiffer.run_diff(test_data) + test_data.overall_passed = (test_data.html_report_passed and + test_data.errors_diff_passed and test_data.db_diff_passed) + + Reports.generate_reports(test_data) + if(not test_data.overall_passed): + Errors.add_email_attachment(test_data.common_log_path) + return logres + + def _extract_gold(test_data): + """Extract gold archive file to output/gold/tmp/ + + Args: + test_data: the TestData + """ + extrctr = zipfile.ZipFile(test_data.gold_archive, 'r', compression=zipfile.ZIP_DEFLATED) + extrctr.extractall(test_data.main_config.gold) + extrctr.close + time.sleep(2) + + def _handle_solr(test_data): + """Clean up SOLR index if in keep mode (-k). + + Args: + test_data: the TestData + """ + if not test_data.main_config.args.keep: + if clear_dir(test_data.solr_index): + print_report([], "DELETE SOLR INDEX", "Solr index deleted.") + else: + print_report([], "KEEP SOLR INDEX", "Solr index has been kept.") + + def _handle_exception(test_data): + """If running in exception mode, print exceptions to log. + + Args: + test_data: the TestData + """ + if test_data.main_config.args.exception: + exceptions = search_logs(test_data.main_config.args.exception_string, test_data) + okay = ("No warnings or exceptions found containing text '" + + test_data.main_config.args.exception_string + "'.") + print_report(exceptions, "EXCEPTION", okay) + + def rebuild(test_data): + """Rebuild the gold standard with the given TestData. + + Copies the test-generated database and html report files into the gold directory. 
+ """ + test_config = test_data.main_config + # Errors to print + errors = [] + # Delete the current gold standards + gold_dir = test_config.img_gold + clear_dir(test_config.img_gold) + tmpdir = make_path(gold_dir, test_data.image_name) + dbinpth = test_data.get_db_path(DBType.OUTPUT) + dboutpth = make_path(tmpdir, DB_FILENAME) + dataoutpth = make_path(tmpdir, test_data.image_name + "SortedData.txt") + dbdumpinpth = test_data.get_db_dump_path(DBType.OUTPUT) + dbdumpoutpth = make_path(tmpdir, test_data.image_name + "DBDump.txt") + if not os.path.exists(test_config.img_gold): + os.makedirs(test_config.img_gold) + if not os.path.exists(tmpdir): + os.makedirs(tmpdir) + try: + shutil.copy(dbinpth, dboutpth) + if file_exists(test_data.get_sorted_data_path(DBType.OUTPUT)): + shutil.copy(test_data.get_sorted_data_path(DBType.OUTPUT), dataoutpth) + shutil.copy(dbdumpinpth, dbdumpoutpth) + error_pth = make_path(tmpdir, test_data.image_name+"SortedErrors.txt") + shutil.copy(test_data.sorted_log, error_pth) + except IOError as e: + Errors.print_error(str(e)) + Errors.add_email_message("Not rebuilt properly") + print(str(e)) + print(traceback.format_exc()) + # Rebuild the HTML report + output_html_report_dir = test_data.get_html_report_path(DBType.OUTPUT) + gold_html_report_dir = make_path(tmpdir, "Report") + + try: + shutil.copytree(output_html_report_dir, gold_html_report_dir) + except OSError as e: + errors.append(e.error()) + except Exception as e: + errors.append("Error: Unknown fatal error when rebuilding the gold html report.") + errors.append(str(e) + "\n") + print(traceback.format_exc()) + oldcwd = os.getcwd() + zpdir = gold_dir + os.chdir(zpdir) + os.chdir("..") + img_gold = "tmp" + img_archive = make_path(test_data.image_name+"-archive.zip") + comprssr = zipfile.ZipFile(img_archive, 'w',compression=zipfile.ZIP_DEFLATED) + TestRunner.zipdir(img_gold, comprssr) + comprssr.close() + os.chdir(oldcwd) + del_dir(test_config.img_gold) + okay = "Sucessfully rebuilt all gold 
standards." + print_report(errors, "REBUILDING", okay) + + def zipdir(path, zip): + for root, dirs, files in os.walk(path): + for file in files: + zip.write(os.path.join(root, file)) + + def _run_ant(test_data): + """Construct and run the ant build command for the given TestData. + + Tests Autopsy by calling RegressionTest.java via the ant build file. + + Args: + test_data: the TestData + """ + test_config = test_data.main_config + # Set up the directories + if dir_exists(test_data.output_path): + shutil.rmtree(test_data.output_path) + os.makedirs(test_data.output_path) + test_data.ant = ["ant"] + test_data.ant.append("-v") + test_data.ant.append("-f") + # case.ant.append(case.build_path) + test_data.ant.append(os.path.join("..","..","Testing","build.xml")) + test_data.ant.append("regression-test") + test_data.ant.append("-l") + test_data.ant.append(test_data.antlog_dir) + test_data.ant.append("-Dimg_path=" + test_data.image_file) + test_data.ant.append("-Dknown_bad_path=" + test_config.known_bad_path) + test_data.ant.append("-Dkeyword_path=" + test_config.keyword_path) + test_data.ant.append("-Dnsrl_path=" + test_config.nsrl_path) + test_data.ant.append("-Dgold_path=" + test_config.gold) + test_data.ant.append("-Dout_path=" + + make_local_path(test_data.output_path)) + test_data.ant.append("-Dignore_unalloc=" + "%s" % test_config.args.unallocated) + test_data.ant.append("-Dtest.timeout=" + str(test_config.timeout)) + + Errors.print_out("Ingesting Image:\n" + test_data.image_file + "\n") + Errors.print_out("CMD: " + " ".join(test_data.ant)) + Errors.print_out("Starting test...\n") + antoutpth = make_local_path(test_data.main_config.output_dir, "antRunOutput.txt") + antout = open(antoutpth, "a") + if SYS is OS.CYGWIN: + subprocess.call(test_data.ant, stdout=subprocess.PIPE) + elif SYS is OS.WIN: + theproc = subprocess.Popen(test_data.ant, shell = True, stdout=subprocess.PIPE) + theproc.communicate() + antout.close() + + +class TestData(object): + """Container for 
the input and output of a single image. + + Represents data for the test of a single image, including path to the image, + database paths, etc. + + Attributes: + main_config: the global TestConfiguration + ant: a listof_String, the ant command for this TestData + image_file: a pathto_Image, the image for this TestData + image: a String, the image file's name + image_name: a String, the image file's name with a trailing (0) + output_path: pathto_Dir, the output directory for this TestData + autopsy_data_file: a pathto_File, the IMAGE_NAMEAutopsy_data.txt file + warning_log: a pathto_File, the AutopsyLogs.txt file + antlog_dir: a pathto_File, the antlog.txt file + test_dbdump: a pathto_File, the database dump, IMAGENAMEDump.txt + common_log_path: a pathto_File, the IMAGE_NAMECOMMON_LOG file + sorted_log: a pathto_File, the IMAGENAMESortedErrors.txt file + reports_dir: a pathto_Dir, the AutopsyTestCase/Reports folder + gold_data_dir: a pathto_Dir, the gold standard directory + gold_archive: a pathto_File, the gold standard archive + logs_dir: a pathto_Dir, the location where autopsy logs are stored + solr_index: a pathto_Dir, the locatino of the solr index + html_report_passed: a boolean, did the HTML report diff pass? + errors_diff_passed: a boolean, did the error diff pass? + db_diff_passed: a boolean, did the db diff pass? + overall_passed: a boolean, did the test pass? 
+ total_test_time: a String representation of the test duration + start_date: a String representation of this TestData's start date + end_date: a String representation of the TestData's end date + total_ingest_time: a String representation of the total ingest time + artifact_count: a Nat, the number of artifacts + artifact_fail: a Nat, the number of artifact failures + heap_space: a String representation of TODO + service_times: a String representation of TODO + autopsy_version: a String, the version of autopsy that was run + ingest_messages: a Nat, the number of ingest messages + indexed_files: a Nat, the number of files indexed during the ingest + indexed_chunks: a Nat, the number of chunks indexed during the ingest + printerror: a listof_String, the error messages printed during this TestData's test + printout: a listof_String, the messages pritned during this TestData's test + """ + + def __init__(self, image, main_config): + """Init this TestData with it's image and the test configuration. + + Args: + image: the Image to be tested. + main_config: the global TestConfiguration. + """ + # Configuration Data + self.main_config = main_config + self.ant = [] + self.image_file = str(image) + # TODO: This 0 should be be refactored out, but it will require rebuilding and changing of outputs. 
+ self.image = get_image_name(self.image_file) + self.image_name = self.image + "(0)" + # Directory structure and files + self.output_path = make_path(self.main_config.output_dir, self.image_name) + self.autopsy_data_file = make_path(self.output_path, self.image_name + "Autopsy_data.txt") + self.warning_log = make_local_path(self.output_path, "AutopsyLogs.txt") + self.antlog_dir = make_local_path(self.output_path, "antlog.txt") + self.test_dbdump = make_path(self.output_path, self.image_name + + "DBDump.txt") + self.common_log_path = make_local_path(self.output_path, self.image_name + COMMON_LOG) + self.sorted_log = make_local_path(self.output_path, self.image_name + "SortedErrors.txt") + self.reports_dir = make_path(self.output_path, AUTOPSY_TEST_CASE, "Reports") + self.gold_data_dir = make_path(self.main_config.img_gold, self.image_name) + self.gold_archive = make_path(self.main_config.gold, + self.image_name + "-archive.zip") + self.logs_dir = make_path(self.output_path, "logs") + self.solr_index = make_path(self.output_path, AUTOPSY_TEST_CASE, + "ModuleOutput", "KeywordSearch") + # Results and Info + self.html_report_passed = False + self.errors_diff_passed = False + self.db_diff_passed = False + self.overall_passed = False + # Ingest info + self.total_test_time = "" + self.start_date = "" + self.end_date = "" + self.total_ingest_time = "" + self.artifact_count = 0 + self.artifact_fail = 0 + self.heap_space = "" + self.service_times = "" + self.autopsy_version = "" + self.ingest_messages = 0 + self.indexed_files = 0 + self.indexed_chunks = 0 + # Error tracking + self.printerror = [] + self.printout = [] + + def ant_to_string(self): + string = "" + for arg in self.ant: + string += (arg + " ") + return string + + def get_db_path(self, db_type): + """Get the path to the database file that corresponds to the given DBType. + + Args: + DBType: the DBType of the path to be generated. 
+ """ + if(db_type == DBType.GOLD): + db_path = make_path(self.gold_data_dir, DB_FILENAME) + elif(db_type == DBType.OUTPUT): + db_path = make_path(self.main_config.output_dir, self.image_name, AUTOPSY_TEST_CASE, DB_FILENAME) + else: + db_path = make_path(self.main_config.output_dir, self.image_name, AUTOPSY_TEST_CASE, BACKUP_DB_FILENAME) + return db_path + + def get_html_report_path(self, html_type): + """Get the path to the HTML Report folder that corresponds to the given DBType. + + Args: + DBType: the DBType of the path to be generated. + """ + if(html_type == DBType.GOLD): + return make_path(self.gold_data_dir, "Report") + else: + # Autopsy creates an HTML report folder in the form AutopsyTestCase DATE-TIME + # It's impossible to get the exact time the folder was created, but the folder + # we are looking for is the only one in the self.reports_dir folder + html_path = "" + for fs in os.listdir(self.reports_dir): + html_path = make_path(self.reports_dir, fs) + if os.path.isdir(html_path): + break + return make_path(html_path, os.listdir(html_path)[0]) + + def get_sorted_data_path(self, file_type): + """Get the path to the SortedData file that corresponds to the given DBType. + + Args: + file_type: the DBType of the path to be generated + """ + return self._get_path_to_file(file_type, "SortedData.txt") + + def get_sorted_errors_path(self, file_type): + """Get the path to the SortedErrors file that correspodns to the given + DBType. + + Args: + file_type: the DBType of the path to be generated + """ + return self._get_path_to_file(file_type, "SortedErrors.txt") + + def get_db_dump_path(self, file_type): + """Get the path to the DBDump file that corresponds to the given DBType. + + Args: + file_type: the DBType of the path to be generated + """ + return self._get_path_to_file(file_type, "DBDump.txt") + + def _get_path_to_file(self, file_type, file_name): + """Get the path to the specified file with the specified type. 
+ + Args: + file_type: the DBType of the path to be generated + file_name: a String, the filename of the path to be generated + """ + full_filename = self.image_name + file_name + if(file_type == DBType.GOLD): + return make_path(self.gold_data_dir, full_filename) + else: + return make_path(self.output_path, full_filename) + + +class TestConfiguration(object): + """Container for test configuration data. + + The Master Test Configuration. Encapsulates consolidated high level input from + config XML file and command-line arguments. + + Attributes: + args: an Args, the command line arguments + output_dir: a pathto_Dir, the output directory + input_dir: a pathto_Dir, the input directory + gold: a pathto_Dir, the gold directory + img_gold: a pathto_Dir, the temp directory where gold images are unzipped to + csv: a pathto_File, the local csv file + global_csv: a pathto_File, the global csv file + html_log: a pathto_File + known_bad_path: + keyword_path: + nsrl_path: + build_path: a pathto_File, the ant build file which runs the tests + autopsy_version: + ingest_messages: a Nat, number of ingest messages + indexed_files: a Nat, the number of indexed files + indexed_chunks: a Nat, the number of indexed chunks + timer: + images: a listof_Image, the images to be tested + timeout: a Nat, the amount of time before killing the test + ant: a listof_String, the ant command to run the tests + """ + + def __init__(self, args): + """Inits TestConfiguration and loads a config file if available. + + Args: + args: an Args, the command line arguments. 
+ """ + self.args = args + # Paths: + self.output_dir = "" + self.input_dir = make_local_path("..","input") + self.gold = make_path("..", "output", "gold") + self.img_gold = make_path(self.gold, 'tmp') + # Logs: + self.csv = "" + self.global_csv = "" + self.html_log = "" + # Ant info: + self.known_bad_path = make_path(self.input_dir, "notablehashes.txt-md5.idx") + self.keyword_path = make_path(self.input_dir, "notablekeywords.xml") + self.nsrl_path = make_path(self.input_dir, "nsrl.txt-md5.idx") + self.build_path = make_path("..", "build.xml") + # Infinite Testing info + timer = 0 + self.images = [] + # Email info + self.email_enabled = args.email_enabled + self.mail_server = "" + self.mail_to = "" + self.mail_subject = "" + # Set the timeout to something huge + # The entire tester should not timeout before this number in ms + # However it only seems to take about half this time + # And it's very buggy, so we're being careful + self.timeout = 24 * 60 * 60 * 1000 * 1000 + + if not self.args.single: + self._load_config_file(self.args.config_file) + else: + self.images.append(self.args.single_file) + self._init_logs() + #self._init_imgs() + #self._init_build_info() + + + def _load_config_file(self, config_file): + """Updates this TestConfiguration's attributes from the config file. + + Initializes this TestConfiguration by iterating through the XML config file + command-line argument. 
Populates self.images and optional email configuration + + Args: + config_file: ConfigFile - the configuration file to load + """ + try: + count = 0 + parsed_config = parse(config_file) + logres = [] + counts = {} + if parsed_config.getElementsByTagName("indir"): + self.input_dir = parsed_config.getElementsByTagName("indir")[0].getAttribute("value").encode().decode("utf_8") + if parsed_config.getElementsByTagName("global_csv"): + self.global_csv = parsed_config.getElementsByTagName("global_csv")[0].getAttribute("value").encode().decode("utf_8") + self.global_csv = make_local_path(self.global_csv) + if parsed_config.getElementsByTagName("golddir"): + self.gold = parsed_config.getElementsByTagName("golddir")[0].getAttribute("value").encode().decode("utf_8") + self.img_gold = make_path(self.gold, 'tmp') + + self._init_imgs(parsed_config) + self._init_build_info(parsed_config) + self._init_email_info(parsed_config) + + except IOError as e: + msg = "There was an error loading the configuration file.\n" + msg += "\t" + str(e) + Errors.add_email_msg(msg) + logging.critical(traceback.format_exc()) + print(traceback.format_exc()) + + def _init_logs(self): + """Setup output folder, logs, and reporting infrastructure.""" + if(not dir_exists(make_path("..", "output", "results"))): + os.makedirs(make_path("..", "output", "results",)) + self.output_dir = make_path("..", "output", "results", time.strftime("%Y.%m.%d-%H.%M.%S")) + os.makedirs(self.output_dir) + self.csv = make_local_path(self.output_dir, "CSV.txt") + self.html_log = make_path(self.output_dir, "AutopsyTestCase.html") + log_name = self.output_dir + "\\regression.log" + logging.basicConfig(filename=log_name, level=logging.DEBUG) + + def _init_build_info(self, parsed_config): + """Initializes paths that point to information necessary to run the AutopsyIngest.""" + build_elements = parsed_config.getElementsByTagName("build") + if build_elements: + build_element = build_elements[0] + build_path = 
build_element.getAttribute("value").encode().decode("utf_8") + self.build_path = build_path + + def _init_imgs(self, parsed_config): + """Initialize the list of images to run tests on.""" + for element in parsed_config.getElementsByTagName("image"): + value = element.getAttribute("value").encode().decode("utf_8") + print ("Image in Config File: " + value) + if file_exists(value): + self.images.append(value) + else: + msg = "File: " + value + " doesn't exist" + Errors.print_error(msg) + Errors.add_email_msg(msg) + image_count = len(self.images) + + # Sanity check to see if there are obvious gold images that we are not testing + gold_count = 0 + for file in os.listdir(self.gold): + if not(file == 'tmp'): + gold_count+=1 + + if (image_count > gold_count): + print("******Alert: There are more input images than gold standards, some images will not be properly tested.\n") + elif (image_count < gold_count): + print("******Alert: There are more gold standards than input images, this will not check all gold Standards.\n") + + def _init_email_info(self, parsed_config): + """Initializes email information dictionary""" + email_elements = parsed_config.getElementsByTagName("email") + if email_elements: + mail_to = email_elements[0] + self.mail_to = mail_to.getAttribute("value").encode().decode("utf_8") + mail_server_elements = parsed_config.getElementsByTagName("mail_server") + if mail_server_elements: + mail_from = mail_server_elements[0] + self.mail_server = mail_from.getAttribute("value").encode().decode("utf_8") + subject_elements = parsed_config.getElementsByTagName("subject") + if subject_elements: + subject = subject_elements[0] + self.mail_subject = subject.getAttribute("value").encode().decode("utf_8") + if self.mail_server and self.mail_to and self.args.email_enabled: + self.email_enabled = True + print("Email will be sent to ", self.mail_to) + else: + print("No email will be sent.") + + +#-------------------------------------------------# +# Functions relating to 
comparing outputs # +#-------------------------------------------------# +class TestResultsDiffer(object): + """Compares results for a single test.""" + + def run_diff(test_data): + """Compares results for a single test. + + Args: + test_data: the TestData to use. + databaseDiff: TskDbDiff object created based off test_data + """ + try: + output_db = test_data.get_db_path(DBType.OUTPUT) + gold_db = test_data.get_db_path(DBType.GOLD) + output_dir = test_data.output_path + gold_bb_dump = test_data.get_sorted_data_path(DBType.GOLD) + gold_dump = test_data.get_db_dump_path(DBType.GOLD) + test_data.db_diff_pass = all(TskDbDiff(output_db, gold_db, output_dir=output_dir, gold_bb_dump=gold_bb_dump, + gold_dump=gold_dump).run_diff()) + + # Compare Exceptions + # replace is a fucntion that replaces strings of digits with 'd' + # this is needed so dates and times will not cause the diff to fail + replace = lambda file: re.sub(re.compile("\d"), "d", file) + output_errors = test_data.get_sorted_errors_path(DBType.OUTPUT) + gold_errors = test_data.get_sorted_errors_path(DBType.GOLD) + passed = TestResultsDiffer._compare_text(output_errors, gold_errors, + replace) + test_data.errors_diff_passed = passed + + # Compare html output + gold_report_path = test_data.get_html_report_path(DBType.GOLD) + output_report_path = test_data.get_html_report_path(DBType.OUTPUT) + passed = TestResultsDiffer._html_report_diff(gold_report_path, + output_report_path) + test_data.html_report_passed = passed + + # Clean up tmp folder + del_dir(test_data.gold_data_dir) + + except sqlite3.OperationalError as e: + Errors.print_error("Tests failed while running the diff:\n") + Errors.print_error(str(e)) + except TskDbDiffException as e: + Errors.print_error(str(e)) + except Exception as e: + Errors.print_error("Tests failed due to an error, try rebuilding or creating gold standards.\n") + Errors.print_error(str(e) + "\n") + print(traceback.format_exc()) + + def _compare_text(output_file, gold_file, 
process=None): + """Compare two text files. + + Args: + output_file: a pathto_File, the output text file + gold_file: a pathto_File, the input text file + pre-process: (optional) a function of String -> String that will be + called on each input file before the diff, if specified. + """ + if(not file_exists(output_file)): + return False + output_data = codecs.open(output_file, "r", "utf_8").read() + gold_data = codecs.open(gold_file, "r", "utf_8").read() + + if process is not None: + output_data = process(output_data) + gold_data = process(gold_data) + + if (not(gold_data == output_data)): + diff_path = os.path.splitext(os.path.basename(output_file))[0] + diff_path += "-Diff.txt" + diff_file = codecs.open(diff_path, "wb", "utf_8") + dffcmdlst = ["diff", output_file, gold_file] + subprocess.call(dffcmdlst, stdout = diff_file) + Errors.add_email_attachment(diff_path) + msg = "There was a difference in " + msg += os.path.basename(output_file) + ".\n" + Errors.add_email_msg(msg) + Errors.print_error(msg) + return False + else: + return True + + def _html_report_diff(gold_report_path, output_report_path): + """Compare the output and gold html reports. + + Args: + gold_report_path: a pathto_Dir, the gold HTML report directory + output_report_path: a pathto_Dir, the output HTML report directory + + Returns: + true, if the reports match, false otherwise. + """ + try: + gold_html_files = get_files_by_ext(gold_report_path, ".html") + output_html_files = get_files_by_ext(output_report_path, ".html") + + #ensure both reports have the same number of files and are in the same order + if(len(gold_html_files) != len(output_html_files)): + msg = "The reports did not have the same number or files." + msg += "One of the reports may have been corrupted." 
+ Errors.print_error(msg) + else: + gold_html_files.sort() + output_html_files.sort() + + total = {"Gold": 0, "New": 0} + for gold, output in zip(gold_html_files, output_html_files): + count = TestResultsDiffer._compare_report_files(gold, output) + total["Gold"] += count[0] + total["New"] += count[1] + + okay = "The test report matches the gold report." + errors=["Gold report had " + str(total["Gold"]) +" errors", "New report had " + str(total["New"]) + " errors."] + print_report(errors, "REPORT COMPARISON", okay) + + if total["Gold"] == total["New"]: + return True + else: + Errors.print_error("The reports did not match each other.\n " + errors[0] +" and the " + errors[1]) + return False + except OSError as e: + e.print_error() + return False + except Exception as e: + Errors.print_error("Error: Unknown fatal error comparing reports.") + Errors.print_error(str(e) + "\n") + logging.critical(traceback.format_exc()) + return False + + def _compare_report_files(a_path, b_path): + """Compares the two specified report html files. + + Args: + a_path: a pathto_File, the first html report file + b_path: a pathto_File, the second html report file + + Returns: + a tuple of (Nat, Nat), which represent the length of each + unordered list in the html report files, or (0, 0) if the + lenghts are the same. + """ + a_file = open(a_path) + b_file = open(b_path) + a = a_file.read() + b = b_file.read() + a = a[a.find("
            "):] + b = b[b.find("
              "):] + + a_list = TestResultsDiffer._split(a, 50) + b_list = TestResultsDiffer._split(b, 50) + if not len(a_list) == len(b_list): + ex = (len(a_list), len(b_list)) + return ex + else: + return (0, 0) + + # Split a string into an array of string of the given size + def _split(input, size): + return [input[start:start+size] for start in range(0, len(input), size)] + + +class Reports(object): + def generate_reports(test_data): + """Generate the reports for a single test + + Args: + test_data: the TestData + """ + Reports._generate_html(test_data) + if test_data.main_config.global_csv: + Reports._generate_csv(test_data.main_config.global_csv, test_data) + else: + Reports._generate_csv(test_data.main_config.csv, test_data) + + def _generate_html(test_data): + """Generate the HTML log file.""" + # If the file doesn't exist yet, this is the first test_config to run for + # this test, so we need to make the start of the html log + html_log = test_data.main_config.html_log + if not file_exists(html_log): + Reports.write_html_head() + with open(html_log, "a") as html: + # The image title + title = "

              " + test_data.image_name + " \ + tested on " + socket.gethostname() + "

              \ +

              \ + Errors and Warnings |\ + Information |\ + General Output |\ + Logs\ +

              " + # The script errors found + if not test_data.overall_passed: + ids = 'errors1' + else: + ids = 'errors' + errors = "
              \ +

              Errors and Warnings

              \ +
              " + # For each error we have logged in the test_config + for error in test_data.printerror: + # Replace < and > to avoid any html display errors + errors += "

              " + error.replace("<", "<").replace(">", ">") + "

              " + # If there is a \n, we probably want a
              in the html + if "\n" in error: + errors += "
              " + errors += "
              " + + # Links to the logs + logs = "
              \ +

              Logs

              \ +
              " + logs_path = test_data.logs_dir + for file in os.listdir(logs_path): + logs += "

              " + file + "

              " + logs += "
              " + + # All the testing information + info = "
              \ +

              Information

              \ +
              \ + " + # The individual elements + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" + info += "" +# info += "" +# info += "" +# info += "" +# info += "" +# info += "" +# info += "" + info += "
              Image Path:" + test_data.image_file + "
              Image Name:" + test_data.image_name + "
              test_config Output Directory:" + test_data.main_config.output_dir + "
              Autopsy Version:" + test_data.autopsy_version + "
              Heap Space:" + test_data.heap_space + "
              Test Start Date:" + test_data.start_date + "
              Test End Date:" + test_data.end_date + "
              Total Test Time:" + test_data.total_test_time + "
              Total Ingest Time:" + test_data.total_ingest_time + "
              Exceptions Count:" + str(len(get_exceptions(test_data))) + "
              Autopsy OutOfMemoryExceptions:" + str(len(search_logs("OutOfMemoryException", test_data))) + "
              Autopsy OutOfMemoryErrors:" + str(len(search_logs("OutOfMemoryError", test_data))) + "
              Tika OutOfMemoryErrors/Exceptions:" + str(Reports._get_num_memory_errors("tika", test_data)) + "
              Solr OutOfMemoryErrors/Exceptions:" + str(Reports._get_num_memory_errors("solr", test_data)) + "
              TskCoreExceptions:" + str(len(search_log_set("autopsy", "TskCoreException", test_data))) + "
              TskDataExceptions:" + str(len(search_log_set("autopsy", "TskDataException", test_data))) + "
              Ingest Messages Count:" + str(test_data.ingest_messages) + "
              Indexed Files Count:" + str(test_data.indexed_files) + "
              Indexed File Chunks Count:" + str(test_data.indexed_chunks) + "
              Out Of Disk Space:\ +

              (will skew other test results)

              " + str(len(search_log_set("autopsy", "Stopping ingest due to low disk space on disk", test_data))) + "
              TSK Objects Count:" + str(test_data.db_diff_results.output_objs) + "
              Artifacts Count:" + str(test_data.db_diff_results.output_artifacts)+ "
              Attributes Count:" + str(test_data.db_diff_results.output_attrs) + "
              \ +
              " + # For all the general print statements in the test_config + output = "
              \ +

              General Output

              \ +
              " + # For each printout in the test_config's list + for out in test_data.printout: + output += "

              " + out + "

              " + # If there was a \n it probably means we want a
              in the html + if "\n" in out: + output += "
              " + output += "
              " + + html.write(title) + html.write(errors) + html.write(info) + html.write(logs) + html.write(output) + + def write_html_head(html_log): + """Write the top of the HTML log file. + + Args: + html_log: a pathto_File, the global HTML log + """ + with open(str(html_log), "a") as html: + head = "\ + \ + AutopsyTesttest_config Output\ + \ + \ + " + html.write(head) + + def write_html_foot(html_log): + """Write the bottom of the HTML log file. + + Args: + html_log: a pathto_File, the global HTML log + """ + with open(html_log, "a") as html: + head = "" + html.write(head) + + def html_add_images(html_log, full_image_names): + """Add all the image names to the HTML log. + + Args: + full_image_names: a listof_String, each representing an image name + html_log: a pathto_File, the global HTML log + """ + # If the file doesn't exist yet, this is the first test_config to run for + # this test, so we need to make the start of the html log + if not file_exists(html_log): + Reports.write_html_head(html_log) + with open(html_log, "a") as html: + links = [] + for full_name in full_image_names: + name = get_image_name(full_name) + links.append("" + name + "") + html.write("

              " + (" | ".join(links)) + "

              ") + + def _generate_csv(csv_path, test_data): + """Generate the CSV log file""" + # If the CSV file hasn't already been generated, this is the + # first run, and we need to add the column names + if not file_exists(csv_path): + Reports.csv_header(csv_path) + # Now add on the fields to a new row + with open(csv_path, "a") as csv: + # Variables that need to be written + vars = [] + vars.append( test_data.image_file ) + vars.append( test_data.image_name ) + vars.append( test_data.main_config.output_dir ) + vars.append( socket.gethostname() ) + vars.append( test_data.autopsy_version ) + vars.append( test_data.heap_space ) + vars.append( test_data.start_date ) + vars.append( test_data.end_date ) + vars.append( test_data.total_test_time ) + vars.append( test_data.total_ingest_time ) + vars.append( test_data.service_times ) + vars.append( str(len(get_exceptions(test_data))) ) + vars.append( str(Reports._get_num_memory_errors("autopsy", test_data)) ) + vars.append( str(Reports._get_num_memory_errors("tika", test_data)) ) + vars.append( str(Reports._get_num_memory_errors("solr", test_data)) ) + vars.append( str(len(search_log_set("autopsy", "TskCoreException", test_data))) ) + vars.append( str(len(search_log_set("autopsy", "TskDataException", test_data))) ) + vars.append( str(test_data.ingest_messages) ) + vars.append( str(test_data.indexed_files) ) + vars.append( str(test_data.indexed_chunks) ) + vars.append( str(len(search_log_set("autopsy", "Stopping ingest due to low disk space on disk", test_data))) ) +# vars.append( str(test_data.db_diff_results.output_objs) ) +# vars.append( str(test_data.db_diff_results.output_artifacts) ) +# vars.append( str(test_data.db_diff_results.output_objs) ) + vars.append( make_local_path("gold", test_data.image_name, DB_FILENAME) ) +# vars.append( test_data.db_diff_results.get_artifact_comparison() ) +# vars.append( test_data.db_diff_results.get_attribute_comparison() ) + vars.append( make_local_path("gold", 
test_data.image_name, "standard.html") ) + vars.append( str(test_data.html_report_passed) ) + vars.append( test_data.ant_to_string() ) + # Join it together with a ", " + output = "|".join(vars) + output += "\n" + # Write to the log! + csv.write(output) + + def csv_header(csv_path): + """Generate the CSV column names.""" + with open(csv_path, "w") as csv: + titles = [] + titles.append("Image Path") + titles.append("Image Name") + titles.append("Output test_config Directory") + titles.append("Host Name") + titles.append("Autopsy Version") + titles.append("Heap Space Setting") + titles.append("Test Start Date") + titles.append("Test End Date") + titles.append("Total Test Time") + titles.append("Total Ingest Time") + titles.append("Service Times") + titles.append("Autopsy Exceptions") + titles.append("Autopsy OutOfMemoryErrors/Exceptions") + titles.append("Tika OutOfMemoryErrors/Exceptions") + titles.append("Solr OutOfMemoryErrors/Exceptions") + titles.append("TskCoreExceptions") + titles.append("TskDataExceptions") + titles.append("Ingest Messages Count") + titles.append("Indexed Files Count") + titles.append("Indexed File Chunks Count") + titles.append("Out Of Disk Space") +# titles.append("Tsk Objects Count") +# titles.append("Artifacts Count") +# titles.append("Attributes Count") + titles.append("Gold Database Name") +# titles.append("Artifacts Comparison") +# titles.append("Attributes Comparison") + titles.append("Gold Report Name") + titles.append("Report Comparison") + titles.append("Ant Command Line") + output = "|".join(titles) + output += "\n" + csv.write(output) + + def _get_num_memory_errors(type, test_data): + """Get the number of OutOfMemory errors and Exceptions. + + Args: + type: a String representing the type of log to check. + test_data: the TestData to examine. 
+ """ + return (len(search_log_set(type, "OutOfMemoryError", test_data)) + + len(search_log_set(type, "OutOfMemoryException", test_data))) + +class Logs(object): + + def generate_log_data(test_data): + """Find and handle relevent data from the Autopsy logs. + + Args: + test_data: the TestData whose logs to examine + """ + Logs._generate_common_log(test_data) + try: + Logs._fill_ingest_data(test_data) + except Exception as e: + Errors.print_error("Error: Unknown fatal error when filling test_config data.") + Errors.print_error(str(e) + "\n") + logging.critical(traceback.format_exc()) + # If running in verbose mode (-v) + if test_data.main_config.args.verbose: + errors = Logs._report_all_errors() + okay = "No warnings or errors in any log files." + print_report(errors, "VERBOSE", okay) + + def _generate_common_log(test_data): + """Generate the common log, the log of all exceptions and warnings from + each log file generated by Autopsy. + + Args: + test_data: the TestData to generate a log for + """ + try: + logs_path = test_data.logs_dir + common_log = codecs.open(test_data.common_log_path, "w", "utf_8") + warning_log = codecs.open(test_data.warning_log, "w", "utf_8") + common_log.write("--------------------------------------------------\n") + common_log.write(test_data.image_name + "\n") + common_log.write("--------------------------------------------------\n") + rep_path = make_local_path(test_data.main_config.output_dir) + rep_path = rep_path.replace("\\\\", "\\") + for file in os.listdir(logs_path): + log = codecs.open(make_path(logs_path, file), "r", "utf_8") + for line in log: + line = line.replace(rep_path, "test_data") + if line.startswith("Exception"): + common_log.write(file +": " + line) + elif line.startswith("Error"): + common_log.write(file +": " + line) + elif line.startswith("SEVERE"): + common_log.write(file +":" + line) + else: + warning_log.write(file +": " + line) + log.close() + common_log.write("\n") + common_log.close() + 
print(test_data.sorted_log) + srtcmdlst = ["sort", test_data.common_log_path, "-o", test_data.sorted_log] + subprocess.call(srtcmdlst) + except (OSError, IOError) as e: + Errors.print_error("Error: Unable to generate the common log.") + Errors.print_error(str(e) + "\n") + Errors.print_error(traceback.format_exc()) + logging.critical(traceback.format_exc()) + + def _fill_ingest_data(test_data): + """Fill the TestDatas variables that require the log files. + + Args: + test_data: the TestData to modify + """ + try: + # Open autopsy.log.0 + log_path = make_path(test_data.logs_dir, "autopsy.log.0") + log = open(log_path) + + # Set the TestData start time based off the first line of autopsy.log.0 + # *** If logging time format ever changes this will break *** + test_data.start_date = log.readline().split(" org.")[0] + + # Set the test_data ending time based off the "create" time (when the file was copied) + test_data.end_date = time.ctime(os.path.getmtime(log_path)) + except IOError as e: + Errors.print_error("Error: Unable to open autopsy.log.0.") + Errors.print_error(str(e) + "\n") + logging.warning(traceback.format_exc()) + # Start date must look like: "Jul 16, 2012 12:57:53 PM" + # End date must look like: "Mon Jul 16 13:02:42 2012" + # *** If logging time format ever changes this will break *** + start = datetime.datetime.strptime(test_data.start_date, "%b %d, %Y %I:%M:%S %p") + end = datetime.datetime.strptime(test_data.end_date, "%a %b %d %H:%M:%S %Y") + test_data.total_test_time = str(end - start) + + try: + # Set Autopsy version, heap space, ingest time, and service times + + version_line = search_logs("INFO: Application name: Autopsy, version:", test_data)[0] + test_data.autopsy_version = get_word_at(version_line, 5).rstrip(",") + + test_data.heap_space = search_logs("Heap memory usage:", test_data)[0].rstrip().split(": ")[1] + + ingest_line = search_logs("Ingest (including enqueue)", test_data)[0] + test_data.total_ingest_time = get_word_at(ingest_line, 
6).rstrip() + + message_line = search_log_set("autopsy", "Ingest messages count:", test_data)[0] + test_data.ingest_messages = int(message_line.rstrip().split(": ")[2]) + + files_line = search_log_set("autopsy", "Indexed files count:", test_data)[0] + test_data.indexed_files = int(files_line.rstrip().split(": ")[2]) + + chunks_line = search_log_set("autopsy", "Indexed file chunks count:", test_data)[0] + test_data.indexed_chunks = int(chunks_line.rstrip().split(": ")[2]) + except (OSError, IOError) as e: + Errors.print_error("Error: Unable to find the required information to fill test_config data.") + Errors.print_error(str(e) + "\n") + logging.critical(traceback.format_exc()) + print(traceback.format_exc()) + try: + service_lines = search_log("autopsy.log.0", "to process()", test_data) + service_list = [] + for line in service_lines: + words = line.split(" ") + # Kind of forcing our way into getting this data + # If this format changes, the tester will break + i = words.index("secs.") + times = words[i-4] + " " + times += words[i-3] + " " + times += words[i-2] + " " + times += words[i-1] + " " + times += words[i] + service_list.append(times) + test_data.service_times = "; ".join(service_list) + except (OSError, IOError) as e: + Errors.print_error("Error: Unknown fatal error when finding service times.") + Errors.print_error(str(e) + "\n") + logging.critical(traceback.format_exc()) + + def _report_all_errors(): + """Generate a list of all the errors found in the common log. + + Returns: + a listof_String, the errors found in the common log + """ + try: + return get_warnings() + get_exceptions() + except (OSError, IOError) as e: + Errors.print_error("Error: Unknown fatal error when reporting all errors.") + Errors.print_error(str(e) + "\n") + logging.warning(traceback.format_exc()) + + def search_common_log(string, test_data): + """Search the common log for any instances of a given string. + + Args: + string: the String to search for. 
+ test_data: the TestData that holds the log to search. + + Returns: + a listof_String, all the lines that the string is found on + """ + results = [] + log = codecs.open(test_data.common_log_path, "r", "utf_8") + for line in log: + if string in line: + results.append(line) + log.close() + return results + + +def print_report(errors, name, okay): + """Print a report with the specified information. + + Args: + errors: a listof_String, the errors to report. + name: a String, the name of the report. + okay: the String to print when there are no errors. + """ + if errors: + Errors.print_error("--------< " + name + " >----------") + for error in errors: + Errors.print_error(str(error)) + Errors.print_error("--------< / " + name + " >--------\n") + else: + Errors.print_out("-----------------------------------------------------------------") + Errors.print_out("< " + name + " - " + okay + " />") + Errors.print_out("-----------------------------------------------------------------\n") + + +def get_exceptions(test_data): + """Get a list of the exceptions in the autopsy logs. + + Args: + test_data: the TestData to use to find the exceptions. + Returns: + a listof_String, the exceptions found in the logs. + """ + exceptions = [] + logs_path = test_data.logs_dir + results = [] + for file in os.listdir(logs_path): + if "autopsy.log" in file: + log = codecs.open(make_path(logs_path, file), "r", "utf_8") + ex = re.compile("\SException") + er = re.compile("\SError") + for line in log: + if ex.search(line) or er.search(line): + exceptions.append(line) + log.close() + return exceptions + +def get_warnings(test_data): + """Get a list of the warnings listed in the common log. + + Args: + test_data: the TestData to use to find the warnings + + Returns: + listof_String, the warnings found. 
+ """ + warnings = [] + common_log = codecs.open(test_data.warning_log, "r", "utf_8") + for line in common_log: + if "warning" in line.lower(): + warnings.append(line) + common_log.close() + return warnings + +def copy_logs(test_data): + """Copy the Autopsy generated logs to output directory. + + Args: + test_data: the TestData whose logs will be copied + """ + try: + log_dir = os.path.join("..", "..", "Testing","build","test","qa-functional","work","userdir0","var","log") + shutil.copytree(log_dir, test_data.logs_dir) + except OSError as e: + printerror(test_data,"Error: Failed to copy the logs.") + printerror(test_data,str(e) + "\n") + logging.warning(traceback.format_exc()) + +def setDay(): + global Day + Day = int(strftime("%d", localtime())) + +def getLastDay(): + return Day + +def getDay(): + return int(strftime("%d", localtime())) + +def newDay(): + return getLastDay() != getDay() + +#------------------------------------------------------------# +# Exception classes to manage "acceptable" thrown exceptions # +# versus unexpected and fatal exceptions # +#------------------------------------------------------------# + +class FileNotFoundException(Exception): + """ + If a file cannot be found by one of the helper functions, + they will throw a FileNotFoundException unless the purpose + is to return False. 
+ """ + def __init__(self, file): + self.file = file + self.strerror = "FileNotFoundException: " + file + + def print_error(self): + Errors.print_error("Error: File could not be found at:") + Errors.print_error(self.file + "\n") + + def error(self): + error = "Error: File could not be found at:\n" + self.file + "\n" + return error + +class DirNotFoundException(Exception): + """ + If a directory cannot be found by a helper function, + it will throw this exception + """ + def __init__(self, dir): + self.dir = dir + self.strerror = "DirNotFoundException: " + dir + + def print_error(self): + Errors.print_error("Error: Directory could not be found at:") + Errors.print_error(self.dir + "\n") + + def error(self): + error = "Error: Directory could not be found at:\n" + self.dir + "\n" + return error + + +class Errors: + """A class used to manage error reporting. + + Attributes: + printout: a listof_String, the non-error messages that were printed + printerror: a listof_String, the error messages that were printed + email_body: a String, the body of the report email + email_msg_prefix: a String, the prefix for lines added to the email + email_attchs: a listof_pathto_File, the files to be attached to the + report email + """ + printout = [] + printerror = [] + email_body = "" + email_msg_prefix = "Configuration" + email_attachs = [] + + def set_testing_phase(image_name): + """Change the email message prefix to be the given testing phase. + + Args: + image_name: a String, representing the current image being tested + """ + Errors.email_msg_prefix = image_name + + def print_out(msg): + """Print out an informational message. + + Args: + msg: a String, the message to be printed + """ + print(msg) + Errors.printout.append(msg) + + def print_error(msg): + """Print out an error message. + + Args: + msg: a String, the error message to be printed. 
+ """ + print(msg) + Errors.printerror.append(msg) + + def clear_print_logs(): + """Reset the image-specific attributes of the Errors class.""" + Errors.printout = [] + Errors.printerror = [] + + def add_email_msg(msg): + """Add the given message to the body of the report email. + + Args: + msg: a String, the message to be added to the email + """ + Errors.email_body += Errors.email_msg_prefix + ":" + msg + + def add_email_attachment(path): + """Add the given file to be an attachment for the report email + + Args: + file: a pathto_File, the file to add + """ + Errors.email_attachs.append(path) + + +class DiffResults(object): + """Container for the results of the database diff tests. + + Stores artifact, object, and attribute counts and comparisons generated by + TskDbDiff. + + Attributes: + gold_attrs: a Nat, the number of gold attributes + output_attrs: a Nat, the number of output attributes + gold_objs: a Nat, the number of gold objects + output_objs: a Nat, the number of output objects + artifact_comp: a listof_String, describing the differences + attribute_comp: a listof_String, describing the differences + passed: a boolean, did the diff pass? 
+ """ + def __init__(self, tsk_diff): + """Inits a DiffResults + + Args: + tsk_diff: a TskDBDiff + """ + self.gold_attrs = tsk_diff.gold_attributes + self.output_attrs = tsk_diff.autopsy_attributes + self.gold_objs = tsk_diff.gold_objects + self.output_objs = tsk_diff.autopsy_objects + self.artifact_comp = tsk_diff.artifact_comparison + self.attribute_comp = tsk_diff.attribute_comparison + self.gold_artifacts = len(tsk_diff.gold_artifacts) + self.output_artifacts = len(tsk_diff.autopsy_artifacts) + self.passed = tsk_diff.passed + + def get_artifact_comparison(self): + if not self.artifact_comp: + return "All counts matched" + else: + return "; ".join(self.artifact_comp) + + def get_attribute_comparison(self): + if not self.attribute_comp: + return "All counts matched" + list = [] + for error in self.attribute_comp: + list.append(error) + return ";".join(list) + + +#-------------------------------------------------------------# +# Parses argv and stores booleans to match command line input # +#-------------------------------------------------------------# +class Args(object): + """A container for command line options and arguments. 
+ + Attributes: + single: a boolean indicating whether to run in single file mode + single_file: an Image to run the test on + rebuild: a boolean indicating whether to run in rebuild mode + list: a boolean indicating a config file was specified + unallocated: a boolean indicating unallocated space should be ignored + ignore: a boolean indicating the input directory should be ingnored + keep: a boolean indicating whether to keep the SOLR index + verbose: a boolean indicating whether verbose output should be printed + exeception: a boolean indicating whether errors containing exception + exception_string should be printed + exception_sring: a String representing and exception name + fr: a boolean indicating whether gold standard images will be downloaded + """ + def __init__(self): + self.single = False + self.single_file = "" + self.rebuild = False + self.list = False + self.config_file = "" + self.unallocated = False + self.ignore = False + self.keep = False + self.verbose = False + self.exception = False + self.exception_string = "" + self.fr = False + self.email_enabled = False + + def parse(self): + """Get the command line arguments and parse them.""" + nxtproc = [] + nxtproc.append("python3") + nxtproc.append(sys.argv.pop(0)) + while sys.argv: + arg = sys.argv.pop(0) + nxtproc.append(arg) + if(arg == "-f"): + #try: @@@ Commented out until a more specific except statement is added + arg = sys.argv.pop(0) + print("Running on a single file:") + print(path_fix(arg) + "\n") + self.single = True + self.single_file = path_fix(arg) + #except: + # print("Error: No single file given.\n") + # return False + elif(arg == "-r" or arg == "--rebuild"): + print("Running in rebuild mode.\n") + self.rebuild = True + elif(arg == "-l" or arg == "--list"): + try: + arg = sys.argv.pop(0) + nxtproc.append(arg) + print("Running from configuration file:") + print(arg + "\n") + self.list = True + self.config_file = arg + except: + print("Error: No configuration file given.\n") + return 
False + elif(arg == "-u" or arg == "--unallocated"): + print("Ignoring unallocated space.\n") + self.unallocated = True + elif(arg == "-k" or arg == "--keep"): + print("Keeping the Solr index.\n") + self.keep = True + elif(arg == "-v" or arg == "--verbose"): + print("Running in verbose mode:") + print("Printing all thrown exceptions.\n") + self.verbose = True + elif(arg == "-e" or arg == "--exception"): + try: + arg = sys.argv.pop(0) + nxtproc.append(arg) + print("Running in exception mode: ") + print("Printing all exceptions with the string '" + arg + "'\n") + self.exception = True + self.exception_string = arg + except: + print("Error: No exception string given.") + elif arg == "-h" or arg == "--help": + print(usage()) + return False + elif arg == "-fr" or arg == "--forcerun": + print("Not downloading new images") + self.fr = True + elif arg == "-e" or arg == "-email": + self.email_enabled = True + else: + print(usage()) + return False + # Return the args were sucessfully parsed + return self._sanity_check() + + def _sanity_check(self): + """Check to make sure there are no conflicting arguments and the + specified files exist. 
+ + Returns: + False if there are conflicting arguments or a specified file does + not exist, True otherwise + """ + if self.single and self.list: + print("Cannot run both from config file and on a single file.") + return False + if self.list: + if not file_exists(self.config_file): + print("Configuration file does not exist at:", + self.config_file) + return False + elif self.single: + if not file_exists(self.single_file): + msg = "Image file does not exist at: " + self.single_file + return False + if (not self.single) and (not self.ignore) and (not self.list): + self.config_file = "config.xml" + if not file_exists(self.config_file): + msg = "Configuration file does not exist at: " + self.config_file + return False + + return True + +#### +# Helper Functions +#### +def search_logs(string, test_data): + """Search through all the known log files for a given string. + + Args: + string: the String to search for. + test_data: the TestData that holds the logs to search. + + Returns: + a listof_String, the lines that contained the given String. + """ + logs_path = test_data.logs_dir + results = [] + for file in os.listdir(logs_path): + log = codecs.open(make_path(logs_path, file), "r", "utf_8") + for line in log: + if string in line: + results.append(line) + log.close() + return results + +def search_log(log, string, test_data): + """Search the given log for any instances of a given string. + + Args: + log: a pathto_File, the log to search in + string: the String to search for. + test_data: the TestData that holds the log to search. 
+ + Returns: + a listof_String, all the lines that the string is found on + """ + logs_path = make_path(test_data.logs_dir, log) + try: + results = [] + log = codecs.open(logs_path, "r", "utf_8") + for line in log: + if string in line: + results.append(line) + log.close() + if results: + return results + except: + raise FileNotFoundException(logs_path) + +# Search through all the the logs of the given type +# Types include autopsy, tika, and solr +def search_log_set(type, string, test_data): + """Search through all logs to the given type for the given string. + + Args: + type: the type of log to search in. + string: the String to search for. + test_data: the TestData containing the logs to search. + + Returns: + a listof_String, the lines on which the String was found. + """ + logs_path = test_data.logs_dir + results = [] + for file in os.listdir(logs_path): + if type in file: + log = codecs.open(make_path(logs_path, file), "r", "utf_8") + for line in log: + if string in line: + results.append(line) + log.close() + return results + + +def clear_dir(dir): + """Clears all files from a directory and remakes it. + + Args: + dir: a pathto_Dir, the directory to clear + """ + try: + if dir_exists(dir): + shutil.rmtree(dir) + os.makedirs(dir) + return True; + except OSError as e: + printerror(test_data,"Error: Cannot clear the given directory:") + printerror(test_data,dir + "\n") + print(str(e)) + return False; + +def del_dir(dir): + """Delete the given directory. + + Args: + dir: a pathto_Dir, the directory to delete + """ + try: + if dir_exists(dir): + shutil.rmtree(dir) + return True; + except: + printerror(test_data,"Error: Cannot delete the given directory:") + printerror(test_data,dir + "\n") + return False; + +def get_file_in_dir(dir, ext): + """Returns the first file in the given directory with the given extension. 
+ + Args: + dir: a pathto_Dir, the directory to search + ext: a String, the extension to search for + + Returns: + pathto_File, the file that was found + """ + try: + for file in os.listdir(dir): + if file.endswith(ext): + return make_path(dir, file) + # If nothing has been found, raise an exception + raise FileNotFoundException(dir) + except: + raise DirNotFoundException(dir) + +def find_file_in_dir(dir, name, ext): + """Find the file with the given name in the given directory. + + Args: + dir: a pathto_Dir, the directory to search + name: a String, the basename of the file to search for + ext: a String, the extension of the file to search for + """ + try: + for file in os.listdir(dir): + if file.startswith(name): + if file.endswith(ext): + return make_path(dir, file) + raise FileNotFoundException(dir) + except: + raise DirNotFoundException(dir) + + +class OS: + LINUX, MAC, WIN, CYGWIN = range(4) + + +if __name__ == "__main__": + global SYS + if _platform == "linux" or _platform == "linux2": + SYS = OS.LINUX + elif _platform == "darwin": + SYS = OS.MAC + elif _platform == "win32": + SYS = OS.WIN + elif _platform == "cygwin": + SYS = OS.CYGWIN + + if SYS is OS.WIN or SYS is OS.CYGWIN: + main() + else: + print("We only support Windows and Cygwin at this time.") diff --git a/test/script/srcupdater.py b/test/script/srcupdater.py index 99a393d9eb..c8c7d5410b 100644 --- a/test/script/srcupdater.py +++ b/test/script/srcupdater.py @@ -1,187 +1,187 @@ -import codecs -import datetime -import logging -import os -import re -import shutil -import socket -import sqlite3 -import subprocess -import sys -from sys import platform as _platform -import time -import traceback -import xml -from xml.dom.minidom import parse, parseString -import Emailer -from regression_utils import * - -def compile(errore, attachli, parsedin): - global redo - global tryredo - global failedbool - global errorem - errorem = errore - global attachl - attachl = attachli - global passed - global parsed - 
parsed = parsedin - passed = True - tryredo = False - redo = True - while(redo): - passed = True - if(passed): - gitPull("sleuthkit") - if(passed): - vsBuild() - if(passed): - gitPull("autopsy") - if(passed): - antBuild("datamodel", False) - if(passed): - antBuild("autopsy", True) - if(passed): - redo = False - else: - print("Compile Failed") - time.sleep(3600) - attachl = [] - errorem = "The test standard didn't match the gold standard.\n" - failedbool = False - if(tryredo): - errorem = "" - errorem += "Rebuilt properly.\n" - Emailer.send_email(parsed, errorem, attachl, True) - attachl = [] - passed = True - -#Pulls from git -def gitPull(TskOrAutopsy): - global SYS - global errorem - global attachl - ccwd = "" - gppth = make_local_path("..", "GitPullOutput" + TskOrAutopsy + ".txt") - attachl.append(gppth) - gpout = open(gppth, 'a') - toPull = "https://www.github.com/sleuthkit/" + TskOrAutopsy - call = ["git", "pull", toPull] - if TskOrAutopsy == "sleuthkit": - ccwd = os.path.join("..", "..", "..", "sleuthkit") - else: - ccwd = os.path.join("..", "..") - subprocess.call(call, stdout=sys.stdout, cwd=ccwd) - gpout.close() - - -#Builds TSK as a win32 applicatiion -def vsBuild(): - global redo - global tryredo - global passed - global parsed - #Please ensure that the current working directory is $autopsy/testing/script - oldpath = os.getcwd() - os.chdir(os.path.join("..", "..", "..","sleuthkit", "win32")) - vs = [] - vs.append("/cygdrive/c/windows/microsoft.NET/framework/v4.0.30319/MSBuild.exe") - vs.append(os.path.join("Tsk-win.sln")) - vs.append("/p:configuration=release") - vs.append("/p:platform=win32") - vs.append("/t:clean") - vs.append("/t:rebuild") - print(vs) - VSpth = make_local_path("..", "VSOutput.txt") - VSout = open(VSpth, 'a') - subprocess.call(vs, stdout=VSout) - VSout.close() - os.chdir(oldpath) - chk = os.path.join("..", "..", "..","sleuthkit", "win32", "Release", "libtsk_jni.dll") - try: - open(chk) - except IOError as e: - global errorem - global 
attachl - if(not tryredo): - errorem += "LIBTSK C++ failed to build.\n" - attachl.append(VSpth) - send_email(parsed, errorem, attachl, False) - tryredo = True - passed = False - redo = True - - - -#Builds Autopsy or the Datamodel -def antBuild(which, Build): - global redo - global passed - global tryredo - global parsed - directory = os.path.join("..", "..") - ant = [] - if which == "datamodel": - directory = os.path.join("..", "..", "..", "sleuthkit", "bindings", "java") - ant.append("ant") - ant.append("-f") - ant.append(directory) - ant.append("clean") - if(Build): - ant.append("build") - else: - ant.append("dist") - antpth = make_local_path("..", "ant" + which + "Output.txt") - antout = open(antpth, 'a') - succd = subprocess.call(ant, stdout=antout) - antout.close() - global errorem - global attachl - if which == "datamodel": - chk = os.path.join("..", "..", "..","sleuthkit", "bindings", "java", "dist", "TSK_DataModel.jar") - try: - open(chk) - except IOError as e: - if(not tryredo): - errorem += "DataModel Java build failed.\n" - attachl.append(antpth) - Emailer.send_email(parsed, errorem, attachl, False) - passed = False - tryredo = True - elif (succd != 0 and (not tryredo)): - errorem += "Autopsy build failed.\n" - attachl.append(antpth) - Emailer.send_email(parsed, errorem, attachl, False) - tryredo = True - elif (succd != 0): - passed = False - - -def main(): - errore = "" - attachli = [] - config_file = "" - arg = sys.argv.pop(0) - arg = sys.argv.pop(0) - config_file = arg - parsedin = parse(config_file) - compile(errore, attachli, parsedin) - -class OS: - LINUX, MAC, WIN, CYGWIN = range(4) -if __name__ == "__main__": - global SYS - if _platform == "linux" or _platform == "linux2": - SYS = OS.LINUX - elif _platform == "darwin": - SYS = OS.MAC - elif _platform == "win32": - SYS = OS.WIN - elif _platform == "cygwin": - SYS = OS.CYGWIN - - if SYS is OS.WIN or SYS is OS.CYGWIN: - main() - else: - print("We only support Windows and Cygwin at this time.") 
+import codecs +import datetime +import logging +import os +import re +import shutil +import socket +import sqlite3 +import subprocess +import sys +from sys import platform as _platform +import time +import traceback +import xml +from xml.dom.minidom import parse, parseString +import Emailer +from regression_utils import * + +def compile(errore, attachli, parsedin): + global redo + global tryredo + global failedbool + global errorem + errorem = errore + global attachl + attachl = attachli + global passed + global parsed + parsed = parsedin + passed = True + tryredo = False + redo = True + while(redo): + passed = True + if(passed): + gitPull("sleuthkit") + if(passed): + vsBuild() + if(passed): + gitPull("autopsy") + if(passed): + antBuild("datamodel", False) + if(passed): + antBuild("autopsy", True) + if(passed): + redo = False + else: + print("Compile Failed") + time.sleep(3600) + attachl = [] + errorem = "The test standard didn't match the gold standard.\n" + failedbool = False + if(tryredo): + errorem = "" + errorem += "Rebuilt properly.\n" + Emailer.send_email(parsed, errorem, attachl, True) + attachl = [] + passed = True + +#Pulls from git +def gitPull(TskOrAutopsy): + global SYS + global errorem + global attachl + ccwd = "" + gppth = make_local_path("..", "GitPullOutput" + TskOrAutopsy + ".txt") + attachl.append(gppth) + gpout = open(gppth, 'a') + toPull = "https://www.github.com/sleuthkit/" + TskOrAutopsy + call = ["git", "pull", toPull] + if TskOrAutopsy == "sleuthkit": + ccwd = os.path.join("..", "..", "..", "sleuthkit") + else: + ccwd = os.path.join("..", "..") + subprocess.call(call, stdout=sys.stdout, cwd=ccwd) + gpout.close() + + +#Builds TSK as a win32 applicatiion +def vsBuild(): + global redo + global tryredo + global passed + global parsed + #Please ensure that the current working directory is $autopsy/testing/script + oldpath = os.getcwd() + os.chdir(os.path.join("..", "..", "..","sleuthkit", "win32")) + vs = [] + 
vs.append("/cygdrive/c/windows/microsoft.NET/framework/v4.0.30319/MSBuild.exe") + vs.append(os.path.join("Tsk-win.sln")) + vs.append("/p:configuration=release") + vs.append("/p:platform=win32") + vs.append("/t:clean") + vs.append("/t:rebuild") + print(vs) + VSpth = make_local_path("..", "VSOutput.txt") + VSout = open(VSpth, 'a') + subprocess.call(vs, stdout=VSout) + VSout.close() + os.chdir(oldpath) + chk = os.path.join("..", "..", "..","sleuthkit", "win32", "Release", "libtsk_jni.dll") + try: + open(chk) + except IOError as e: + global errorem + global attachl + if(not tryredo): + errorem += "LIBTSK C++ failed to build.\n" + attachl.append(VSpth) + send_email(parsed, errorem, attachl, False) + tryredo = True + passed = False + redo = True + + + +#Builds Autopsy or the Datamodel +def antBuild(which, Build): + global redo + global passed + global tryredo + global parsed + directory = os.path.join("..", "..") + ant = [] + if which == "datamodel": + directory = os.path.join("..", "..", "..", "sleuthkit", "bindings", "java") + ant.append("ant") + ant.append("-f") + ant.append(directory) + ant.append("clean") + if(Build): + ant.append("build") + else: + ant.append("dist") + antpth = make_local_path("..", "ant" + which + "Output.txt") + antout = open(antpth, 'a') + succd = subprocess.call(ant, stdout=antout) + antout.close() + global errorem + global attachl + if which == "datamodel": + chk = os.path.join("..", "..", "..","sleuthkit", "bindings", "java", "dist", "TSK_DataModel.jar") + try: + open(chk) + except IOError as e: + if(not tryredo): + errorem += "DataModel Java build failed.\n" + attachl.append(antpth) + Emailer.send_email(parsed, errorem, attachl, False) + passed = False + tryredo = True + elif (succd != 0 and (not tryredo)): + errorem += "Autopsy build failed.\n" + attachl.append(antpth) + Emailer.send_email(parsed, errorem, attachl, False) + tryredo = True + elif (succd != 0): + passed = False + + +def main(): + errore = "" + attachli = [] + config_file = "" 
+ arg = sys.argv.pop(0) + arg = sys.argv.pop(0) + config_file = arg + parsedin = parse(config_file) + compile(errore, attachli, parsedin) + +class OS: + LINUX, MAC, WIN, CYGWIN = range(4) +if __name__ == "__main__": + global SYS + if _platform == "linux" or _platform == "linux2": + SYS = OS.LINUX + elif _platform == "darwin": + SYS = OS.MAC + elif _platform == "win32": + SYS = OS.WIN + elif _platform == "cygwin": + SYS = OS.CYGWIN + + if SYS is OS.WIN or SYS is OS.CYGWIN: + main() + else: + print("We only support Windows and Cygwin at this time.")