diff --git a/Core/nbproject/project.properties b/Core/nbproject/project.properties
index d9f4d9824a..945b30562a 100644
--- a/Core/nbproject/project.properties
+++ b/Core/nbproject/project.properties
@@ -3,6 +3,7 @@ file.reference.apache-mime4j-core-0.8.2.jar=release\\modules\\ext\\apache-mime4j
file.reference.apache-mime4j-dom-0.8.2.jar=release\\modules\\ext\\apache-mime4j-dom-0.8.2.jar
file.reference.asm-7.0.jar=release\\modules\\ext\\asm-7.0.jar
file.reference.bcmail-jdk15on-1.60.jar=release\\modules\\ext\\bcmail-jdk15on-1.60.jar
+file.reference.bcpkix-jdk15on-1.60.jar=release\\modules\\ext\\bcpkix-jdk15on-1.60.jar
file.reference.bcprov-jdk15on-1.60.jar=release\\modules\\ext\\bcprov-jdk15on-1.60.jar
file.reference.boilerpipe-1.1.0.jar=release\\modules\\ext\\boilerpipe-1.1.0.jar
file.reference.c3p0-0.9.5.jar=release/modules/ext/c3p0-0.9.5.jar
@@ -16,7 +17,6 @@ file.reference.commons-io-2.6.jar=release\\modules\\ext\\commons-io-2.6.jar
file.reference.commons-lang3-3.8.1.jar=release\\modules\\ext\\commons-lang3-3.8.1.jar
file.reference.commons-pool2-2.4.2.jar=release/modules/ext/commons-pool2-2.4.2.jar
file.reference.cxf-rt-rs-client-3.3.0.jar=release\\modules\\ext\\cxf-rt-rs-client-3.3.0.jar
-file.reference.dd-plist-1.20.jar=release/modules/ext/dd-plist-1.20.jar
file.reference.dec-0.1.2.jar=release\\modules\\ext\\dec-0.1.2.jar
file.reference.fontbox-2.0.13.jar=release\\modules\\ext\\fontbox-2.0.13.jar
file.reference.geoapi-3.0.1.jar=release\\modules\\ext\\geoapi-3.0.1.jar
@@ -55,7 +55,7 @@ file.reference.mchange-commons-java-0.2.9.jar=release/modules/ext/mchange-common
file.reference.metadata-extractor-2.11.0.jar=release\\modules\\ext\\metadata-extractor-2.11.0.jar
file.reference.netcdf4-4.5.5.jar=release\\modules\\ext\\netcdf4-4.5.5.jar
file.reference.openjson-1.0.10.jar=release\\modules\\ext\\openjson-1.0.10.jar
-file.reference.opennlp-tools-1.9.0.jar=release\\modules\\ext\\opennlp-tools-1.9.0.jar
+file.reference.opennlp-tools-1.9.1.jar=release\\modules\\ext\\opennlp-tools-1.9.1.jar
file.reference.parso-2.0.10.jar=release\\modules\\ext\\parso-2.0.10.jar
file.reference.pdfbox-2.0.13.jar=release\\modules\\ext\\pdfbox-2.0.13.jar
file.reference.pdfbox-tools-2.0.13.jar=release\\modules\\ext\\pdfbox-tools-2.0.13.jar
diff --git a/Core/nbproject/project.xml b/Core/nbproject/project.xml
index 967399cc20..53c318a22b 100644
--- a/Core/nbproject/project.xml
+++ b/Core/nbproject/project.xml
@@ -350,49 +350,29 @@
release\modules\ext\commons-lang3-3.8.1.jar
- ext/jempbox-1.8.16.jar
- release\modules\ext\jempbox-1.8.16.jar
-
-
- ext/jackcess-2.2.0.jar
- release\modules\ext\jackcess-2.2.0.jar
-
-
- ext/jericho-html-3.3.jar
- release/modules/ext/jericho-html-3.3.jar
+ ext/gax-grpc-1.44.0.jar
+ release/modules/ext/gax-grpc-1.44.0.jar
ext/cdm-4.5.5.jar
release\modules\ext\cdm-4.5.5.jar
-
- ext/httpservices-4.5.5.jar
- release\modules\ext\httpservices-4.5.5.jar
-
-
- ext/xz-1.8.jar
- release\modules\ext\xz-1.8.jar
-
-
- ext/commons-validator-1.6.jar
- release/modules/ext/commons-validator-1.6.jar
-
ext/sis-utility-0.8.jar
release\modules\ext\sis-utility-0.8.jar
- ext/jna-5.1.0.jar
- release\modules\ext\jna-5.1.0.jar
+ ext/opencensus-api-0.19.2.jar
+ release/modules/ext/opencensus-api-0.19.2.jar
+
+
+ ext/gax-httpjson-0.61.0.jar
+ release/modules/ext/gax-httpjson-0.61.0.jar
ext/boilerpipe-1.1.0.jar
release\modules\ext\boilerpipe-1.1.0.jar
-
- ext/jbig2-imageio-3.0.2.jar
- release\modules\ext\jbig2-imageio-3.0.2.jar
-
ext/jsoup-1.11.3.jar
release\modules\ext\jsoup-1.11.3.jar
@@ -401,22 +381,10 @@
ext/sevenzipjbinding.jar
release/modules/ext/sevenzipjbinding.jar
-
- ext/apache-mime4j-dom-0.8.2.jar
- release\modules\ext\apache-mime4j-dom-0.8.2.jar
-
ext/mchange-commons-java-0.2.9.jar
release/modules/ext/mchange-commons-java-0.2.9.jar
-
- ext/pdfbox-2.0.13.jar
- release\modules\ext\pdfbox-2.0.13.jar
-
-
- ext/xmlbeans-3.0.2.jar
- release\modules\ext\xmlbeans-3.0.2.jar
-
ext/jackson-databind-2.9.7.jar
release\modules\ext\jackson-databind-2.9.7.jar
@@ -425,41 +393,33 @@
ext/jai-imageio-core-1.4.0.jar
release\modules\ext\jai-imageio-core-1.4.0.jar
+
+ ext/api-common-1.7.0.jar
+ release/modules/ext/api-common-1.7.0.jar
+
ext/jcl-over-slf4j-1.7.25.jar
release\modules\ext\jcl-over-slf4j-1.7.25.jar
- ext/curator-recipes-2.8.0.jar
- release/modules/ext/curator-recipes-2.8.0.jar
+ ext/okhttp-2.7.5.jar
+ release/modules/ext/okhttp-2.7.5.jar
ext/tika-core-1.20.jar
release\modules\ext\tika-core-1.20.jar
-
- ext/tagsoup-1.2.1.jar
- release\modules\ext\tagsoup-1.2.1.jar
-
ext/StixLib.jar
release/modules/ext/StixLib.jar
-
- ext/jackson-core-2.9.7.jar
- release\modules\ext\jackson-core-2.9.7.jar
-
-
- ext/sis-metadata-0.8.jar
- release\modules\ext\sis-metadata-0.8.jar
-
ext/bcprov-jdk15on-1.60.jar
release\modules\ext\bcprov-jdk15on-1.60.jar
- ext/parso-2.0.10.jar
- release\modules\ext\parso-2.0.10.jar
+ ext/google-auth-library-credentials-0.15.0.jar
+ release/modules/ext/google-auth-library-credentials-0.15.0.jar
ext/json-simple-1.1.1.jar
@@ -473,18 +433,10 @@
ext/commons-codec-1.11.jar
release\modules\ext\commons-codec-1.11.jar
-
- ext/apache-mime4j-core-0.8.2.jar
- release\modules\ext\apache-mime4j-core-0.8.2.jar
-
ext/jmatio-1.5.jar
release\modules\ext\jmatio-1.5.jar
-
- ext/sleuthkit-postgresql-4.6.7.jar
- release/modules/ext/sleuthkit-postgresql-4.6.7.jar
-
ext/tika-parsers-1.20.jar
release\modules\ext\tika-parsers-1.20.jar
@@ -497,18 +449,10 @@
ext/commons-pool2-2.4.2.jar
release/modules/ext/commons-pool2-2.4.2.jar
-
- ext/commons-io-2.6.jar
- release\modules\ext\commons-io-2.6.jar
-
ext/jdom-2.0.5-contrib.jar
release/modules/ext/jdom-2.0.5-contrib.jar
-
- ext/SparseBitSet-1.1.jar
- release/modules/ext/SparseBitSet-1.1.jar
-
ext/openjson-1.0.10.jar
release\modules\ext\openjson-1.0.10.jar
@@ -517,38 +461,18 @@
ext/isoparser-1.1.22.jar
release\modules\ext\isoparser-1.1.22.jar
-
- ext/c3p0-0.9.5.jar
- release/modules/ext/c3p0-0.9.5.jar
-
ext/xmpcore-5.1.3.jar
release/modules/ext/xmpcore-5.1.3.jar
-
- ext/zookeeper-3.4.6.jar
- release/modules/ext/zookeeper-3.4.6.jar
-
ext/javax.activation-1.2.0.jar
release\modules\ext\javax.activation-1.2.0.jar
-
- ext/commons-csv-1.6.jar
- release\modules\ext\commons-csv-1.6.jar
-
-
- ext/jdom-2.0.5.jar
- release/modules/ext/jdom-2.0.5.jar
-
ext/rome-1.12.0.jar
release\modules\ext\rome-1.12.0.jar
-
- ext/jackson-annotations-2.9.7.jar
- release\modules\ext\jackson-annotations-2.9.7.jar
-
ext/javax.annotation-api-1.3.2.jar
release\modules\ext\javax.annotation-api-1.3.2.jar
@@ -557,49 +481,25 @@
ext/vorbis-java-core-0.8.jar
release\modules\ext\vorbis-java-core-0.8.jar
-
- ext/netcdf4-4.5.5.jar
- release\modules\ext\netcdf4-4.5.5.jar
-
ext/java-libpst-0.8.1.jar
release\modules\ext\java-libpst-0.8.1.jar
- ext/opennlp-tools-1.9.0.jar
- release\modules\ext\opennlp-tools-1.9.0.jar
-
-
- ext/sis-netcdf-0.8.jar
- release\modules\ext\sis-netcdf-0.8.jar
+ ext/okio-1.6.0.jar
+ release/modules/ext/okio-1.6.0.jar
ext/curator-framework-2.8.0.jar
release/modules/ext/curator-framework-2.8.0.jar
-
- ext/sentiment-analysis-parser-0.1.jar
- release\modules\ext\sentiment-analysis-parser-0.1.jar
-
-
- ext/commons-collections4-4.2.jar
- release\modules\ext\commons-collections4-4.2.jar
-
ext/commons-dbcp2-2.1.1.jar
release/modules/ext/commons-dbcp2-2.1.1.jar
- ext/jgraphx-v3.8.0.jar
- release/modules/ext/jgraphx-v3.8.0.jar
-
-
- ext/juniversalchardet-1.0.3.jar
- release\modules\ext\juniversalchardet-1.0.3.jar
-
-
- ext/jython-standalone-2.7.0.jar
- release/modules/ext/jython-standalone-2.7.0.jar
+ ext/google-http-client-appengine-1.29.0.jar
+ release/modules/ext/google-http-client-appengine-1.29.0.jar
ext/uimafit-core-2.4.0.jar
@@ -609,26 +509,30 @@
ext/jackcess-encrypt-2.1.4.jar
release\modules\ext\jackcess-encrypt-2.1.4.jar
-
- ext/jhighlight-1.0.3.jar
- release\modules\ext\jhighlight-1.0.3.jar
-
ext/junrar-2.0.0.jar
release\modules\ext\junrar-2.0.0.jar
- ext/jul-to-slf4j-1.7.25.jar
- release\modules\ext\jul-to-slf4j-1.7.25.jar
+ ext/google-http-client-1.29.0.jar
+ release/modules/ext/google-http-client-1.29.0.jar
- ext/postgresql-9.4.1211.jre7.jar
- release/modules/ext/postgresql-9.4.1211.jre7.jar
+ ext/bcpkix-jdk15on-1.60.jar
+ release\modules\ext\bcpkix-jdk15on-1.60.jar
+
+
+ ext/opennlp-tools-1.9.1.jar
+ release\modules\ext\opennlp-tools-1.9.1.jar
ext/slf4j-api-1.7.25.jar
release\modules\ext\slf4j-api-1.7.25.jar
+
+ ext/google-cloud-core-1.70.0.jar
+ release/modules/ext/google-cloud-core-1.70.0.jar
+
ext/geoapi-3.0.1.jar
release\modules\ext\geoapi-3.0.1.jar
@@ -641,18 +545,10 @@
ext/jdom2-2.0.6.jar
release\modules\ext\jdom2-2.0.6.jar
-
- ext/httpclient-4.5.6.jar
- release\modules\ext\httpclient-4.5.6.jar
-
ext/uimaj-core-3.0.1.jar
release\modules\ext\uimaj-core-3.0.1.jar
-
- ext/curator-client-2.8.0.jar
- release/modules/ext/curator-client-2.8.0.jar
-
ext/sqlite-jdbc-3.25.2.jar
release/modules/ext/sqlite-jdbc-3.25.2.jar
@@ -670,65 +566,133 @@
release\modules\ext\grib-4.5.5.jar
- ext/fontbox-2.0.13.jar
- release\modules\ext\fontbox-2.0.13.jar
+ ext/gax-1.44.0.jar
+ release/modules/ext/gax-1.44.0.jar
- ext/activemq-all-5.11.1.jar
- release/modules/ext/activemq-all-5.11.1.jar
+ ext/jempbox-1.8.16.jar
+ release\modules\ext\jempbox-1.8.16.jar
- ext/dec-0.1.2.jar
- release\modules\ext\dec-0.1.2.jar
-
-
- ext/Rejistry-1.1-SNAPSHOT.jar
- release/modules/ext/Rejistry-1.1-SNAPSHOT.jar
-
-
- ext/dd-plist-1.20.jar
- release/modules/ext/dd-plist-1.20.jar
-
-
- ext/sevenzipjbinding-AllPlatforms.jar
- release/modules/ext/sevenzipjbinding-AllPlatforms.jar
-
-
- ext/bcmail-jdk15on-1.60.jar
- release\modules\ext\bcmail-jdk15on-1.60.jar
-
-
- ext/vorbis-java-tika-0.8.jar
- release\modules\ext\vorbis-java-tika-0.8.jar
+ ext/jackcess-2.2.0.jar
+ release\modules\ext\jackcess-2.2.0.jar
ext/grpc-context-1.19.0.jar
release/modules/ext/grpc-context-1.19.0.jar
- ext/gax-grpc-1.44.0.jar
- release/modules/ext/gax-grpc-1.44.0.jar
+ ext/jericho-html-3.3.jar
+ release/modules/ext/jericho-html-3.3.jar
- ext/opencensus-api-0.19.2.jar
- release/modules/ext/opencensus-api-0.19.2.jar
+ ext/httpservices-4.5.5.jar
+ release\modules\ext\httpservices-4.5.5.jar
- ext/gax-httpjson-0.61.0.jar
- release/modules/ext/gax-httpjson-0.61.0.jar
+ ext/xz-1.8.jar
+ release\modules\ext\xz-1.8.jar
- ext/api-common-1.7.0.jar
- release/modules/ext/api-common-1.7.0.jar
+ ext/commons-validator-1.6.jar
+ release/modules/ext/commons-validator-1.6.jar
- ext/google-auth-library-credentials-0.15.0.jar
- release/modules/ext/google-auth-library-credentials-0.15.0.jar
+ ext/jna-5.1.0.jar
+ release\modules\ext\jna-5.1.0.jar
+
+
+ ext/jbig2-imageio-3.0.2.jar
+ release\modules\ext\jbig2-imageio-3.0.2.jar
+
+
+ ext/sleuthkit-postgresql-4.6.7.jar
+ release/modules/ext/sleuthkit-postgresql-4.6.7.jar
+
+
+ ext/apache-mime4j-dom-0.8.2.jar
+ release\modules\ext\apache-mime4j-dom-0.8.2.jar
+
+
+ ext/pdfbox-2.0.13.jar
+ release\modules\ext\pdfbox-2.0.13.jar
+
+
+ ext/xmlbeans-3.0.2.jar
+ release\modules\ext\xmlbeans-3.0.2.jar
+
+
+ ext/curator-recipes-2.8.0.jar
+ release/modules/ext/curator-recipes-2.8.0.jar
+
+
+ ext/tagsoup-1.2.1.jar
+ release\modules\ext\tagsoup-1.2.1.jar
+
+
+ ext/jackson-core-2.9.7.jar
+ release\modules\ext\jackson-core-2.9.7.jar
+
+
+ ext/sis-metadata-0.8.jar
+ release\modules\ext\sis-metadata-0.8.jar
+
+
+ ext/parso-2.0.10.jar
+ release\modules\ext\parso-2.0.10.jar
+
+
+ ext/apache-mime4j-core-0.8.2.jar
+ release\modules\ext\apache-mime4j-core-0.8.2.jar
+
+
+ ext/commons-io-2.6.jar
+ release\modules\ext\commons-io-2.6.jar
+
+
+ ext/SparseBitSet-1.1.jar
+ release/modules/ext/SparseBitSet-1.1.jar
+
+
+ ext/c3p0-0.9.5.jar
+ release/modules/ext/c3p0-0.9.5.jar
+
+
+ ext/zookeeper-3.4.6.jar
+ release/modules/ext/zookeeper-3.4.6.jar
+
+
+ ext/commons-csv-1.6.jar
+ release\modules\ext\commons-csv-1.6.jar
+
+
+ ext/jdom-2.0.5.jar
+ release/modules/ext/jdom-2.0.5.jar
+
+
+ ext/jackson-annotations-2.9.7.jar
+ release\modules\ext\jackson-annotations-2.9.7.jar
ext/google-api-client-1.27.0.jar
release/modules/ext/google-api-client-1.27.0.jar
+
+ ext/netcdf4-4.5.5.jar
+ release\modules\ext\netcdf4-4.5.5.jar
+
+
+ ext/sis-netcdf-0.8.jar
+ release\modules\ext\sis-netcdf-0.8.jar
+
+
+ ext/sentiment-analysis-parser-0.1.jar
+ release\modules\ext\sentiment-analysis-parser-0.1.jar
+
+
+ ext/commons-collections4-4.2.jar
+ release\modules\ext\commons-collections4-4.2.jar
+
ext/opencensus-contrib-http-util-0.19.2.jar
release/modules/ext/opencensus-contrib-http-util-0.19.2.jar
@@ -738,21 +702,57 @@
release/modules/ext/google-auth-library-oauth2-http-0.15.0.jar
- ext/google-http-client-appengine-1.29.0.jar
- release/modules/ext/google-http-client-appengine-1.29.0.jar
+ ext/jgraphx-v3.8.0.jar
+ release/modules/ext/jgraphx-v3.8.0.jar
- ext/google-http-client-1.29.0.jar
- release/modules/ext/google-http-client-1.29.0.jar
+ ext/juniversalchardet-1.0.3.jar
+ release\modules\ext\juniversalchardet-1.0.3.jar
- ext/google-cloud-core-1.70.0.jar
- release/modules/ext/google-cloud-core-1.70.0.jar
+ ext/jython-standalone-2.7.0.jar
+ release/modules/ext/jython-standalone-2.7.0.jar
+
+
+ ext/jhighlight-1.0.3.jar
+ release\modules\ext\jhighlight-1.0.3.jar
+
+
+ ext/jul-to-slf4j-1.7.25.jar
+ release\modules\ext\jul-to-slf4j-1.7.25.jar
+
+
+ ext/postgresql-9.4.1211.jre7.jar
+ release/modules/ext/postgresql-9.4.1211.jre7.jar
+
+
+ ext/httpclient-4.5.6.jar
+ release\modules\ext\httpclient-4.5.6.jar
+
+
+ ext/curator-client-2.8.0.jar
+ release/modules/ext/curator-client-2.8.0.jar
+
+
+ ext/fontbox-2.0.13.jar
+ release\modules\ext\fontbox-2.0.13.jar
+
+
+ ext/activemq-all-5.11.1.jar
+ release/modules/ext/activemq-all-5.11.1.jar
ext/google-cloud-core-http-1.70.0.jar
release/modules/ext/google-cloud-core-http-1.70.0.jar
+
+ ext/Rejistry-1.1-SNAPSHOT.jar
+ release/modules/ext/Rejistry-1.1-SNAPSHOT.jar
+
+
+ ext/dec-0.1.2.jar
+ release\modules\ext\dec-0.1.2.jar
+
ext/google-http-client-jackson2-1.29.0.jar
release/modules/ext/google-http-client-jackson2-1.29.0.jar
@@ -766,20 +766,20 @@
release/modules/ext/google-cloud-translate-1.70.0.jar
- ext/gax-1.44.0.jar
- release/modules/ext/gax-1.44.0.jar
+ ext/sevenzipjbinding-AllPlatforms.jar
+ release/modules/ext/sevenzipjbinding-AllPlatforms.jar
ext/google-api-services-translate-v2-rev20170525-1.27.0.jar
release/modules/ext/google-api-services-translate-v2-rev20170525-1.27.0.jar
- ext/okhttp-2.7.5.jar
- release/modules/ext/okhttp-2.7.5.jar
+ ext/bcmail-jdk15on-1.60.jar
+ release\modules\ext\bcmail-jdk15on-1.60.jar
- ext/okio-1.6.0.jar
- release/modules/ext/okio-1.6.0.jar
+ ext/vorbis-java-tika-0.8.jar
+ release\modules\ext\vorbis-java-tika-0.8.jar
diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java
index 02f98f2887..81bc6a643c 100644
--- a/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java
@@ -80,7 +80,7 @@ public class Services implements Closeable {
/**
* Gets the artifacts blackboard for the current case.
*
- * @return @org.sleuthkit.datamodel.Blackboard Blackboard for the current
+ * @return org.sleuthkit.datamodel.Blackboard Blackboard for the current
* case.
*/
public org.sleuthkit.datamodel.Blackboard getArtifactsBlackboard() {
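A minimal usage sketch of this accessor, assuming the usual Case/Services entry point that appears elsewhere in this changeset (the variable name is illustrative):

    // Fetch the artifacts blackboard for the current case via the Services facade.
    org.sleuthkit.datamodel.Blackboard blackboard =
            Case.getCurrentCase().getServices().getArtifactsBlackboard();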
diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/FileViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/FileViewer.java
index dddf0f6296..1b0d048f35 100644
--- a/Core/src/org/sleuthkit/autopsy/contentviewers/FileViewer.java
+++ b/Core/src/org/sleuthkit/autopsy/contentviewers/FileViewer.java
@@ -117,7 +117,7 @@ public class FileViewer extends javax.swing.JPanel implements DataContentViewer
}
AbstractFile file = selectedNode.getLookup().lookup(AbstractFile.class);
- if (file == null) {
+ if ((file == null) || (file.isDir())) {
return;
}
@@ -189,7 +189,7 @@ public class FileViewer extends javax.swing.JPanel implements DataContentViewer
}
AbstractFile aFile = node.getLookup().lookup(AbstractFile.class);
- if (aFile == null) {
+ if ((aFile == null) || (aFile.isDir())) {
return false;
}
diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/AppDBParserHelper.java b/Core/src/org/sleuthkit/autopsy/coreutils/AppDBParserHelper.java
index 538ea07098..01771f0581 100644
--- a/Core/src/org/sleuthkit/autopsy/coreutils/AppDBParserHelper.java
+++ b/Core/src/org/sleuthkit/autopsy/coreutils/AppDBParserHelper.java
@@ -283,8 +283,8 @@ public final class AppDBParserHelper {
/**
* Adds a relations between the two specified account instances.
*
- * @param selfAccount device owner account
- * @param otherAccount other account
+ * @param selfAccountInstance device owner account
+ * @param otherAccountInstance other account
* @param sourceArtifact artifact from which relationship is derived.
* @param relationshipType type of relationship
* @param dateTime date/time of relationship
@@ -316,7 +316,7 @@ public final class AppDBParserHelper {
* @param readStatus message read or not
* @param subject message subject, may be empty
* @param messageText message body, may be empty
- * @param threadId, message thread id
+ * @param threadId message thread id
*
* @return message artifact
*/
@@ -347,7 +347,7 @@ public final class AppDBParserHelper {
* @param readStatus message read or not
* @param subject message subject, may be empty
* @param messageText message body, may be empty
- * @param threadId, message thread id
+ * @param threadId message thread id
*
* @param otherAttributesList additional attributes
*
@@ -385,7 +385,7 @@ public final class AppDBParserHelper {
* @param readStatus message read or not
* @param subject message subject, may be empty
* @param messageText message body, may be empty
- * @param threadId, message thread id
+ * @param threadId message thread id
*
*
* @return message artifact
@@ -867,7 +867,7 @@ public final class AppDBParserHelper {
* @param accessTime last access time
* @param referrer referrer, may be empty
* @param title website title, may be empty
- * @param programName, application recording the history
+ * @param programName application recording the history
*
* @return artifact created
*/
@@ -884,7 +884,7 @@ public final class AppDBParserHelper {
* @param accessTime last access time
* @param referrer referrer, may be empty
* @param title website title, may be empty
- * @param programName, application recording the history
+ * @param programName application recording the history
* @param otherAttributesList other attributes
*
*
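The parameter names documented above suggest the call shape of these helpers; the sketch below is purely hypothetical (the receiver, method name, and argument order are assumptions, not the AppDBParserHelper API as shown in this diff):

    // Hypothetical sketch only -- names are illustrative, not the actual API.
    helper.addRelationship(selfAccountInstance, otherAccountInstance,
            sourceArtifact, relationshipType, dateTime);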
diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/AppSQLiteDB.java b/Core/src/org/sleuthkit/autopsy/coreutils/AppSQLiteDB.java
index 6812f6daf3..bb8a1ed227 100644
--- a/Core/src/org/sleuthkit/autopsy/coreutils/AppSQLiteDB.java
+++ b/Core/src/org/sleuthkit/autopsy/coreutils/AppSQLiteDB.java
@@ -95,8 +95,8 @@ public final class AppSQLiteDB implements Closeable {
* AppSQLiteDB to help query the DB.
*
* A list of AppSQLiteDB instances is returned, one for each
- * match found.,
- * .
+ * match found.
+ *
* @param dataSource data source to search in
* @param dbName db file name to search
* @param matchExactName whether to look for exact file name or a pattern match
@@ -174,7 +174,7 @@ public final class AppSQLiteDB implements Closeable {
* @param dbName db file name to search
* @param matchExactName whether to look for exact file name or a pattern match
* @param dbPath path to match
- * @param matchExactName whether to look for exact path name or a substring match
+ * @param matchExactPath whether to look for exact path name or a substring match
*
* @return a collection of AppSQLiteDBFileBundle
*
diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/ExecUtil.java b/Core/src/org/sleuthkit/autopsy/coreutils/ExecUtil.java
index 9a211c57a0..022b507212 100644
--- a/Core/src/org/sleuthkit/autopsy/coreutils/ExecUtil.java
+++ b/Core/src/org/sleuthkit/autopsy/coreutils/ExecUtil.java
@@ -206,7 +206,7 @@ public final class ExecUtil {
* @param terminator The ProcessTerminator used to determine if the process
* should be killed.
*
- * @returnthe exit value of the process
+ * @return the exit value of the process
*
* @throws SecurityException if a security manager exists and vetoes any
* aspect of running the process.
diff --git a/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.java b/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.java
index b0655dfa2b..4ab05a11aa 100644
--- a/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.java
+++ b/Core/src/org/sleuthkit/autopsy/report/ArtifactSelectionDialog.java
@@ -54,7 +54,7 @@ public class ArtifactSelectionDialog extends javax.swing.JDialog {
/**
* Creates new form ArtifactSelectionDialog
- *
+ *
* @param parent The parent window
* @param modal Block user-input to other top-level windows.
*/
@@ -110,9 +110,11 @@ public class ArtifactSelectionDialog extends javax.swing.JDialog {
@Override
public void mousePressed(MouseEvent evt) {
int index = artifactList.locationToIndex(evt.getPoint());
- BlackboardArtifact.Type type = model.getElementAt(index);
- artifactTypeSelections.put(type, !artifactTypeSelections.get(type));
- artifactList.repaint();
+ if (index >= 0) {
+ BlackboardArtifact.Type type = model.getElementAt(index);
+ artifactTypeSelections.put(type, !artifactTypeSelections.get(type));
+ artifactList.repaint();
+ }
}
});
}
diff --git a/Core/src/org/sleuthkit/autopsy/report/caseuco/CaseUcoFormatExporter.java b/Core/src/org/sleuthkit/autopsy/report/caseuco/CaseUcoFormatExporter.java
index 03bffed89d..d3ab17ffe9 100755
--- a/Core/src/org/sleuthkit/autopsy/report/caseuco/CaseUcoFormatExporter.java
+++ b/Core/src/org/sleuthkit/autopsy/report/caseuco/CaseUcoFormatExporter.java
@@ -198,7 +198,7 @@ public final class CaseUcoFormatExporter {
}
/**
- * Exports files that are tagged w/ the following TagNames and that belong to
+ * Exports files that are tagged with the following TagNames and that belong to
* the following interesting file sets (set name attributes of TSK_INTERSTING_FILE_HIT
* and TSK_INTERESTING_ARTIFACT_HIT). Artifacts that are tagged with
* the following TagNames also have their associated source files included.
@@ -208,7 +208,7 @@ public final class CaseUcoFormatExporter {
* @param tagTypes Collection of TagNames to match
* @param interestingItemSets Collection of SET_NAMEs to match on in TSK_INTERESTING_FILE_HITs
* and TSK_INTERESTING_ARTIFACT_HITs.
- * @param outputFilePath Path to the folder that the CASE-UCO report should be written into
+ * @param caseReportFolder Path to the folder that the CASE-UCO report should be written into
* @param progressPanel UI Component to be updated with current processing status
*/
@NbBundle.Messages({
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/FilteredEventsModel.java b/Core/src/org/sleuthkit/autopsy/timeline/FilteredEventsModel.java
index 457939f04c..6a9253043a 100755
--- a/Core/src/org/sleuthkit/autopsy/timeline/FilteredEventsModel.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/FilteredEventsModel.java
@@ -35,9 +35,7 @@ import javafx.beans.InvalidationListener;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.property.ReadOnlyObjectWrapper;
import javafx.collections.FXCollections;
-import javafx.collections.ObservableList;
import javafx.collections.ObservableMap;
-import javafx.collections.ObservableSet;
import static org.apache.commons.collections4.CollectionUtils.emptyIfNull;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import org.joda.time.DateTimeZone;
@@ -58,7 +56,6 @@ import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent;
import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
-import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.TagsFilterState;
import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl;
import org.sleuthkit.autopsy.timeline.utils.FilterUtils;
import org.sleuthkit.autopsy.timeline.zooming.ZoomState;
@@ -70,7 +67,6 @@ import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.Tag;
-import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TimelineManager;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TimelineEvent;
@@ -81,10 +77,8 @@ import org.sleuthkit.datamodel.TimelineFilter.DataSourcesFilter;
import org.sleuthkit.datamodel.TimelineFilter.EventTypeFilter;
import org.sleuthkit.datamodel.TimelineFilter.FileTypesFilter;
import org.sleuthkit.datamodel.TimelineFilter.HashHitsFilter;
-import org.sleuthkit.datamodel.TimelineFilter.HashSetFilter;
import org.sleuthkit.datamodel.TimelineFilter.HideKnownFilter;
import org.sleuthkit.datamodel.TimelineFilter.RootFilter;
-import org.sleuthkit.datamodel.TimelineFilter.TagNameFilter;
import org.sleuthkit.datamodel.TimelineFilter.TagsFilter;
import org.sleuthkit.datamodel.TimelineFilter.TextFilter;
@@ -129,8 +123,6 @@ public final class FilteredEventsModel {
private final LoadingCache> eventCountsCache;
/** Map from datasource id to datasource name. */
private final ObservableMap datasourcesMap = FXCollections.observableHashMap();
- private final ObservableSet< String> hashSets = FXCollections.observableSet();
- private final ObservableList tagNames = FXCollections.observableArrayList();
// end caches
/**
@@ -171,8 +163,6 @@ public final class FilteredEventsModel {
};
datasourcesMap.addListener(filterSyncListener);
- hashSets.addListener(filterSyncListener);
- tagNames.addListener(filterSyncListener);
requestedFilter.set(getDefaultFilter());
@@ -248,15 +238,11 @@ public final class FilteredEventsModel {
*/
synchronized private void populateFilterData() throws TskCoreException {
SleuthkitCase skCase = autoCase.getSleuthkitCase();
- hashSets.addAll(eventManager.getHashSetNames());
//because there is no way to remove a datasource we only add to this map.
for (DataSource ds : skCase.getDataSources()) {
datasourcesMap.putIfAbsent(ds.getId(), ds.getName());
}
-
- //should this only be tags applied to files or event bearing artifacts?
- tagNames.setAll(skCase.getTagNamesInUse());
}
/**
@@ -269,22 +255,8 @@ public final class FilteredEventsModel {
* with the tags in use in the case
*/
public void syncFilters(RootFilterState rootFilterState) {
- TagsFilterState tagsFilterState = rootFilterState.getTagsFilterState();
- for (TagName tagName : tagNames) {
- tagsFilterState.getFilter().addSubFilter(new TagNameFilter(tagName));
- }
- for (FilterState extends TagNameFilter> tagFilterState : rootFilterState.getTagsFilterState().getSubFilterStates()) {
- // disable states for tag names that don't exist in case.
- tagFilterState.setDisabled(tagNames.contains(tagFilterState.getFilter().getTagName()) == false);
- }
-
DataSourcesFilter dataSourcesFilter = rootFilterState.getDataSourcesFilterState().getFilter();
datasourcesMap.entrySet().forEach(entry -> dataSourcesFilter.addSubFilter(newDataSourceFromMapEntry(entry)));
-
- HashHitsFilter hashSetsFilter = rootFilterState.getHashHitsFilterState().getFilter();
- for (String hashSet : hashSets) {
- hashSetsFilter.addSubFilter(new HashSetFilter(hashSet));
- }
}
/**
@@ -351,10 +323,8 @@ public final class FilteredEventsModel {
-> dataSourcesFilter.addSubFilter(newDataSourceFromMapEntry(dataSourceEntry)));
HashHitsFilter hashHitsFilter = new HashHitsFilter();
- hashSets.stream().map(HashSetFilter::new).forEach(hashHitsFilter::addSubFilter);
TagsFilter tagsFilter = new TagsFilter();
- tagNames.stream().map(TagNameFilter::new).forEach(tagsFilter::addSubFilter);
FileTypesFilter fileTypesFilter = FilterUtils.createDefaultFileTypesFilter();
@@ -388,20 +358,6 @@ public final class FilteredEventsModel {
return events;
}
- /**
- * get a count of tagnames applied to the given event ids as a map from
- * tagname displayname to count of tag applications
- *
- * @param eventIDsWithTags the event ids to get the tag counts map for
- *
- * @return a map from tagname displayname to count of applications
- *
- * @throws org.sleuthkit.datamodel.TskCoreException
- */
- public Map getTagCountsByTagName(Set eventIDsWithTags) throws TskCoreException {
- return eventManager.getTagCountsByTagName(eventIDsWithTags);
- }
-
public List getEventIDs(Interval timeRange, FilterState extends TimelineFilter> filter) throws TskCoreException {
final Interval overlap;
@@ -512,8 +468,14 @@ public final class FilteredEventsModel {
DeletedContentTagInfo deletedTagInfo = evt.getDeletedTagInfo();
Content content = autoCase.getSleuthkitCase().getContentById(deletedTagInfo.getContentID());
- boolean tagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false;
- Set updatedEventIDs = deleteTag(content.getId(), null, deletedTagInfo.getTagID(), tagged);
+ boolean isContentTagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false;
+ boolean isArtifactTagged = false;
+
+ if(content instanceof BlackboardArtifact) {
+ isArtifactTagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact((BlackboardArtifact)content).isEmpty() == false;
+ }
+
+ Set updatedEventIDs = deleteTag(content.getId(), null, deletedTagInfo.getTagID(), isArtifactTagged || isContentTagged);
return postTagsDeleted(updatedEventIDs);
}
@@ -521,8 +483,9 @@ public final class FilteredEventsModel {
DeletedBlackboardArtifactTagInfo deletedTagInfo = evt.getDeletedTagInfo();
BlackboardArtifact artifact = autoCase.getSleuthkitCase().getBlackboardArtifact(deletedTagInfo.getArtifactID());
- boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false;
- Set updatedEventIDs = deleteTag(artifact.getObjectID(), artifact.getArtifactID(), deletedTagInfo.getTagID(), tagged);
+ boolean isArtifactTagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false;
+ boolean isContentTagged = autoCase.getServices().getTagsManager().getContentTagsByContent(artifact).isEmpty() == false;
+ Set updatedEventIDs = deleteTag(artifact.getObjectID(), artifact.getArtifactID(), deletedTagInfo.getTagID(), isArtifactTagged || isContentTagged);
return postTagsDeleted(updatedEventIDs);
}
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/actions/AddManualEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/actions/AddManualEvent.java
index 5d02925db9..33220b173a 100755
--- a/Core/src/org/sleuthkit/autopsy/timeline/actions/AddManualEvent.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/actions/AddManualEvent.java
@@ -127,6 +127,7 @@ public class AddManualEvent extends Action {
* Use the supplied ManualEventInfo to make an TSK_TL_EVENT artifact which
* will trigger adding a TimelineEvent.
*
+ * @param controller
* @param eventInfo The ManualEventInfo with the info needed to create an
* event.
*
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventNodeBase.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventNodeBase.java
index 17c6e3c394..3ee088f2ad 100755
--- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventNodeBase.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventNodeBase.java
@@ -24,15 +24,12 @@ import com.google.common.collect.Sets;
import com.google.common.eventbus.Subscribe;
import java.util.Arrays;
import java.util.Collection;
-import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
-import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
-import java.util.stream.Collectors;
import javafx.animation.KeyFrame;
import javafx.animation.KeyValue;
import javafx.animation.Timeline;
@@ -82,9 +79,7 @@ import static org.sleuthkit.autopsy.timeline.ui.detailview.EventNodeBase.show;
import static org.sleuthkit.autopsy.timeline.ui.detailview.MultiEventNodeBase.CORNER_RADII_3;
import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.DetailViewEvent;
import org.sleuthkit.datamodel.SleuthkitCase;
-import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TimelineEventType;
-import org.sleuthkit.datamodel.TimelineEvent;
/**
*
@@ -269,7 +264,7 @@ public abstract class EventNodeBase extends StackP
}
/**
- * defer tooltip content creation till needed, this had a surprisingly large
+     * defer tooltip content creation until needed; this had a surprisingly large
* impact on speed of loading the chart
*/
@NbBundle.Messages({"# {0} - counts",
@@ -293,37 +288,9 @@ public abstract class EventNodeBase extends StackP
@Override
protected String call() throws Exception {
- HashMap hashSetCounts = new HashMap<>();
- if (tlEvent.getEventIDsWithHashHits().isEmpty() == false) {
- try {
- //TODO:push this to DB
- for (TimelineEvent tle : eventsModel.getEventsById(tlEvent.getEventIDsWithHashHits())) {
- Set hashSetNames = sleuthkitCase.getContentById(tle.getFileObjID()).getHashSetNames();
- for (String hashSetName : hashSetNames) {
- hashSetCounts.merge(hashSetName, 1L, Long::sum);
- }
- }
- } catch (TskCoreException ex) {
- LOGGER.log(Level.SEVERE, "Error getting hashset hit info for event.", ex); //NON-NLS
- }
- }
- String hashSetCountsString = hashSetCounts.entrySet().stream()
- .map((Map.Entry t) -> t.getKey() + " : " + t.getValue())
- .collect(Collectors.joining("\n"));
-
- Map tagCounts = new HashMap<>();
- if (tlEvent.getEventIDsWithTags().isEmpty() == false) {
- tagCounts.putAll(eventsModel.getTagCountsByTagName(tlEvent.getEventIDsWithTags()));
- }
- String tagCountsString = tagCounts.entrySet().stream()
- .map((Map.Entry t) -> t.getKey() + " : " + t.getValue())
- .collect(Collectors.joining("\n"));
-
return Bundle.EventNodeBase_tooltip_text(getEventIDs().size(), getEventType(), getDescription(),
TimeLineController.getZonedFormatter().print(getStartMillis()),
- TimeLineController.getZonedFormatter().print(getEndMillis() + 1000))
- + (hashSetCountsString.isEmpty() ? "" : Bundle.EventNodeBase_toolTip_hashSetHits(hashSetCountsString))
- + (tagCountsString.isEmpty() ? "" : Bundle.EventNodeBase_toolTip_tags(tagCountsString));
+ TimeLineController.getZonedFormatter().print(getEndMillis() + 1000));
}
@Override
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/DetailsViewModel.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/DetailsViewModel.java
index 8bb257867c..f37cce6035 100755
--- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/DetailsViewModel.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/DetailsViewModel.java
@@ -102,6 +102,7 @@ final public class DetailsViewModel {
}
/**
+ * @param uiFilter
* @param zoom
*
* @return a list of aggregated events that are within the requested time
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/CompoundFilterState.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/CompoundFilterState.java
index 6723242141..e85fabe4da 100755
--- a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/CompoundFilterState.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/CompoundFilterState.java
@@ -121,8 +121,6 @@ public class CompoundFilterState {
private final CompoundFilterState eventTypeFilterState;
private final SqlFilterState knownFilterState;
private final SqlFilterState textFilterState;
- private final TagsFilterState tagsFilterState;
- private final CompoundFilterState hashHitsFilterState;
+ private final SqlFilterState tagsFilterState;
+ private final SqlFilterState hashHitsFilterState;
private final CompoundFilterState dataSourcesFilterState;
private final CompoundFilterState fileTypesFilterState;
@@ -63,8 +63,8 @@ public class RootFilterState extends CompoundFilterState(delegate.getEventTypeFilter()),
new SqlFilterState<>(delegate.getKnownFilter()),
new SqlFilterState<>(delegate.getTextFilter()),
- new TagsFilterState(delegate.getTagsFilter()),
- new CompoundFilterState<>(delegate.getHashHitsFilter()),
+ new SqlFilterState<>(delegate.getTagsFilter()),
+ new SqlFilterState<>(delegate.getHashHitsFilter()),
new CompoundFilterState<>(delegate.getDataSourcesFilter()),
new CompoundFilterState<>(delegate.getFileTypesFilter())
);
@@ -74,8 +74,8 @@ public class RootFilterState extends CompoundFilterState eventTypeFilterState,
SqlFilterState knownFilterState,
SqlFilterState textFilterState,
- TagsFilterState tagsFilterState,
- CompoundFilterState hashHitsFilterState,
+ SqlFilterState tagsFilterState,
+ SqlFilterState hashHitsFilterState,
CompoundFilterState dataSourcesFilterState,
CompoundFilterState fileTypesFilterState) {
super(filter, Arrays.asList(eventTypeFilterState, knownFilterState, textFilterState, tagsFilterState, hashHitsFilterState, dataSourcesFilterState, fileTypesFilterState));
@@ -133,11 +133,11 @@ public class RootFilterState extends CompoundFilterState getTagsFilterState() {
return tagsFilterState;
}
- public CompoundFilterState getHashHitsFilterState() {
+ public SqlFilterState getHashHitsFilterState() {
return hashHitsFilterState;
}
@@ -161,18 +161,6 @@ public class RootFilterState extends CompoundFilterState> getSubFilterStates() {
ImmutableMap, Integer> filterOrder
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/SqlFilterState.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/SqlFilterState.java
index 5180dd3425..2f33ebec61 100755
--- a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/SqlFilterState.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/SqlFilterState.java
@@ -36,6 +36,15 @@ public class SqlFilterState extends AbstractF
// the "Hide Known Filters", "Tags", "Hashsets" and "Text".
// There are better ways to do this, but this works in a pinch
this(filter, !(filter instanceof TimelineFilter.HideKnownFilter || filter instanceof TimelineFilter.TagsFilter || filter instanceof TimelineFilter.HashHitsFilter || filter instanceof TimelineFilter.TextFilter));
+
+ selectedProperty().addListener(selectedProperty -> {
+ if (filter instanceof TimelineFilter.TagsFilter) {
+ ((TimelineFilter.TagsFilter)filter).setTagged(isSelected());
+ } else if (filter instanceof TimelineFilter.HashHitsFilter) {
+ ((TimelineFilter.HashHitsFilter)filter).setTagged(isSelected());
+ }
+ });
+
}
/**
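A hedged sketch of what the new listener means for callers; the single-argument constructor, setSelected(), and setTagged() all appear in this diff, while the generic parameter shown here is an assumption:

    // Selecting the state now also flips the underlying filter's flag.
    SqlFilterState<TimelineFilter.TagsFilter> tagsState =
            new SqlFilterState<>(new TimelineFilter.TagsFilter());
    tagsState.setSelected(true); // the listener above calls TagsFilter.setTagged(true)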
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/TagsFilterState.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/TagsFilterState.java
deleted file mode 100755
index 12b17ed201..0000000000
--- a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/TagsFilterState.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Autopsy Forensic Browser
- *
- * Copyright 2018-2019 Basis Technology Corp.
- * Contact: carrier sleuthkit org
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.sleuthkit.autopsy.timeline.ui.filtering.datamodel;
-
-import com.google.common.collect.Lists;
-import java.util.Collection;
-import javafx.collections.ListChangeListener;
-import org.sleuthkit.datamodel.TimelineFilter.TagNameFilter;
-import org.sleuthkit.datamodel.TimelineFilter.TagsFilter;
-
-/**
- * Specialization of CompoundFilterState for TagName/Tags-Filter.
- *
- * Newly added subfilters made to be SELECTED when they are added.
- */
-public class TagsFilterState extends CompoundFilterState {
-
- public TagsFilterState(TagsFilter delegate) {
- super(delegate);
- installSelectNewFiltersListener();
-
- }
-
- public TagsFilterState(TagsFilter delegate, Collection> subFilterStates) {
- super(delegate, subFilterStates);
- installSelectNewFiltersListener();
- }
-
- private void installSelectNewFiltersListener() {
- getSubFilterStates().addListener((ListChangeListener.Change extends FilterState extends TagNameFilter>> change) -> {
- while (change.next()) {
- change.getAddedSubList().forEach(filterState -> filterState.setSelected(true));
- }
- });
- }
-
- @Override
- public TagsFilterState copyOf() {
- TagsFilterState copy = new TagsFilterState(getFilter().copyOf(),
- Lists.transform(getSubFilterStates(), FilterState::copyOf));
-
- copy.setSelected(isSelected());
- copy.setDisabled(isDisabled());
- return copy;
- }
-
- @Override
- public TagsFilter getActiveFilter() {
- if (isActive() == false) {
- return null;
- }
-
- TagsFilter copy = new TagsFilter();
- //add active subfilters to copy.
- getSubFilterStates().stream()
- .filter(FilterState::isActive)
- .map(FilterState::getActiveFilter)
- .forEach(copy::addSubFilter);
-
- return copy;
- }
-}
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/datamodel/CombinedEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/datamodel/CombinedEvent.java
index 62e60fb597..4d4badb58c 100755
--- a/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/datamodel/CombinedEvent.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/datamodel/CombinedEvent.java
@@ -45,7 +45,6 @@ public class CombinedEvent {
*
* @param epochMillis The timestamp for this event, in millis from the Unix
* epoch.
- * @param fileID The ID of the file shared by all the combined events.
* @param eventMap A map from EventType to event ID.
*/
public CombinedEvent(long epochMillis, Map eventMap) {
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/utils/TimelineDBUtils.java b/Core/src/org/sleuthkit/autopsy/timeline/utils/TimelineDBUtils.java
index 5c67852ae5..5fde45aa13 100755
--- a/Core/src/org/sleuthkit/autopsy/timeline/utils/TimelineDBUtils.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/utils/TimelineDBUtils.java
@@ -63,7 +63,7 @@ public class TimelineDBUtils {
* into a set of X using the mapper to convert from string to X. If
* groupConcat is empty, null, or all whitespace, returns an empty list.
*
- * @param the type of elements to return
+ * X the type of elements to return
* @param groupConcat a string containing the group_concat result ( a comma
* separated list)
* @param mapper a function from String to X
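The behavior documented above amounts to splitting a comma-separated group_concat result and mapping each token; a self-contained illustration of that idea using java.util.stream (not the helper's actual signature):

    // Illustrative only: split a group_concat string and map each token to a value.
    String groupConcat = "101, 102, 103";
    Set<Long> values = Stream.of(groupConcat.split(","))
            .map(String::trim)
            .map(Long::valueOf)
            .collect(Collectors.toSet());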
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED
index f27b253e16..805e776717 100755
--- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED
@@ -2,9 +2,14 @@ cannotBuildXmlParser=Unable to build XML parser:
cannotLoadSEUQA=Unable to load Search Engine URL Query Analyzer settings file, SEUQAMappings.xml:
cannotParseXml=Unable to parse XML file:
ChromeCacheExtractor.moduleName=ChromeCacheExtractor
+# {0} - module name
+# {1} - row number
+# {2} - table length
+# {3} - cache path
ChromeCacheExtractor.progressMsg={0}: Extracting cache entry {1} of {2} entries from {3}
DataSourceUsage_AndroidMedia=Android Media Card
DataSourceUsage_FlashDrive=Flash Drive
+# {0} - OS name
DataSourceUsageAnalyzer.customVolume.label=OS Drive ({0})
DataSourceUsageAnalyzer.parentModuleName=Recent Activity
Extract.indexError.message=Failed to index artifact for keyword search.
@@ -46,6 +51,7 @@ ExtractOs.windowsVolume.label=OS Drive (Windows)
ExtractOs.yellowDogLinuxOs.label=Linux (Yellow Dog)
ExtractOs.yellowDogLinuxVolume.label=OS Drive (Linux Yellow Dog)
ExtractOS_progressMessage=Checking for OS
+ExtractRecycleBin_module_name=Recycle Bin
ExtractSafari_Error_Getting_History=An error occurred while processing Safari history files.
ExtractSafari_Error_Parsing_Bookmark=An error occured while processing Safari Bookmark files
ExtractSafari_Error_Parsing_Cookies=An error occured while processing Safari Cookies files
@@ -182,6 +188,7 @@ RecentDocumentsByLnk.parentModuleName.noSpace=RecentActivity
RecentDocumentsByLnk.parentModuleName=Recent Activity
RegRipperFullNotFound=Full version RegRipper executable not found.
RegRipperNotFound=Autopsy RegRipper executable not found.
+# {0} - file name
SearchEngineURLQueryAnalyzer.init.exception.msg=Unable to find {0}.
SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine
SearchEngineURLQueryAnalyzer.engineName.none=NONE
@@ -189,4 +196,7 @@ SearchEngineURLQueryAnalyzer.domainSubStr.none=NONE
SearchEngineURLQueryAnalyzer.toString=Name: {0}\nDomain Substring: {1}\nCount: {2}\nSplit Tokens: \n{3}
SearchEngineURLQueryAnalyzer.parentModuleName.noSpace=RecentActivity
SearchEngineURLQueryAnalyzer.parentModuleName=Recent Activity
+Shellbag_Artifact_Display_Name=Shell Bags
+Shellbag_Key_Attribute_Display_Name=Key
+Shellbag_Last_Write_Attribute_Display_Name=Last Write
UsbDeviceIdMapper.parseAndLookup.text=Product: {0}
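The "# {n} - ..." comments above document MessageFormat placeholders for the keys that follow them; one hedged way such a keyed message can be resolved in a NetBeans module (the argument variables are illustrative):

    // Illustrative only: resolve a parameterized bundle key via NbBundle.
    String progress = NbBundle.getMessage(ChromeCacheExtractor.class,
            "ChromeCacheExtractor.progressMsg",
            moduleName, entryIndex, entryCount, cachePath);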
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRecycleBin.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRecycleBin.java
new file mode 100755
index 0000000000..3fa746cf82
--- /dev/null
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRecycleBin.java
@@ -0,0 +1,620 @@
+/*
+ *
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2019 Basis Technology Corp.
+ *
+ * Copyright 2012 42six Solutions.
+ * Contact: aebadirad 42six com
+ * Project Contact/Architect: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.recentactivity;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Level;
+import org.joda.time.Instant;
+import org.openide.util.NbBundle.Messages;
+import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.casemodule.services.FileManager;
+import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.datamodel.ContentUtils;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
+import org.sleuthkit.autopsy.ingest.IngestJobContext;
+import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.BlackboardArtifact;
+import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_OS_ACCOUNT;
+import org.sleuthkit.datamodel.BlackboardAttribute;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_DELETED;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_ID;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME;
+import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.FsContent;
+import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.datamodel.TskData;
+import org.sleuthkit.datamodel.TskDataException;
+
+/**
+ * This module is based on the RecycleBin python module from Mark McKinnon.
+ *
+ * @see
+ * Recycle_Bin.py
+ *
+ */
+final class ExtractRecycleBin extends Extract {
+
+ private static final Logger logger = Logger.getLogger(ExtractRecycleBin.class.getName());
+
+ private static final String RECYCLE_BIN_ARTIFACT_NAME = "TSK_RECYCLE_BIN"; //NON-NLS
+
+ private static final int V1_FILE_NAME_OFFSET = 24;
+ private static final int V2_FILE_NAME_OFFSET = 28;
+
+ @Messages({
+ "ExtractRecycleBin_module_name=Recycle Bin"
+ })
+ ExtractRecycleBin() {
+ this.moduleName = Bundle.ExtractRecycleBin_module_name();
+ }
+
+ @Override
+ void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) {
+ // At this time it was decided that we would not include TSK_RECYCLE_BIN
+ // in the default list of BlackboardArtifact types.
+ try {
+ createRecycleBinArtifactType();
+ } catch (TskCoreException ex) {
+ logger.log(Level.WARNING, String.format("%s may not have been created.", RECYCLE_BIN_ARTIFACT_NAME), ex);
+ }
+
+ BlackboardArtifact.Type recycleBinArtifactType;
+
+ try {
+ recycleBinArtifactType = tskCase.getArtifactType(RECYCLE_BIN_ARTIFACT_NAME);
+ } catch (TskCoreException ex) {
+ logger.log(Level.WARNING, String.format("Unable to retrive custom artifact type %s", RECYCLE_BIN_ARTIFACT_NAME), ex); // NON-NLS
+ // If this doesn't work bail.
+ return;
+ }
+
+ // map SIDs to user names so that we can include that in the artifact
+ Map<String, String> userNameMap;
+ try {
+ userNameMap = makeUserNameMap(dataSource);
+ } catch (TskCoreException ex) {
+ logger.log(Level.WARNING, "Unable to create OS Account user name map", ex);
+ // This is not the end of the world; we will just continue without
+ // user names.
+ userNameMap = new HashMap<>();
+ }
+
+ FileManager fileManager = Case.getCurrentCase().getServices().getFileManager();
+
+ // Collect all of the $R files so that we can later easily map them to the corresponding $I files
+ Map<String, List<AbstractFile>> rFileMap;
+ try {
+ rFileMap = makeRFileMap(dataSource);
+ } catch (TskCoreException ex) {
+ logger.log(Level.WARNING, String.format("Unable to create $R file map for dataSource: %s", dataSource.getName()), ex);
+ return; // No $R files, no need to continue;
+ }
+
+ // Get the $I files
+ List<AbstractFile> iFiles;
+ try {
+ iFiles = fileManager.findFiles(dataSource, "$I%"); //NON-NLS
+ } catch (TskCoreException ex) {
+ logger.log(Level.WARNING, "Unable to find recycle bin I files.", ex); //NON-NLS
+ return; // No need to continue
+ }
+
+ String tempRARecycleBinPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), "recyclebin"); //NON-NLS
+
+ // cycle through the $I files and process each.
+ for (AbstractFile iFile : iFiles) {
+
+ if (context.dataSourceIngestIsCancelled()) {
+ return;
+ }
+
+ processIFile(context, recycleBinArtifactType, iFile, userNameMap, rFileMap, tempRARecycleBinPath);
+ }
+
+ (new File(tempRARecycleBinPath)).delete();
+ }
+
+ /**
+ * Process each individual iFile.
+ *
+ * @param context The ingest job context
+ * @param recycleBinArtifactType Module created artifact type
+ * @param iFile The AbstractFile to process
+ * @param userNameMap Map of user ids to names
+ * @param rFileMap Map of $R file names to the matching $R files
+ * @param tempRARecycleBinPath Temp directory path
+ */
+ private void processIFile(IngestJobContext context, BlackboardArtifact.Type recycleBinArtifactType, AbstractFile iFile, Map<String, String> userNameMap, Map<String, List<AbstractFile>> rFileMap, String tempRARecycleBinPath) {
+ String tempFilePath = tempRARecycleBinPath + File.separator + Instant.now().getMillis() + iFile.getName();
+ try {
+ try {
+ ContentUtils.writeToFile(iFile, new File(tempFilePath));
+ } catch (IOException ex) {
+ logger.log(Level.WARNING, String.format("Unable to write %s to temp directory. File name: %s", iFile.getName(), tempFilePath), ex); //NON-NLS
+ // if we cannot make a copy of the $I file for later processing
+ // move onto the next file
+ return;
+ }
+
+ // get the original name, dates, etc. from the $I file
+ RecycledFileMetaData metaData;
+ try {
+ metaData = parseIFile(tempFilePath);
+ } catch (IOException ex) {
+ logger.log(Level.WARNING, String.format("Unable to parse iFile %s", iFile.getName()), ex); //NON-NLS
+ // Unable to parse the $I file; move on to the next file
+ return;
+ }
+
+ // each user has their own Recycle Bin folder. Figure out the user name based on the folder name.
+ String userID = getUserIDFromPath(iFile.getParentPath());
+ String userName = "";
+ if (!userID.isEmpty()) {
+ userName = userNameMap.get(userID);
+ } else {
+ // If the iFile doesn't have a user ID in its parent
+ // directory structure, then it is not from the recycle bin
+ return;
+ }
+
+ // get the corresponding $R file, which is in the same folder and has the file content
+ String rFileName = iFile.getName().replace("$I", "$R"); //NON-NLS
+ List<AbstractFile> rFiles = rFileMap.get(rFileName);
+ if (rFiles == null) {
+ return;
+ }
+ SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase();
+ for (AbstractFile rFile : rFiles) {
+ if (context.dataSourceIngestIsCancelled()) {
+ return;
+ }
+
+ if (iFile.getParentPath().equals(rFile.getParentPath())
+ && iFile.getMetaFlagsAsString().equals(rFile.getMetaFlagsAsString())) {
+ try {
+ postArtifact(createArtifact(rFile, recycleBinArtifactType, metaData.getFullWindowsPath(), userName, metaData.getDeletedTimeStamp()));
+
+ // If we are processing a disk image, we will also make a deleted file entry so that the user
+ // sees the deleted file in its original folder. We re-use the metadata address so that the user
+ // can see the content.
+ if (rFile instanceof FsContent) {
+ // if the user deleted a folder, then we need to recursively go into it. Note the contents of the $R folder
+ // do not have corresponding $I files anymore. Only the $R folder does.
+ if (rFile.isDir()) {
+ AbstractFile directory = getOrMakeFolder(Case.getCurrentCase().getSleuthkitCase(), (FsContent) rFile, metaData.getFullWindowsPath());
+ populateDeletedDirectory(Case.getCurrentCase().getSleuthkitCase(), directory, rFile.getChildren(), metaData.getFullWindowsPath(), metaData.getDeletedTimeStamp());
+
+ } else {
+ AbstractFile folder = getOrMakeFolder(Case.getCurrentCase().getSleuthkitCase(), (FsContent) rFile.getParent(), Paths.get(metaData.getFullWindowsPath()).getParent().toString());
+ addFileSystemFile(skCase, (FsContent)rFile, folder, Paths.get(metaData.getFullWindowsPath()).getFileName().toString(), metaData.getDeletedTimeStamp());
+ }
+ }
+ } catch (TskCoreException ex) {
+ logger.log(Level.WARNING, String.format("Unable to add attributes to artifact %s", rFile.getName()), ex); //NON-NLS
+ }
+ }
+ }
+ } finally {
+ (new File(tempFilePath)).delete();
+ }
+ }
+
+ /**
+ * Add the children of a recycled $R folder to the given parent folder.
+ *
+ * @param skCase The current Sleuthkit case
+ * @param parentFolder The folder to which the deleted files are to be
+ * added.
+ * @param recycledChildren The recycled children of the $R folder
+ * @param parentPath String path to the directory the children were
+ * deleted from
+ * @param deletedTimeStamp The time at which the files were deleted,
+ * inherited from the $R file.
+ *
+ * @throws TskCoreException
+ */
+ private void populateDeletedDirectory(SleuthkitCase skCase, AbstractFile parentFolder, List<Content> recycledChildren, String parentPath, long deletedTimeStamp) throws TskCoreException {
+ if (recycledChildren == null) {
+ return;
+ }
+
+ for (Content child : recycledChildren) {
+ if (child instanceof FsContent) {
+ FsContent fsContent = (FsContent) child;
+ if (fsContent.isFile()) {
+ addFileSystemFile(skCase, fsContent, parentFolder, fsContent.getName(), deletedTimeStamp);
+ } else if (fsContent.isDir()) {
+ String newPath = parentPath + "\\" + fsContent.getName();
+ AbstractFile childFolder = getOrMakeFolder(skCase, (FsContent) fsContent, parentPath);
+ populateDeletedDirectory(skCase, childFolder, fsContent.getChildren(), newPath, deletedTimeStamp);
+ }
+ }
+ }
+ }
+
+ /**
+ * Parse the $I file.
+ *
+ * File format prior to Windows 10:
+ *
+ * Offset | Size | Description |
+ * 0 | 8 | Header |
+ * 8 | 8 | File Size |
+ * 16 | 8 | Deleted Timestamp |
+ * 24 | 520 | File Name |
+ *
+ *
+ * File format Windows 10+
+ *
+ * Offset | Size | Description |
+ * 0 | 8 | Header |
+ * 8 | 8 | File Size |
+ * 16 | 8 | Deleted TimeStamp |
+ * 24 | 4 | File Name Length |
+ * 28 | var | File Name |
+ *
+ *
+ * For versions of Windows prior to 10, the header value is 0x01; for Windows 10
+ * and later it is 0x02.
+ *
+ * @param iFilePath Path to local copy of file in temp folder
+ *
+ * @return The metadata (file size, deleted timestamp, original file path) parsed from the $I file
+ *
+ * @throws FileNotFoundException
+ * @throws IOException
+ */
+ private RecycledFileMetaData parseIFile(String iFilePath) throws FileNotFoundException, IOException {
+ byte[] allBytes = Files.readAllBytes(Paths.get(iFilePath));
+
+ ByteBuffer byteBuffer = ByteBuffer.wrap(allBytes);
+ byteBuffer.order(ByteOrder.LITTLE_ENDIAN);
+
+ long version = byteBuffer.getLong();
+ long fileSize = byteBuffer.getLong();
+ long timestamp = byteBuffer.getLong();
+
+ // Convert from windows FILETIME to Unix Epoch seconds
+ timestamp = Util.filetimeToMillis(timestamp) / 1000;
+
+ byte[] stringBytes;
+
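+ // Version 1 stores the name in a fixed 520-byte (260 UTF-16 character) field; version 2 stores an explicit character count before the name.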
+ if (version == 1) {
+ stringBytes = Arrays.copyOfRange(allBytes, V1_FILE_NAME_OFFSET, allBytes.length);
+ } else {
+ int fileNameLength = byteBuffer.getInt() * 2; // Name length is stored in UTF-16 characters, two bytes each
+ stringBytes = Arrays.copyOfRange(allBytes, V2_FILE_NAME_OFFSET, V2_FILE_NAME_OFFSET + fileNameLength);
+ }
+
+ String fileName = new String(stringBytes, "UTF-16LE"); //NON-NLS
+
+ return new RecycledFileMetaData(fileSize, timestamp, fileName);
+ }
+
+ /**
+ * Create a map of user IDs to user names from the OS account artifacts.
+ *
+ * @param dataSource The data source to gather the OS accounts from
+ *
+ * @return A map of user IDs to user names
+ *
+ * @throws TskCoreException
+ */
+ private Map<String, String> makeUserNameMap(Content dataSource) throws TskCoreException {
+ Map<String, String> userNameMap = new HashMap<>();
+
+ List<BlackboardArtifact> accounts = blackboard.getArtifacts(TSK_OS_ACCOUNT.getTypeID(), dataSource.getId());
+
+ for (BlackboardArtifact account : accounts) {
+ BlackboardAttribute nameAttribute = getAttributeForArtifact(account, TSK_USER_NAME);
+ BlackboardAttribute idAttribute = getAttributeForArtifact(account, TSK_USER_ID);
+
+ String userName = nameAttribute != null ? nameAttribute.getDisplayString() : "";
+ String userID = idAttribute != null ? idAttribute.getDisplayString() : "";
+
+ if (!userID.isEmpty()) {
+ userNameMap.put(userID, userName);
+ }
+ }
+
+ return userNameMap;
+ }
+
+ /**
+ * Find the files whose names start with $R and create a map from file name
+ * to the list of matching files.
+ *
+ * @param dataSource The data source to search
+ *
+ * @return Map of file names to the $R files with that name
+ *
+ * @throws TskCoreException
+ */
+ private Map<String, List<AbstractFile>> makeRFileMap(Content dataSource) throws TskCoreException {
+ FileManager fileManager = Case.getCurrentCase().getServices().getFileManager();
+ List<AbstractFile> rFiles = fileManager.findFiles(dataSource, "$R%");
+ Map<String, List<AbstractFile>> fileMap = new HashMap<>();
+
+ for (AbstractFile rFile : rFiles) {
+ String fileName = rFile.getName();
+ List<AbstractFile> fileList = fileMap.get(fileName);
+
+ if (fileList == null) {
+ fileList = new ArrayList<>();
+ fileMap.put(fileName, fileList);
+ }
+
+ fileList.add(rFile);
+ }
+
+ return fileMap;
+ }
+
+ /**
+ * Helper function to get the user ID from the $I file's parent path. User
+ * IDs (SIDs) take the form S-1-...
+ *
+ * @param iFileParentPath String parent path of the iFile
+ *
+ * @return String user id
+ */
+ private String getUserIDFromPath(String iFileParentPath) {
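+ // The SID in the $Recycle.Bin path starts with the 'S' one character before the first '-'; any remaining path separators are stripped.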
+ int index = iFileParentPath.indexOf('-') - 1;
+ if (index >= 0) {
+ return (iFileParentPath.substring(index)).replace("/", "");
+ } else {
+ return "";
+ }
+ }
+
+ /**
+ * Gets the attribute for the given type from the given artifact.
+ *
+ * @param artifact BlackboardArtifact to get the attribute from
+ * @param type The BlackboardAttribute Type to get
+ *
+ * @return BlackboardAttribute for given artifact and type
+ *
+ * @throws TskCoreException
+ */
+ private BlackboardAttribute getAttributeForArtifact(BlackboardArtifact artifact, BlackboardAttribute.ATTRIBUTE_TYPE type) throws TskCoreException {
+ return artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.fromID(type.getTypeID())));
+ }
+
+ /**
+ * Create TSK_RECYCLE_BIN artifact type.
+ *
+ * @throws TskCoreException
+ */
+ private void createRecycleBinArtifactType() throws TskCoreException {
+ try {
+ tskCase.addBlackboardArtifactType(RECYCLE_BIN_ARTIFACT_NAME, "Recycle Bin"); //NON-NLS
+ } catch (TskDataException ex) {
+ logger.log(Level.INFO, String.format("%s may have already been defined for this case", RECYCLE_BIN_ARTIFACT_NAME));
+ }
+
+ }
+
+ /**
+ * Create the new artifact for the given rFile.
+ *
+ * @param rFile AbstractFile to create the artifact for
+ * @param type Type of artifact to create
+ * @param fileName The original path of the deleted file
+ * @param userName The name of the user that deleted the file
+ * @param dateTime The time in epoch seconds that the file was deleted
+ *
+ * @return Newly created artifact
+ *
+ * @throws TskCoreException
+ */
+ private BlackboardArtifact createArtifact(AbstractFile rFile, BlackboardArtifact.Type type, String fileName, String userName, long dateTime) throws TskCoreException {
+ BlackboardArtifact bba = rFile.newArtifact(type.getTypeID());
+ bba.addAttribute(new BlackboardAttribute(TSK_PATH, getName(), fileName));
+ bba.addAttribute(new BlackboardAttribute(TSK_DATETIME_DELETED, getName(), dateTime));
+ bba.addAttribute(new BlackboardAttribute(TSK_USER_NAME, getName(), userName == null || userName.isEmpty() ? "" : userName));
+ return bba;
+ }
+
+ /**
+ * Returns a folder for the given path. If the folder does not exist it is
+ * created. Recursively makes as many parent folders as needed.
+ *
+ * @param skCase
+ * @param dataSource
+ * @param path
+ *
+ * @return AbstractFile for the given path.
+ *
+ * @throws TskCoreException
+ */
+ private AbstractFile getOrMakeFolder(SleuthkitCase skCase, FsContent dataSource, String path) throws TskCoreException {
+
+ String parentPath = getParentPath(path);
+ String folderName = getFileName(path);
+
+ List<AbstractFile> files = null;
+ if (parentPath != null) {
+ if (!parentPath.equals("/")) {
+ parentPath = parentPath + "/";
+ }
+
+ files = skCase.findAllFilesWhere(String.format("fs_obj_id=%s AND parent_path='%s' AND name='%s'",
+ dataSource.getFileSystemId(), SleuthkitCase.escapeSingleQuotes(parentPath), folderName != null ? SleuthkitCase.escapeSingleQuotes(folderName) : ""));
+ } else {
+ files = skCase.findAllFilesWhere(String.format("fs_obj_id=%s AND parent_path='/' AND name=''", dataSource.getFileSystemId()));
+ }
+
+ if (files == null || files.isEmpty()) {
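+ // No matching folder in the case database: recursively ensure the parent exists, then add this folder as a virtual directory.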
+ AbstractFile parent = getOrMakeFolder(skCase, dataSource, parentPath);
+ return skCase.addVirtualDirectory(parent.getId(), folderName);
+ } else {
+ return files.get(0);
+ }
+ }
+
+ /**
+ * Adds a new file system file that is unallocated and maps to the original
+ * file in the recycle bin directory.
+ *
+ * @param skCase The current case.
+ * @param recycleBinFile The file from the recycle bin.
+ * @param parentDir The directory from which the recycled file was deleted.
+ * @param fileName The name of the file.
+ * @param deletedTime The time the file was deleted.
+ *
+ * @throws TskCoreException
+ */
+ private void addFileSystemFile(SleuthkitCase skCase, FsContent recycleBinFile, Content parentDir, String fileName, long deletedTime) throws TskCoreException {
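+ // The ctime/crtime/atime values are carried over from the recycled $R file; the $I deleted time fills the remaining timestamp slot.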
+ skCase.addFileSystemFile(
+ recycleBinFile.getDataSourceObjectId(),
+ recycleBinFile.getFileSystemId(),
+ fileName,
+ recycleBinFile.getMetaAddr(),
+ (int) recycleBinFile.getMetaSeq(),
+ recycleBinFile.getAttrType(),
+ recycleBinFile.getAttributeId(),
+ TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC,
+ (short) (TskData.TSK_FS_META_FLAG_ENUM.UNALLOC.getValue() | TskData.TSK_FS_META_FLAG_ENUM.USED.getValue()),
+ recycleBinFile.getSize(),
+ recycleBinFile.getCtime(), recycleBinFile.getCrtime(), recycleBinFile.getAtime(), deletedTime,
+ true, parentDir);
+ }
+
+ /**
+ * Clean up the Windows path string to match what the Autopsy database uses.
+ *
+ * @param pathString The file\folder path to normalize
+ *
+ * @return New path string with the root removed (e.g. X:) and the slashes
+ * changed from Windows to Unix style.
+ */
+ String normalizeFilePath(String pathString) {
+ if (pathString == null || pathString.isEmpty()) {
+ return null;
+ }
+
+ Path path = Paths.get(pathString);
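+ // On Windows this turns, for example, "C:\Users\bob\file.txt" into "/Users/bob/file.txt" (drive root dropped, slashes flipped).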
+ int nameCount = path.getNameCount();
+ if (nameCount > 0) {
+ String rootless = "/" + path.subpath(0, nameCount);
+ return rootless.replace("\\", "/");
+ } else {
+ return "/";
+ }
+ }
+
+ /**
+ * Helper function to get from the given path either the file name or the
+ * last directory in the path.
+ *
+ * @param filePath The file\directory path
+ *
+ * @return The file name if the path is a file path, otherwise the last
+ * directory in the path.
+ */
+ String getFileName(String filePath) {
+ Path fileNamePath = Paths.get(filePath).getFileName();
+ if (fileNamePath != null) {
+ return fileNamePath.toString();
+ }
+ return filePath;
+ }
+
+ /**
+ * Returns the parent path for the given path.
+ *
+ * @param path Path string
+ *
+ * @return The parent path for the given path.
+ */
+ String getParentPath(String path) {
+ Path parentPath = Paths.get(path).getParent();
+ if (parentPath != null) {
+ return normalizeFilePath(parentPath.toString());
+ }
+ return null;
+ }
+
+ /**
+ * Stores the data from the $I files.
+ */
+ final class RecycledFileMetaData {
+
+ private final long fileSize;
+ private final long deletedTimeStamp;
+ private final String fileName;
+
+ /**
+ * Constructs a new instance.
+ *
+ * @param fileSize Size of the deleted file.
+ * @param deletedTimeStamp Time the file was deleted.
+ * @param fileName Name of the deleted file.
+ */
+ RecycledFileMetaData(long fileSize, long deletedTimeStamp, String fileName) {
+ this.fileSize = fileSize;
+ this.deletedTimeStamp = deletedTimeStamp;
+ this.fileName = fileName;
+ }
+
+ /**
+ * Returns the size of the recycled file.
+ *
+ * @return Size of deleted file
+ */
+ long getFileSize() {
+ return fileSize;
+ }
+
+ /**
+ * Returns the time the file was deleted.
+ *
+ * @return deleted time in epoch seconds.
+ */
+ long getDeletedTimeStamp() {
+ return deletedTimeStamp;
+ }
+
+ /**
+ * Returns the full path to the deleted file or folder. This path will
+ * include the drive letter, e.g. C:\
+ *
+ * @return The full Windows path of the deleted file
+ */
+ String getFullWindowsPath() {
+ return fileName.trim();
+ }
+ }
+}
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java
index 6e4d0fc4c7..628b84ea08 100644
--- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java
@@ -68,13 +68,19 @@ import org.openide.util.Lookup;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException;
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService;
+import org.sleuthkit.autopsy.recentactivity.ShellBagParser.ShellBag;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_MODIFIED;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException;
import org.sleuthkit.datamodel.Report;
import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.datamodel.TskDataException;
/**
* Extract windows registry data using regripper. Runs two versions of
@@ -85,7 +91,10 @@ import org.sleuthkit.datamodel.TskCoreException;
@NbBundle.Messages({
"RegRipperNotFound=Autopsy RegRipper executable not found.",
"RegRipperFullNotFound=Full version RegRipper executable not found.",
- "Progress_Message_Analyze_Registry=Analyzing Registry Files"
+ "Progress_Message_Analyze_Registry=Analyzing Registry Files",
+ "Shellbag_Artifact_Display_Name=Shell Bags",
+ "Shellbag_Key_Attribute_Display_Name=Key",
+ "Shellbag_Last_Write_Attribute_Display_Name=Last Write"
})
class ExtractRegistry extends Extract {
@@ -132,6 +141,14 @@ class ExtractRegistry extends Extract {
private final Path rrFullHome; // Path to the full version of RegRipper
private Content dataSource;
private IngestJobContext context;
+
+ private static final String SHELLBAG_ARTIFACT_NAME = "RA_SHELL_BAG"; //NON-NLS
+ private static final String SHELLBAG_ATTRIBUTE_LAST_WRITE = "RA_SHELL_BAG_LAST_WRITE"; //NON-NLS
+ private static final String SHELLBAG_ATTRIBUTE_KEY = "RA_SHELL_BAG_KEY"; //NON-NLS
+
+ BlackboardArtifact.Type shellBagArtifactType = null;
+ BlackboardAttribute.Type shellBagKeyAttributeType = null;
+ BlackboardAttribute.Type shellBagLastWriteAttributeType = null;
ExtractRegistry() throws IngestModuleException {
moduleName = NbBundle.getMessage(ExtractIE.class, "ExtractRegistry.moduleName.text");
@@ -195,6 +212,13 @@ class ExtractRegistry extends Extract {
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Error fetching 'ntuser.dat' file."); //NON-NLS
}
+
+ // find the user-specific usrclass.dat files
+ try {
+ allRegistryFiles.addAll(fileManager.findFiles(dataSource, "usrclass.dat")); //NON-NLS
+ } catch (TskCoreException ex) {
+ logger.log(Level.WARNING, String.format("Error finding 'usrclass.dat' files."), ex); //NON-NLS
+ }
// find the system hives'
String[] regFileNames = new String[]{"system", "software", "security", "sam"}; //NON-NLS
@@ -204,7 +228,7 @@ class ExtractRegistry extends Extract {
} catch (TskCoreException ex) {
String msg = NbBundle.getMessage(this.getClass(),
"ExtractRegistry.findRegFiles.errMsg.errReadingFile", regFileName);
- logger.log(Level.WARNING, msg);
+ logger.log(Level.WARNING, msg, ex);
this.addErrorMessage(this.getName() + ": " + msg);
}
}
@@ -282,6 +306,13 @@ class ExtractRegistry extends Extract {
this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractRegistry.analyzeRegFiles.failedParsingResults",
this.getName(), regFileName));
+ } else if (regFileNameLocal.toLowerCase().contains("ntuser") || regFileNameLocal.toLowerCase().contains("usrclass")) {
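+ // Shellbag data lives in the NTUSER.DAT and USRCLASS.DAT hives, so only their RegRipper output is parsed for shellbags.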
+ try {
+ List<ShellBag> shellbags = ShellBagParser.parseShellbagOutput(regOutputFiles.fullPlugins);
+ createShellBagArtifacts(regFile, shellbags);
+ } catch (IOException | TskCoreException ex) {
+ logger.log(Level.WARNING, String.format("Unable to get shell bags from file %s", regOutputFiles.fullPlugins), ex);
+ }
}
try {
Report report = currentCase.addReport(regOutputFiles.fullPlugins,
@@ -340,6 +371,8 @@ class ExtractRegistry extends Extract {
fullType = "sam"; //NON-NLS
} else if (regFilePath.toLowerCase().contains("security")) { //NON-NLS
fullType = "security"; //NON-NLS
+ } else if (regFilePath.toLowerCase().contains("usrclass")) { //NON-NLS
+ fullType = "usrclass"; //NON-NLS
} else {
return regOutputFiles;
}
@@ -840,13 +873,13 @@ class ExtractRegistry extends Extract {
} // for
return true;
} catch (FileNotFoundException ex) {
- logger.log(Level.SEVERE, "Error finding the registry file.", ex); //NON-NLS
+ logger.log(Level.WARNING, String.format("Error finding the registry file: %s", regFilePath), ex); //NON-NLS
} catch (SAXException ex) {
- logger.log(Level.SEVERE, "Error parsing the registry XML.", ex); //NON-NLS
+ logger.log(Level.WARNING, String.format("Error parsing the registry XML: %s", regFilePath), ex); //NON-NLS
} catch (IOException ex) {
- logger.log(Level.SEVERE, "Error building the document parser.", ex); //NON-NLS
+ logger.log(Level.WARNING, String.format("Error building the document parser: %s", regFilePath), ex); //NON-NLS
} catch (ParserConfigurationException ex) {
- logger.log(Level.SEVERE, "Error configuring the registry parser.", ex); //NON-NLS
+ logger.log(Level.WARNING, String.format("Error configuring the registry parser: %s", regFilePath), ex); //NON-NLS
} finally {
try {
if (fstream != null) {
@@ -1119,7 +1152,120 @@ class ExtractRegistry extends Extract {
}
}
+
/**
+ * Create the shellbag artifacts from the list of ShellBag objects.
+ *
+ * @param regFile The registry file the shellbags were parsed from
+ * @param shellbags List of shellbags from source file
+ *
+ * @throws TskCoreException
+ */
+ void createShellBagArtifacts(AbstractFile regFile, List<ShellBag> shellbags) throws TskCoreException {
+ List<BlackboardArtifact> artifacts = new ArrayList<>();
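+ // Collect each artifact's attributes and add them in one call, then post all shellbag artifacts to the blackboard in a single batch after the loop.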
+ for (ShellBag bag : shellbags) {
+ Collection<BlackboardAttribute> attributes = new ArrayList<>();
+ BlackboardArtifact artifact = regFile.newArtifact(getShellBagArtifact().getTypeID());
+ attributes.add(new BlackboardAttribute(TSK_PATH, getName(), bag.getResource()));
+ attributes.add(new BlackboardAttribute(getKeyAttribute(), getName(), bag.getKey()));
+
+ long time = bag.getLastWrite();
+ if (time != 0) {
+ attributes.add(new BlackboardAttribute(getLastWriteAttribute(), getName(), time));
+ }
+
+ time = bag.getModified();
+ if (time != 0) {
+ attributes.add(new BlackboardAttribute(TSK_DATETIME_MODIFIED, getName(), time));
+ }
+
+ time = bag.getCreated();
+ if (time != 0) {
+ attributes.add(new BlackboardAttribute(TSK_DATETIME_CREATED, getName(), time));
+ }
+
+ time = bag.getAccessed();
+ if (time != 0) {
+ attributes.add(new BlackboardAttribute(TSK_DATETIME_ACCESSED, getName(), time));
+ }
+
+ artifact.addAttributes(attributes);
+
+ artifacts.add(artifact);
+ }
+
+ postArtifacts(artifacts);
+ }
+
+ /**
+ * Returns the custom Shellbag artifact type or creates it if it does not
+ * currently exist.
+ *
+ * @return BlackboardArtifact.Type for shellbag artifacts
+ *
+ * @throws TskCoreException
+ */
+ private BlackboardArtifact.Type getShellBagArtifact() throws TskCoreException {
+ if (shellBagArtifactType == null) {
+ try {
+ tskCase.addBlackboardArtifactType(SHELLBAG_ARTIFACT_NAME, Bundle.Shellbag_Artifact_Display_Name()); //NON-NLS
+ } catch (TskDataException ex) {
+ // Artifact already exists
+ logger.log(Level.INFO, String.format("%s may have already been defined for this case", SHELLBAG_ARTIFACT_NAME), ex);
+ }
+
+ shellBagArtifactType = tskCase.getArtifactType(SHELLBAG_ARTIFACT_NAME);
+ }
+
+ return shellBagArtifactType;
+ }
+
+ /**
+ * Gets the custom shellbag last write BlackboardAttribute type. The
+ * attribute type is created if it does not currently exist.
+ *
+ * @return The BlackboardAttribute type
+ *
+ * @throws TskCoreException
+ */
+ private BlackboardAttribute.Type getLastWriteAttribute() throws TskCoreException {
+ if (shellBagLastWriteAttributeType == null) {
+ try {
+ shellBagLastWriteAttributeType = tskCase.addArtifactAttributeType(SHELLBAG_ATTRIBUTE_LAST_WRITE,
+ BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME,
+ Bundle.Shellbag_Last_Write_Attribute_Display_Name());
+ } catch (TskDataException ex) {
+ // Attribute already exists; get it from the case
+ shellBagLastWriteAttributeType = tskCase.getAttributeType(SHELLBAG_ATTRIBUTE_LAST_WRITE);
+ }
+ }
+ return shellBagLastWriteAttributeType;
+ }
+
+ /**
+ * Gets the custom shellbag key BlackboardAttribute type. The attribute
+ * type is created if it does not currently exist.
+ *
+ * @return The BlackboardAttribute type
+ *
+ * @throws TskCoreException
+ */
+ private BlackboardAttribute.Type getKeyAttribute() throws TskCoreException {
+ if (shellBagKeyAttributeType == null) {
+ try {
+ shellBagKeyAttributeType = tskCase.addArtifactAttributeType(SHELLBAG_ATTRIBUTE_KEY,
+ BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
+ Bundle.Shellbag_Key_Attribute_Display_Name());
+ } catch (TskDataException ex) {
+ // The attribute already exists; get it from the case
+ shellBagKeyAttributeType = tskCase.getAttributeType(SHELLBAG_ATTRIBUTE_KEY);
+ }
+ }
+ return shellBagKeyAttributeType;
+ }
+
+ /**
* Maps the user groups to the sid that are a part of them.
*
* @param bufferedReader
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java
index db815e9274..444a6d638d 100644
--- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java
@@ -77,6 +77,7 @@ public final class RAImageIngestModule implements DataSourceIngestModule {
Extract dataSourceAnalyzer = new DataSourceUsageAnalyzer();
Extract safari = new ExtractSafari();
Extract zoneInfo = new ExtractZoneIdentifier();
+ Extract recycleBin = new ExtractRecycleBin();
extractors.add(chrome);
extractors.add(firefox);
@@ -89,6 +90,7 @@ public final class RAImageIngestModule implements DataSourceIngestModule {
extractors.add(osExtract); // this needs to run before the DataSourceUsageAnalyzer
extractors.add(dataSourceAnalyzer); //this needs to run after ExtractRegistry and ExtractOs
extractors.add(zoneInfo); // this needs to run after the web browser modules
+ extractors.add(recycleBin); // this needs to run after ExtractRegistry and ExtractOS
browserExtractors.add(chrome);
browserExtractors.add(firefox);
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ShellBagParser.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ShellBagParser.java
new file mode 100755
index 0000000000..a7de5ebf5c
--- /dev/null
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ShellBagParser.java
@@ -0,0 +1,362 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2019 Basis Technology Corp.
+ *
+ * Copyright 2012 42six Solutions.
+ * Contact: aebadirad 42six com
+ * Project Contact/Architect: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.recentactivity;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Locale;
+import java.util.logging.Level;
+import org.sleuthkit.autopsy.coreutils.Logger;
+
+/**
+ * Parse the ntuser and usrclass RegRipper output files for shellbags.
+ */
+public class ShellBagParser {
+ private static final Logger logger = Logger.getLogger(ShellBagParser.class.getName());
+
+ static SimpleDateFormat DATE_TIME_FORMATTER = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.getDefault());
+ // Last Write date\time format from itempos plugin
+ static SimpleDateFormat DATE_TIME_FORMATTER2 = new SimpleDateFormat("EEE MMM dd HH:mm:ss yyyyy", Locale.getDefault());
+
+ private ShellBagParser() {
+ }
+
+ /**
+ * Parse the given file for shell bags.
+ *
+ * @param regFilePath Regripper output file
+ *
+ * @return List of the found shellbags
+ *
+ * @throws FileNotFoundException
+ * @throws IOException
+ */
+ static List<ShellBag> parseShellbagOutput(String regFilePath) throws FileNotFoundException, IOException {
+ List<ShellBag> shellbags = new ArrayList<>();
+ File regfile = new File(regFilePath);
+
+ ShellBagParser sbparser = new ShellBagParser();
+
+ try (BufferedReader reader = new BufferedReader(new FileReader(regfile))) {
+ String line = reader.readLine();
+ while (line != null) {
+ line = line.trim();
+
+ if (line.matches("^shellbags_xp v.*")) {
+ shellbags.addAll(sbparser.parseShellBagsXP(reader));
+ } else if (line.matches("^shellbags v.*")) {
+ shellbags.addAll(sbparser.parseShellBags(reader));
+ } else if (line.matches("^itempos.*")) {
+ shellbags.addAll(sbparser.parseItempos(reader));
+ }
+
+ line = reader.readLine();
+ }
+ }
+
+ return shellbags;
+ }
+
+ /**
+ * Parse the output from the shellbags_xp plugin.
+ *
+ * @param reader File reader
+ *
+ * @return List of found shellbags
+ *
+ * @throws IOException
+ */
+ List<ShellBag> parseShellBagsXP(BufferedReader reader) throws IOException {
+ List<ShellBag> shellbags = new ArrayList<>();
+ String line = reader.readLine();
+
+ while (line != null && !isSectionSeparator(line)) {
+
+ if (isShellbagXPDataLine(line)) {
+ String[] tokens = line.split("\\|");
+ if (tokens.length >= 6) {
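+ // Columns as consumed here: 0 = MRU/last write time, 1 = modified, 2 = accessed, 3 = created, 5 = resource name.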
+ shellbags.add(new ShellBag(tokens[5].trim(), "Software\\Microsoft\\Windows\\ShellNoRoam\\BagMRU", tokens[0].trim(), tokens[1].trim(), tokens[2].trim(), tokens[3].trim()));
+ }
+ }
+
+ line = reader.readLine();
+ }
+
+ return shellbags;
+ }
+
+ /**
+ * Parse the output of the shellbags regripper plugin.
+ *
+ * @param reader
+ * @return List of found shellbags
+ *
+ * @throws IOException
+ */
+ List<ShellBag> parseShellBags(BufferedReader reader) throws IOException {
+ List<ShellBag> shellbags = new ArrayList<>();
+ String line = reader.readLine();
+ String regPath = "Local Settings\\Software\\Microsoft\\Windows\\Shell\\BagMRU";
+
+ while (line != null && !isSectionSeparator(line)) {
+
+ if (isShellbagDataLine(line)) {
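+ // The bracketed suffix on each row (the BagMRU sub-key path) is stripped from the resource value and appended to the registry key.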
+ String[] tokens = line.split("\\|");
+ if (tokens.length >= 7) {
+ String path = tokens[6].replaceAll("\\[.*?\\]", "").trim();
+ int index = line.lastIndexOf('[');
+ String endstuff = "";
+ if (index != -1) {
+ endstuff = line.substring(index, line.length() - 1).replace("[Desktop", "");
+ }
+ shellbags.add(new ShellBag(path, regPath + endstuff, tokens[0].trim(), tokens[1].trim(), tokens[2].trim(), tokens[3].trim()));
+ }
+ }
+
+ line = reader.readLine();
+ }
+
+ return shellbags;
+ }
+
+ /**
+ * Parse the output of the Itempos regripper plugin.
+ *
+ * @param reader
+ *
+ * @return List of found shell bags.
+ *
+ * @throws IOException
+ */
+ List<ShellBag> parseItempos(BufferedReader reader) throws IOException {
+ List<ShellBag> shellbags = new ArrayList<>();
+ String bagpath = "";
+ String lastWrite = "";
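+ // The itempos output lists the registry key path and its LastWrite time on their own lines before the item rows, so track the most recent ones as we go.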
+ String line = reader.readLine();
+
+ while (line != null && !isSectionSeparator(line)) {
+
+ if (isItemposDataLine(line)) {
+ String[] tokens = line.split("\\|");
+ if (tokens.length >= 5) {
+ shellbags.add(new ShellBag(tokens[4].trim(), bagpath, lastWrite, tokens[1].trim(), tokens[2].trim(), tokens[3].trim()));
+ }
+ } else if (line.contains("Software\\")) {
+ bagpath = line.trim();
+ lastWrite = "";
+ } else if (line.contains("LastWrite:")) {
+ lastWrite = line.replace("LastWrite:", "").trim();
+ }
+
+ line = reader.readLine();
+ }
+
+ return shellbags;
+ }
+
+ /**
+ * Return whether or not the given line is a plugin output separator.
+ *
+ * The format of the plugin output separators is:
+ * ----------------------------------------
+ *
+ * @param line
+ *
+ * @return True if the line is a section separator
+ */
+ boolean isSectionSeparator(String line) {
+ if (line == null || line.isEmpty()) {
+ return false;
+ }
+
+ return line.trim().matches("^-+");
+ }
+
+ /**
+ * The data rows from the itempos plugin are pipe-delimited:
+ * index | modified time | accessed time | created time | resource name
+ * The times are in the format yyyy-MM-dd HH:mm:ss
+ *
+ * @param line
+ *
+ * @return
+ */
+ boolean isItemposDataLine(String line) {
+ return line.matches("^\\d*?\\s*?\\|.*?\\|.*?\\|.*?\\|.*?");
+ }
+
+ /**
+ * The data rows from the shellbags_xp plugin are pipe-delimited:
+ * MRU time | modified time | accessed time | created time | ... | resource name
+ *
+ * The times are in the format yyyy-MM-dd HH:mm:ss
+ *
+ * @param line
+ *
+ * @return
+ */
+ boolean isShellbagXPDataLine(String line) {
+ return line.matches("^(\\d+?.*?\\s*? | \\s*?)\\|.*?\\|.*?\\|.*?\\|.*?\\|.*?");
+ }
+
+ /**
+ * The data rows from the shellbags plugin are pipe-delimited:
+ * MRU time | modified time | accessed time | created time | ... | ... | resource path [BagMRU sub-key]
+ *
+ * The times are in the format yyyy-MM-dd HH:mm:ss
+ *
+ * @param line
+ *
+ * @return
+ */
+ boolean isShellbagDataLine(String line) {
+ return line.matches("^(\\d+?.*?\\s*? | \\s*?)\\|.*?\\|.*?\\|.*?\\|.*?\\|.*?\\|.*?");
+ }
+
+ /**
+ * Class to hold the shell bag data.
+ *
+ */
+ class ShellBag {
+
+ private final String resource;
+ private final String key;
+ private final String lastWrite;
+ private final String modified;
+ private final String accessed;
+ private final String created;
+
+ /**
+ * Creates a new shell bag object.
+ *
+ * Any of the parameters may be the empty string.
+ *
+ * @param resource String from the "Resource" or "Name" column, depending on the plugin
+ * @param key String registry key value
+ * @param lastWrite Depending on the plugin, either the last write value or the MRU time value
+ * @param modified Modified time string
+ * @param accessed Accessed time string
+ * @param created Created time string
+ */
+ ShellBag(String resource, String key, String lastWrite, String modified, String accessed, String created) {
+ this.resource = resource;
+ this.key = key;
+ this.lastWrite = lastWrite;
+ this.accessed = accessed;
+ this.modified = modified;
+ this.created = created;
+ }
+
+ /**
+ * Returns the resource string.
+ *
+ * @return The shellbag resource or empty string.
+ */
+ String getResource() {
+ return resource == null ? "" : resource;
+ }
+
+ /**
+ * Returns the key string.
+ *
+ * @return The shellbag key or empty string.
+ */
+ String getKey() {
+ return key == null ? "" : key;
+ }
+
+ /**
+ * Returns the last write time in seconds since the java epoch, or
+ * 0 if no valid time was found.
+ *
+ * @return The time in seconds or 0 if no valid time.
+ */
+ long getLastWrite() {
+ return parseDateTime(lastWrite);
+ }
+
+ /**
+ * Returns the modified time in seconds since the java epoch, or
+ * 0 if no valid time was found.
+ *
+ * @return The time in seconds or 0 if no valid time.
+ */
+ long getModified() {
+ return parseDateTime(modified);
+ }
+
+ /**
+ * Returns the accessed time in seconds since the java epoch, or
+ * 0 if no valid time was found.
+ *
+ * @return The time in seconds or 0 if no valid time.
+ */
+ long getAccessed() {
+ return parseDateTime(accessed);
+ }
+
+ /**
+ * Returns the created time in seconds since the java epoch, or
+ * 0 if no valid time was found.
+ *
+ * @return The time in seconds or 0 if no valid time.
+ */
+ long getCreated() {
+ return parseDateTime(created);
+ }
+
+ /**
+ * Returns the date\time in seconds since the java epoch for the given
+ * string. Two formats are tried: yyyy-MM-dd HH:mm:ss and the itempos
+ * last write format (EEE MMM dd HH:mm:ss yyyy).
+ *
+ * @param dateTimeString The date\time string to parse
+ *
+ * @return Time in seconds since the java epoch, or 0 if the string cannot be parsed
+ */
+ long parseDateTime(String dateTimeString) {
+ if (!dateTimeString.isEmpty()) {
+ try {
+ return DATE_TIME_FORMATTER.parse(dateTimeString).getTime() / 1000;
+ } catch (ParseException ex) {
+ // The parse of the string may fail because there are two possible formats.
+ }
+
+ try {
+ return DATE_TIME_FORMATTER2.parse(dateTimeString).getTime() / 1000;
+ } catch (ParseException ex) {
+ logger.log(Level.WARNING, String.format("ShellBag parse failure. %s is not formated as expected.", dateTimeString), ex);
+ }
+ }
+ return 0;
+ }
+ }
+
+}
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Util.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Util.java
index a6c6416416..ff95c60ca8 100644
--- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Util.java
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Util.java
@@ -51,6 +51,12 @@ import org.sleuthkit.datamodel.TskCoreException;
class Util {
private static Logger logger = Logger.getLogger(Util.class.getName());
+
+ /** Difference between Filetime epoch and Unix epoch (in ms). */
+ private static final long FILETIME_EPOCH_DIFF = 11644473600000L;
+
+ /** One millisecond expressed in units of 100s of nanoseconds. */
+ private static final long FILETIME_ONE_MILLISECOND = 10 * 1000;
private Util() {
}
@@ -176,4 +182,16 @@ class Util {
}
return results;
}
+
+ /**
+ * Converts a Windows FILETIME to milliseconds since the java/Unix epoch.
+ *
+ * @param filetime Number of 100-nanosecond intervals since Jan 1, 1601
+ *
+ * @return Milliseconds since the java/Unix epoch
+ */
+ static long filetimeToMillis(final long filetime) {
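+ // FILETIME counts 100-nanosecond ticks since 1601-01-01; dividing by 10,000 gives milliseconds,
+ // and shifting by the 1601-to-1970 epoch difference gives Unix milliseconds.
+ // For example, 132223104000000000 converts to 1577836800000 ms (2020-01-01 00:00:00 UTC).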
+ return (filetime / FILETIME_ONE_MILLISECOND) - FILETIME_EPOCH_DIFF;
+ }
+
}
diff --git a/docs/doxygen-user/images/photorec_output.PNG b/docs/doxygen-user/images/photorec_output.PNG
index 168e8b0272..566e6a4652 100644
Binary files a/docs/doxygen-user/images/photorec_output.PNG and b/docs/doxygen-user/images/photorec_output.PNG differ
diff --git a/docs/doxygen-user/images/portable_case_folder.png b/docs/doxygen-user/images/portable_case_folder.png
index a02bd4e087..1e3a5b98df 100644
Binary files a/docs/doxygen-user/images/portable_case_folder.png and b/docs/doxygen-user/images/portable_case_folder.png differ
diff --git a/docs/doxygen-user/portable_case.dox b/docs/doxygen-user/portable_case.dox
index b476206a38..34dcc7bf57 100644
--- a/docs/doxygen-user/portable_case.dox
+++ b/docs/doxygen-user/portable_case.dox
@@ -2,7 +2,7 @@
\section portable_case_overview Overview
-A portable case is a partial copy of a normal Autopsy case that can be opened from anywhere. It contains a subset of the data from its original case and has been designed to make it easy to share relevant data with other examiners.
+A portable case is a partial copy of a normal Autopsy case that can be opened from anywhere. It contains a subset of the data from its original case and has been designed to make it easy to share relevant data with other examiners. Portable cases will contain a \ref report_case_uco report detailing the contents of the portable case.
The general use case is as follows:
@@ -29,7 +29,7 @@ A portable case can contain tagged files and results and data from the Interesti
\image html portable_case_interesting_items.png
-You can tag any additional files you want to include in the portable case. See the \ref tagging_page page for details on how to create tags. You can see what tags you've added in the \ref tree_viewer_page.
+You can tag any additional files you want to include in the portable case. See the \ref tagging_page page for details on how to create tags. Note that the outlines from \ref image_tagging "image tags" will also be visible in the portable case. You can see what tags you've added in the \ref tree_viewer_page.
\image html portable_case_tags.png
@@ -39,7 +39,7 @@ The portable case will be placed in the "Reports" folder in the current case.
\image html portable_case_report_panel.png
-Here you can see an unpackaged portable case. It will be named with the original case name plus "(Portable)". The portable case is initially missing many of the normal Autopsy folders - these will be created the first time a user opens it.
+Here you can see an unpackaged portable case. It will be named with the original case name plus "(Portable)". The portable case is initially missing many of the normal Autopsy folders - these will be created the first time a user opens it. It will, however, start with a "Reports" folder that contains an automatically generated \ref report_case_uco report.
\image html portable_case_folder.png
diff --git a/thirdparty/rr-full/plugins/shellbags.pl b/thirdparty/rr-full/plugins/shellbags.pl
index 5b8587af38..d8e770471e 100644
--- a/thirdparty/rr-full/plugins/shellbags.pl
+++ b/thirdparty/rr-full/plugins/shellbags.pl
@@ -3,6 +3,10 @@
# RR plugin to parse (Vista, Win7/Win2008R2) shell bags
#
# History:
+# 20190715 - updated to parse WPD devices better
+# 20180702 - update to parseGUID function
+# 20180117 - modification thanks to input/data from Mike Godfrey
+# 20160706 - update
# 20150325 - updated parsing based on input from Eric Zimmerman
# 20140728 - updated shell item 0x01 parsing
# 20131216 - updated to support shell item type 0x52
@@ -31,7 +35,7 @@
# Moore for writing the shell bag parser for Registry Decoder, as well as
# assistance with some parsing.
#
-# License: GPL v3
+#
# copyright 2015 Quantum Analytics Research, LLC
# Author: H. Carvey, keydet89@yahoo.com
#-----------------------------------------------------------
@@ -47,12 +51,12 @@ my %config = (hive => "USRCLASS\.DAT",
hasShortDescr => 1,
hasDescr => 0,
hasRefs => 0,
- version => 20150325);
+ version => 20190715);
sub getConfig{return %config}
sub getShortDescr {
- return "Shell/BagMRU traversal in Win7 USRCLASS.DAT hives";
+ return "Shell/BagMRU traversal in Win7+ USRCLASS\.DAT hives";
}
sub getDescr{}
sub getRefs {}
@@ -90,6 +94,7 @@ my %cp_guids = ("{bb64f8a7-bee7-4e1a-ab8d-7d8273f7fdb6}" => "Action Center",
"{a3dd4f92-658a-410f-84fd-6fbbbef2fffe}" => "Internet Options",
"{a304259d-52b8-4526-8b1a-a1d6cecc8243}" => "iSCSI Initiator",
"{725be8f7-668e-4c7b-8f90-46bdb0936430}" => "Keyboard",
+ "{bf782cc9-5a52-4a17-806c-2a894ffeeac5}" => "Language Settings",
"{e9950154-c418-419e-a90a-20c5287ae24b}" => "Location and Other Sensors",
"{1fa9085f-25a2-489b-85d4-86326eedcd87}" => "Manage Wireless Networks",
"{6c8eec18-8d75-41b2-a177-8831d59d2d50}" => "Mouse",
@@ -178,8 +183,8 @@ sub pluginmain {
my $class = shift;
my $hive = shift;
::logMsg("Launching shellbags v.".$VERSION);
- ::rptMsg("shellbags v.".$VERSION); # banner
- ::rptMsg("(".getHive().") ".getShortDescr()."\n"); # banner
+ ::rptMsg("shellbags v.".$VERSION);
+ ::rptMsg("(".getHive().") ".getShortDescr()."\n");
my %item = ();
my $reg = Parse::Win32Registry->new($hive);
@@ -226,6 +231,13 @@ sub traverse {
my $type = unpack("C",substr($values{$v},2,1));
+# DEBUG ------------------------------------------------
+# ::rptMsg($key->get_path()."\\".$v);
+# ::rptMsg(sprintf "Type = 0x%x",$type);
+# probe($values{$v});
+# ::rptMsg("");
+# DEBUG ------------------------------------------------
+
# Need to first check to see if the parent of the item was a zip folder
# and if the 'zipsubfolder' value is set to 1
if (exists ${$parent}{zipsubfolder} && ${$parent}{zipsubfolder} == 1) {
@@ -245,6 +257,9 @@ sub traverse {
# System Folder
%item = parseSystemFolderEntry($values{$v});
}
+ elsif ($type == 0x2a) {
+ $item{name} = substr($values{$v},0x3,3);
+ }
elsif ($type == 0x2e) {
# Device
%item = parseDeviceEntry($values{$v});
@@ -374,11 +389,10 @@ sub parseVariableEntry {
# Ref: http://msdn.microsoft.com/en-us/library/aa965725(v=vs.85).aspx
my $stuff = $segs{"{b725f130-47ef-101a-a5f1-02608c9eebac}"};
- my $tag = 1;
+ my $t = 1;
my $cnt = 0x10;
- while($tag) {
+ while($t) {
my $sz = unpack("V",substr($stuff,$cnt,4));
- return %item unless (defined $sz);
my $id = unpack("V",substr($stuff,$cnt + 4,4));
#--------------------------------------------------------------
# sub-segment types
@@ -388,14 +402,14 @@ sub parseVariableEntry {
# 0x0c - size
#--------------------------------------------------------------
if ($sz == 0x00) {
- $tag = 0;
+ $t = 0;
next;
}
elsif ($id == 0x0a) {
my $num = unpack("V",substr($stuff,$cnt + 13,4));
my $str = substr($stuff,$cnt + 13 + 4,($num * 2));
- $str =~ s/\x00//g;
+ $str =~ s/\00//g;
$item{name} = $str;
}
$cnt += $sz;
@@ -406,24 +420,41 @@ sub parseVariableEntry {
elsif (substr($data,4,4) eq "AugM") {
%item = parseFolderEntry($data);
}
+# Code for Windows Portable Devices
+# Added 20190715
+ elsif (parseGUID(substr($data,42,16)) eq "{27e2e392-a111-48e0-ab0c-e17705a05f85}") {
+ my ($n0, $n1, $n2) = unpack("VVV",substr($data,62,12));
+
+ my $n0_name = substr($data,0x4A,($n0 * 2));
+ $n0_name =~ s/\00//g;
+
+ my $n1_name = substr($data,(0x4A + ($n0 * 2)),($n1 * 2));
+ $n1_name =~ s/\00//g;
+
+ if ($n0_name eq "") {
+ $item{name} = $n1_name;
+ }
+ else {
+ $item{name} = $n0_name;
+ }
+ }
# Following two entries are for Device Property data
elsif ($tag == 0x7b || $tag == 0xbb || $tag == 0xfb) {
my ($sz1,$sz2,$sz3) = unpack("VVV",substr($data,0x3e,12));
$item{name} = substr($data,0x4a,$sz1 * 2);
- $item{name} =~ s/\x00//g;
+ $item{name} =~ s/\00//g;
}
elsif ($tag == 0x02 || $tag == 0x03) {
my ($sz1,$sz2,$sz3,$sz4) = unpack("VVVV",substr($data,0x26,16));
$item{name} = substr($data,0x36,$sz1 * 2);
- $item{name} =~ s/\x00//g;
+ $item{name} =~ s/\00//g;
}
elsif (unpack("v",substr($data,6,2)) == 0x05) {
my $o = 0x26;
my $t = 1;
while ($t) {
my $i = substr($data,$o,1);
- return %item unless (defined $i);
- if ($i =~ m/\x00/) {
+ if ($i =~ m/\00/) {
$t = 0;
}
else {
@@ -447,7 +478,7 @@ sub parseNetworkEntry {
my %item = ();
$item{type} = unpack("C",substr($data,2,1));
- my @n = split(/\x00/,substr($data,4,length($data) - 4));
+ my @n = split(/\00/,substr($data,4,length($data) - 4));
$item{name} = $n[0];
return %item;
}
@@ -464,13 +495,13 @@ sub parseZipSubFolderItem {
# Get the opened/accessed date/time
$item{datetime} = substr($data,0x24,6);
- $item{datetime} =~ s/\x00//g;
+ $item{datetime} =~ s/\00//g;
if ($item{datetime} eq "N/A") {
}
else {
$item{datetime} = substr($data,0x24,40);
- $item{datetime} =~ s/\x00//g;
+ $item{datetime} =~ s/\00//g;
my ($date,$time) = split(/\s+/,$item{datetime},2);
my ($mon,$day,$yr) = split(/\//,$date,3);
my ($hr,$min,$sec) = split(/:/,$time,3);
@@ -483,9 +514,9 @@ sub parseZipSubFolderItem {
my $sz2 = unpack("V",substr($data,0x58,4));
my $str1 = substr($data,0x5C,$sz *2) if ($sz > 0);
- $str1 =~ s/\x00//g;
+ $str1 =~ s/\00//g;
my $str2 = substr($data,0x5C + ($sz * 2),$sz2 *2) if ($sz2 > 0);
- $str2 =~ s/\x00//g;
+ $str2 =~ s/\00//g;
if ($sz2 > 0) {
$item{name} = $str1."\\".$str2;
@@ -548,10 +579,10 @@ sub parseURIEntry {
my $sz = unpack("V",substr($data,0x2a,4));
my $uri = substr($data,0x2e,$sz);
- $uri =~ s/\x00//g;
+ $uri =~ s/\00//g;
my $proto = substr($data,length($data) - 6, 6);
- $proto =~ s/\x00//g;
+ $proto =~ s/\00//g;
$item{name} = $proto."://".$uri." [".gmtime($item{uritime})."]";
@@ -601,8 +632,8 @@ sub parseGUID {
my $d3 = unpack("v",substr($data,6,2));
my $d4 = unpack("H*",substr($data,8,2));
my $d5 = unpack("H*",substr($data,10,6));
- my $guid = sprintf "{%08x-%x-%x-$d4-$d5}",$d1,$d2,$d3;
-
+ my $guid = sprintf "{%08x-%04x-%04x-$d4-$d5}",$d1,$d2,$d3;
+
if (exists $cp_guids{$guid}) {
return "CLSID_".$cp_guids{$guid};
}
@@ -625,6 +656,10 @@ sub parseDeviceEntry {
my $ofs = unpack("v",substr($data,4,2));
my $tag = unpack("V",substr($data,6,4));
+#-----------------------------------------------------
+# DEBUG
+# ::rptMsg("parseDeviceEntry, tag = ".$tag);
+#-----------------------------------------------------
if ($tag == 0) {
my $guid1 = parseGUID(substr($data,$ofs + 6,16));
my $guid2 = parseGUID(substr($data,$ofs + 6 + 16,16));
@@ -632,13 +667,17 @@ sub parseDeviceEntry {
}
elsif ($tag == 2) {
$item{name} = substr($data,0x0a,($ofs + 6) - 0x0a);
- $item{name} =~ s/\x00//g;
+ $item{name} =~ s/\00//g;
}
else {
my $ver = unpack("C",substr($data,9,1));
-
+ my $idx = unpack("C",substr($data,3,1));
+
+ if ($idx == 0x80) {
+ $item{name} = parseGUID(substr($data,4,16));
+ }
# Version 3 = XP
- if ($ver == 3) {
+ elsif ($ver == 3) {
my $guid1 = parseGUID(substr($data,$ofs + 6,16));
my $guid2 = parseGUID(substr($data,$ofs + 6 + 16,16));
$item{name} = $guid1."\\".$guid2
@@ -649,14 +688,11 @@ sub parseDeviceEntry {
my $userlen = unpack("V",substr($data,30,4));
my $devlen = unpack("V",substr($data,34,4));
my $user = substr($data,0x28,$userlen * 2);
- $user =~ s/\x00//g;
+ $user =~ s/\00//g;
my $dev = substr($data,0x28 + ($userlen * 2),$devlen * 2);
- $dev =~ s/\x00//g;
+ $dev =~ s/\00//g;
$item{name} = $user;
- }
- elsif (unpack("C",substr($data,3,1)) == 0x80) {
- $item{name} = parseGUID(substr($data,4,16));
- }
+ }
# Version unknown
else {
$item{name} = "Device Entry - Unknown Version";
@@ -697,7 +733,7 @@ sub parseControlPanelEntry {
#
#-----------------------------------------------------------
sub parseFolderEntry {
- my $data = shift;
+ my $data = shift;
my %item = ();
$item{type} = unpack("C",substr($data,2,1));
@@ -726,85 +762,105 @@ sub parseFolderEntry {
my @m = unpack("vv",substr($data,$ofs_mdate,4));
($item{mtime_str},$item{mtime}) = convertDOSDate($m[0],$m[1]);
-# Need to read in short name; nul-term ASCII
-# $item{shortname} = (split(/\x00/,substr($data,12,length($data) - 12),2))[0];
- $ofs_shortname = $ofs_mdate + 6;
- my $tag = 1;
- my $cnt = 0;
- my $str = "";
- while($tag) {
- my $s = substr($data,$ofs_shortname + $cnt,1);
- return %item unless (defined $s);
- if ($s =~ m/\x00/ && ((($cnt + 1) % 2) == 0)) {
- $tag = 0;
- }
- else {
- $str .= $s;
- $cnt++;
- }
- }
-# $str =~ s/\x00//g;
- my $shortname = $str;
- my $ofs = $ofs_shortname + $cnt + 1;
-# Read progressively, 1 byte at a time, looking for 0xbeef
- $tag = 1;
- $cnt = 0;
- while ($tag) {
- my $s = substr($data,$ofs + $cnt,2);
- return %item unless (defined $s);
- if (unpack("v",$s) == 0xbeef) {
- $tag = 0;
- }
- else {
- $cnt++;
- }
- }
- $item{extver} = unpack("v",substr($data,$ofs + $cnt - 4,2));
-# printf "Version: 0x%x\n",$item{extver};
- $ofs = $ofs + $cnt + 2;
+# DEBUG ------------------------------------------------
+# Added 20160706 based on sample data provided by J. Poling
- @m = unpack("vv",substr($data,$ofs,4));
- ($item{ctime_str},$item{ctime}) = convertDOSDate($m[0],$m[1]);
- $ofs += 4;
- @m = unpack("vv",substr($data,$ofs,4));
- ($item{atime_str},$item{atime}) = convertDOSDate($m[0],$m[1]);
-
- my $jmp;
- if ($item{extver} == 0x03) {
- $jmp = 8;
- }
- elsif ($item{extver} == 0x07) {
- $jmp = 26;
- }
- elsif ($item{extver} == 0x08) {
- $jmp = 30;
- }
- elsif ($item{extver} == 0x09) {
- $jmp = 34;
- }
- else {}
-
- if ($item{type} == 0x31 && $item{extver} >= 0x07) {
- my @n = unpack("Vvv",substr($data,$ofs + 8, 8));
- if ($n[2] != 0) {
- $item{mft_rec_num} = getNum48($n[0],$n[1]);
- $item{mft_seq_num} = $n[2];
-# ::rptMsg("MFT: ".$item{mft_rec_num}."/".$item{mft_seq_num});
-# probe($data);
- }
- }
-
- $ofs += $jmp;
-
- $str = substr($data,$ofs,length($data) - 30);
- my $longname = (split(/\x00\x00/,$str,2))[0];
- $longname =~ s/\x00//g;
-
- if ($longname ne "") {
- $item{name} = $longname;
+ if (length($data) < 0x30) {
+# start at offset 0xE, read in nul-term ASCII string (until "\00" is reached)
+ $ofs_shortname = 0xE;
+ my $tag = 1;
+ my $cnt = 0;
+ my $str = "";
+ while($tag) {
+ my $s = substr($data,$ofs_shortname + $cnt,1);
+ if ($s =~ m/\00/) {
+ $tag = 0;
+ }
+ else {
+ $str .= $s;
+ $cnt++;
+ }
+ }
+ $item{name} = $str;
}
else {
- $item{name} = $shortname;
+# Need to read in short name; nul-term ASCII
+# $item{shortname} = (split(/\00/,substr($data,12,length($data) - 12),2))[0];
+ $ofs_shortname = $ofs_mdate + 6;
+ my $tag = 1;
+ my $cnt = 0;
+ my $str = "";
+ while($tag) {
+ my $s = substr($data,$ofs_shortname + $cnt,1);
+ if ($s =~ m/\00/ && ((($cnt + 1) % 2) == 0)) {
+ $tag = 0;
+ }
+ else {
+ $str .= $s;
+ $cnt++;
+ }
+ }
+# $str =~ s/\00//g;
+ my $shortname = $str;
+ my $ofs = $ofs_shortname + $cnt + 1;
+# Read progressively, 1 byte at a time, looking for 0xbeef
+ my $tag = 1;
+ my $cnt = 0;
+ while ($tag) {
+ if (unpack("v",substr($data,$ofs + $cnt,2)) == 0xbeef) {
+ $tag = 0;
+ }
+ else {
+ $cnt++;
+ }
+ }
+ $item{extver} = unpack("v",substr($data,$ofs + $cnt - 4,2));
+# printf "Version: 0x%x\n",$item{extver};
+ $ofs = $ofs + $cnt + 2;
+
+ my @m = unpack("vv",substr($data,$ofs,4));
+ ($item{ctime_str},$item{ctime}) = convertDOSDate($m[0],$m[1]);
+ $ofs += 4;
+ my @m = unpack("vv",substr($data,$ofs,4));
+ ($item{atime_str},$item{atime}) = convertDOSDate($m[0],$m[1]);
+
+ my $jmp;
+ if ($item{extver} == 0x03) {
+ $jmp = 8;
+ }
+ elsif ($item{extver} == 0x07) {
+ $jmp = 26;
+ }
+ elsif ($item{extver} == 0x08) {
+ $jmp = 30;
+ }
+ elsif ($item{extver} == 0x09) {
+ $jmp = 34;
+ }
+ else {}
+
+ if ($item{type} == 0x31 && $item{extver} >= 0x07) {
+ my @n = unpack("Vvv",substr($data,$ofs + 8, 8));
+ if ($n[2] != 0) {
+ $item{mft_rec_num} = getNum48($n[0],$n[1]);
+ $item{mft_seq_num} = $n[2];
+# ::rptMsg("MFT: ".$item{mft_rec_num}."/".$item{mft_seq_num});
+# probe($data);
+ }
+ }
+
+ $ofs += $jmp;
+
+ my $str = substr($data,$ofs,length($data) - 30);
+ my $longname = (split(/\00\00/,$str,2))[0];
+ $longname =~ s/\00//g;
+
+ if ($longname ne "") {
+ $item{name} = $longname;
+ }
+ else {
+ $item{name} = $shortname;
+ }
}
return %item;
}
@@ -855,9 +911,7 @@ sub parseFolderEntry2 {
my $tag = 1;
while ($tag) {
- my $s = substr($data,$ofs,2);
- return %item unless (defined $s);
- if (unpack("v",$s) == 0xbeef) {
+ if (unpack("v",substr($data,$ofs,2)) == 0xbeef) {
$tag = 0;
}
else {
@@ -894,9 +948,9 @@ sub parseFolderEntry2 {
# }
# ::rptMsg("");
- $item{name} = (split(/\x00\x00/,$str,2))[0];
- $item{name} =~ s/\x13\x20/\x2D\x00/;
- $item{name} =~ s/\x00//g;
+ $item{name} = (split(/\00\00/,$str,2))[0];
+ $item{name} =~ s/\13\20/\2D\00/;
+ $item{name} =~ s/\00//g;
return %item;
}
@@ -907,7 +961,7 @@ sub parseNetworkEntry {
my $data = shift;
my %item = ();
$item{type} = unpack("C",substr($data,2,1));
- my @names = split(/\x00/,substr($data,5,length($data) - 5));
+ my @names = split(/\00/,substr($data,5,length($data) - 5));
$item{name} = $names[0];
return %item;
}
@@ -919,9 +973,9 @@ sub parseDatePathItem {
my $data = shift;
my %item = ();
$item{datestr} = substr($data,0x18,30);
- my ($file,$dir) = split(/\x00\x00/,substr($data,0x44,length($data) - 0x44));
- $file =~ s/\x00//g;
- $dir =~ s/\x00//g;
+ my ($file,$dir) = split(/\00\00/,substr($data,0x44,length($data) - 0x44));
+ $file =~ s/\00//g;
+ $dir =~ s/\00//g;
$item{name} = $dir.$file;
return %item;
}
@@ -958,7 +1012,6 @@ sub shellItem0x52 {
while ($tag) {
$d = substr($data,0x32 + $cnt,2);
- return %item unless (defined $d);
if (unpack("v",$d) == 0) {
$tag = 0;
}
@@ -967,7 +1020,7 @@ sub shellItem0x52 {
$cnt += 2;
}
}
- $item{name} =~ s/\x00//g;
+ $item{name} =~ s/\00//g;
if ($item{subtype} < 3) {
$ofs = 0x32 + $cnt + 2;
@@ -977,7 +1030,7 @@ sub shellItem0x52 {
}
$sz = unpack("V",substr($data,$ofs,4));
$item{str} = substr($data,$ofs + 4,$sz * 2);
- $item{str} =~ s/\x00//g;
+ $item{str} =~ s/\00//g;
return %item;
}
@@ -1058,4 +1111,4 @@ sub getNum48 {
}
}
-1;
+1;
\ No newline at end of file
diff --git a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java
index f428b455cf..3f1daa1a1c 100644
--- a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java
+++ b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java
@@ -411,7 +411,7 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
* appropriate artifacts and derived files.
*
* @param partialEmailsForThreading
- * @param fileMessageIterator
+ * @param fullMessageIterator
* @param abstractFile
*/
private void processEmails(List<EmailMessage> partialEmailsForThreading, Iterator<EmailMessage> fullMessageIterator, AbstractFile abstractFile) {