diff --git a/Core/nbproject/project.properties b/Core/nbproject/project.properties
index a55cb7e02c..9cdbb08f9e 100644
--- a/Core/nbproject/project.properties
+++ b/Core/nbproject/project.properties
@@ -2,7 +2,8 @@ file.reference.jdom-2.0.5-contrib.jar=release/modules/ext/jdom-2.0.5-contrib.jar
file.reference.jdom-2.0.5.jar=release/modules/ext/jdom-2.0.5.jar
file.reference.jython-standalone-2.7.0.jar=release/modules/ext/jython-standalone-2.7.0.jar
file.reference.jython.jar-1=release/modules/ext/jython.jar
-file.reference.metadata-extractor-2.6.2.jar=release/modules/ext/metadata-extractor-2.6.2.jar
+file.reference.metadata-extractor-2.8.1.jar=release/modules/ext/metadata-extractor-2.8.1.jar
+file.reference.opencv-248.jar=release/modules/ext/opencv-248.jar
file.reference.Rejistry-1.0-SNAPSHOT.jar=release/modules/ext/Rejistry-1.0-SNAPSHOT.jar
file.reference.sevenzipjbinding-AllPlatforms.jar=release/modules/ext/sevenzipjbinding-AllPlatforms.jar
file.reference.sevenzipjbinding.jar=release/modules/ext/sevenzipjbinding.jar
@@ -10,12 +11,13 @@ file.reference.sqlite-jdbc-3.8.11.jar=release/modules/ext/sqlite-jdbc-3.8.11.jar
file.reference.StixLib.jar=release/modules/ext/StixLib.jar
file.reference.tika-core-1.2.jar=release/modules/ext/tika-core-1.2.jar
file.reference.Tsk_DataModel.jar=release/modules/ext/Tsk_DataModel.jar
-file.reference.xmpcore.jar=release/modules/ext/xmpcore.jar
+file.reference.xmpcore-5.1.2.jar=release/modules/ext/xmpcore-5.1.2.jar
javac.source=1.8
javac.compilerargs=-Xlint -Xlint:-serial
license.file=../LICENSE-2.0.txt
nbm.homepage=http://www.sleuthkit.org/
nbm.module.author=Brian Carrier
nbm.needs.restart=true
+source.reference.metadata-extractor-2.8.1.jar=release/modules/ext/metadata-extractor-2.8.1-src.zip!/Source/
spec.version.base=10.3
diff --git a/Core/nbproject/project.xml b/Core/nbproject/project.xml
index 440fdaa0bc..c8089bda6c 100644
--- a/Core/nbproject/project.xml
+++ b/Core/nbproject/project.xml
@@ -203,10 +203,26 @@
             <package>org.sleuthkit.autopsy.report</package>
             <package>org.sleuthkit.datamodel</package>
         </public-packages>
+        <class-path-extension>
+            <runtime-relative-path>ext/xmpcore-5.1.2.jar</runtime-relative-path>
+            <binary-origin>release/modules/ext/xmpcore-5.1.2.jar</binary-origin>
+        </class-path-extension>
         <class-path-extension>
             <runtime-relative-path>ext/jdom-2.0.5.jar</runtime-relative-path>
             <binary-origin>release/modules/ext/jdom-2.0.5.jar</binary-origin>
         </class-path-extension>
+        <class-path-extension>
+            <runtime-relative-path>ext/StixLib.jar</runtime-relative-path>
+            <binary-origin>release/modules/ext/StixLib.jar</binary-origin>
+        </class-path-extension>
+        <class-path-extension>
+            <runtime-relative-path>ext/sqlite-jdbc-3.8.11.jar</runtime-relative-path>
+            <binary-origin>release/modules/ext/sqlite-jdbc-3.8.11.jar</binary-origin>
+        </class-path-extension>
+        <class-path-extension>
+            <runtime-relative-path>ext/opencv-248.jar</runtime-relative-path>
+            <binary-origin>release/modules/ext/opencv-248.jar</binary-origin>
+        </class-path-extension>
         <class-path-extension>
             <runtime-relative-path>ext/Rejistry-1.0-SNAPSHOT.jar</runtime-relative-path>
             <binary-origin>release/modules/ext/Rejistry-1.0-SNAPSHOT.jar</binary-origin>
@@ -219,34 +235,18 @@
             <runtime-relative-path>ext/jython-standalone-2.7.0.jar</runtime-relative-path>
             <binary-origin>release/modules/ext/jython-standalone-2.7.0.jar</binary-origin>
         </class-path-extension>
-        <class-path-extension>
-            <runtime-relative-path>ext/StixLib.jar</runtime-relative-path>
-            <binary-origin>release/modules/ext/StixLib.jar</binary-origin>
-        </class-path-extension>
-        <class-path-extension>
-            <runtime-relative-path>ext/opencv-248.jar</runtime-relative-path>
-            <binary-origin>release/modules/ext/opencv-248.jar</binary-origin>
-        </class-path-extension>
-        <class-path-extension>
-            <runtime-relative-path>ext/sqlite-jdbc-3.8.11.jar</runtime-relative-path>
-            <binary-origin>release/modules/ext/sqlite-jdbc-3.8.11.jar</binary-origin>
-        </class-path-extension>
         <class-path-extension>
             <runtime-relative-path>ext/sevenzipjbinding-AllPlatforms.jar</runtime-relative-path>
             <binary-origin>release/modules/ext/sevenzipjbinding-AllPlatforms.jar</binary-origin>
         </class-path-extension>
-        <class-path-extension>
-            <runtime-relative-path>ext/metadata-extractor-2.6.2.jar</runtime-relative-path>
-            <binary-origin>release/modules/ext/metadata-extractor-2.6.2.jar</binary-origin>
-        </class-path-extension>
-        <class-path-extension>
-            <runtime-relative-path>ext/xmpcore.jar</runtime-relative-path>
-            <binary-origin>release/modules/ext/xmpcore.jar</binary-origin>
-        </class-path-extension>
         <class-path-extension>
             <runtime-relative-path>ext/tika-core-1.2.jar</runtime-relative-path>
             <binary-origin>release/modules/ext/tika-core-1.2.jar</binary-origin>
         </class-path-extension>
+        <class-path-extension>
+            <runtime-relative-path>ext/metadata-extractor-2.8.1.jar</runtime-relative-path>
+            <binary-origin>release/modules/ext/metadata-extractor-2.8.1.jar</binary-origin>
+        </class-path-extension>
         <class-path-extension>
             <runtime-relative-path>ext/jdom-2.0.5-contrib.jar</runtime-relative-path>
             <binary-origin>release/modules/ext/jdom-2.0.5-contrib.jar</binary-origin>
diff --git a/Core/release/modules/ext/metadata-extractor-2.6.2.jar b/Core/release/modules/ext/metadata-extractor-2.6.2.jar
deleted file mode 100755
index 68426ac059..0000000000
Binary files a/Core/release/modules/ext/metadata-extractor-2.6.2.jar and /dev/null differ
diff --git a/Core/release/modules/ext/metadata-extractor-2.8.1.jar b/Core/release/modules/ext/metadata-extractor-2.8.1.jar
new file mode 100755
index 0000000000..a5fe48640b
Binary files /dev/null and b/Core/release/modules/ext/metadata-extractor-2.8.1.jar differ
diff --git a/Core/release/modules/ext/xmpcore-5.1.2.jar b/Core/release/modules/ext/xmpcore-5.1.2.jar
new file mode 100755
index 0000000000..ecd5db142e
Binary files /dev/null and b/Core/release/modules/ext/xmpcore-5.1.2.jar differ
diff --git a/Core/release/modules/ext/xmpcore.jar b/Core/release/modules/ext/xmpcore.jar
deleted file mode 100755
index 884c2dd57f..0000000000
Binary files a/Core/release/modules/ext/xmpcore.jar and /dev/null differ
diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/FileTypeExtensions.java b/Core/src/org/sleuthkit/autopsy/datamodel/FileTypeExtensions.java
index e9af7ca1d2..963b081b13 100644
--- a/Core/src/org/sleuthkit/autopsy/datamodel/FileTypeExtensions.java
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/FileTypeExtensions.java
@@ -37,7 +37,7 @@ public class FileTypeExtensions {
 private final static List<String> TEXT_EXTENSIONS = Arrays.asList(".txt", ".rtf", ".log", ".text", ".xml"); //NON-NLS
 private final static List<String> WEB_EXTENSIONS = Arrays.asList(".html", ".htm", ".css", ".js", ".php", ".aspx"); //NON-NLS
 private final static List<String> PDF_EXTENSIONS = Arrays.asList(".pdf"); //NON-NLS
-    private final static List<String> ARCHIVE_EXTENSIONS = Arrays.asList(".zip", ".rar", ".7zip", ".7z", ".arj", ".tar", ".gzip", ".bzip", ".bzip2", ".cab", ".jar", ".cpio", ".ar", ".gz", ".tgz"); //NON-NLS
+    private final static List<String> ARCHIVE_EXTENSIONS = Arrays.asList(".zip", ".rar", ".7zip", ".7z", ".arj", ".tar", ".gzip", ".bzip", ".bzip2", ".cab", ".jar", ".cpio", ".ar", ".gz", ".tgz", ".bz2"); //NON-NLS

 public static List<String> getImageExtensions() {
return IMAGE_EXTENSIONS;
diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties
index c8904ee1aa..c91965d14b 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties
+++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties
@@ -36,4 +36,6 @@ EmbeddedFileExtractorIngestModule.ImageExtractor.pptxContainer.init.err=Pptx con
EmbeddedFileExtractorIngestModule.ImageExtractor.xlsContainer.init.err=Xls container could not be initialized while reading: {0}
EmbeddedFileExtractorIngestModule.ImageExtractor.xlsxContainer.init.err=Xlsx container could not be initialized while reading: {0}
EmbeddedFileExtractorIngestModule.ImageExtractor.extractImage.addToDB.exception.msg=Unable to add the derived files to the database.
-EmbeddedFileExtractorIngestModule.ImageExtractor.getOutputFolderPath.exception.msg=Could not get path for image extraction from Abstract File: {0}
\ No newline at end of file
+EmbeddedFileExtractorIngestModule.ImageExtractor.getOutputFolderPath.exception.msg=Could not get path for image extraction from Abstract File: {0}
+EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.noSpace.msg=Unable to write content to disk. Not enough space.
\ No newline at end of file
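
The new noSpace key added above is looked up through NbBundle at runtime. A minimal sketch of the lookup, matching how SevenZipExtractor consumes it later in this patch:

```java
import org.openide.util.NbBundle;

// Resolves the key against the Bundle.properties in SevenZipExtractor's
// package; a localized bundle variant, if present, overrides the default.
String noSpaceMsg = NbBundle.getMessage(SevenZipExtractor.class,
        "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.noSpace.msg");
```
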
diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java
index 11ba04612e..b0934c1216 100755
--- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java
@@ -24,6 +24,8 @@ import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
@@ -40,7 +42,6 @@ import net.sf.sevenzipjbinding.simple.ISimpleInArchive;
import net.sf.sevenzipjbinding.simple.ISimpleInArchiveItem;
import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.progress.ProgressHandleFactory;
-import org.openide.util.Exceptions;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
@@ -98,7 +99,8 @@ class SevenZipExtractor {
GZIP("application/gzip"),
XGZIP("application/x-gzip"),
XBZIP2("application/x-bzip2"),
- XTAR("application/x-tar");
+ XTAR("application/x-tar"),
+ XGTAR("application/x-gtar");
private final String mimeType;
@@ -121,9 +123,9 @@ class SevenZipExtractor {
logger.log(Level.INFO, "7-Zip-JBinding library was initialized on supported platform: {0}", platform); //NON-NLS
} catch (SevenZipNativeInitializationException e) {
logger.log(Level.SEVERE, "Error initializing 7-Zip-JBinding library", e); //NON-NLS
- String msg = NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.init.errInitModule.msg",
+ String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.init.errInitModule.msg",
EmbeddedFileExtractorModuleFactory.getModuleName());
- String details = NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.init.errCantInitLib",
+ String details = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.init.errCantInitLib",
e.getMessage());
services.postMessage(IngestMessage.createErrorMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
throw new IngestModuleException(e.getMessage());
@@ -204,7 +206,7 @@ class SevenZipExtractor {
if (cRatio >= MAX_COMPRESSION_RATIO) {
String itemName = archiveFileItem.getPath();
logger.log(Level.INFO, "Possible zip bomb detected, compression ration: {0} for in archive item: {1}", new Object[]{cRatio, itemName}); //NON-NLS
- String msg = NbBundle.getMessage(this.getClass(),
+ String msg = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg", archiveFile.getName(), itemName);
String path;
try {
@@ -212,7 +214,7 @@ class SevenZipExtractor {
} catch (TskCoreException ex) {
path = archiveFile.getParentPath() + archiveFile.getName();
}
- String details = NbBundle.getMessage(this.getClass(),
+ String details = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnDetails", cRatio, path);
//MessageNotifyUtil.Notify.error(msg, details);
services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
@@ -311,9 +313,9 @@ class SevenZipExtractor {
if (parentAr == null) {
parentAr = archiveDepthCountTree.addArchive(null, archiveId);
} else if (parentAr.getDepth() == MAX_DEPTH) {
- String msg = NbBundle.getMessage(this.getClass(),
+ String msg = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.warnMsg.zipBomb", archiveFile.getName());
- String details = NbBundle.getMessage(this.getClass(),
+ String details = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.warnDetails.zipBomb",
parentAr.getDepth(), archiveFilePath);
//MessageNotifyUtil.Notify.error(msg, details);
@@ -328,7 +330,7 @@ class SevenZipExtractor {
SevenZipContentReadStream stream = null;
final ProgressHandle progress = ProgressHandleFactory.createHandle(
- NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.moduleName"));
+ NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.moduleName"));
int processedItems = 0;
boolean progressStarted = false;
@@ -400,7 +402,7 @@ class SevenZipExtractor {
pathInArchive = "/" + useName;
}
- String msg = NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.unknownPath.msg",
+ String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.unknownPath.msg",
archiveFilePath, pathInArchive);
logger.log(Level.WARNING, msg);
@@ -432,24 +434,19 @@ class SevenZipExtractor {
fullEncryption = false;
}
- final Long size = item.getSize();
- if (size == null) {
- // If the size property cannot be determined, out-of-disk-space
- // situations cannot be ascertained.
- // Hence skip this file.
- logger.log(Level.WARNING, "Size cannot be determined. Skipping file in archive: {0}", pathInArchive); //NON-NLS
- continue;
- }
+        // NOTE: item.getSize() may return null for certain archive
+        // formats, e.g. BZ2.
+ Long size = item.getSize();
//check if unpacking this file will result in out of disk space
//this is additional to zip bomb prevention mechanism
- if (freeDiskSpace != IngestMonitor.DISK_FREE_SPACE_UNKNOWN && size > 0) { //if known free space and file not empty
+ if (freeDiskSpace != IngestMonitor.DISK_FREE_SPACE_UNKNOWN && size != null && size > 0) { //if free space is known and file is not empty.
long newDiskSpace = freeDiskSpace - size;
if (newDiskSpace < MIN_FREE_DISK_SPACE) {
- String msg = NbBundle.getMessage(this.getClass(),
+ String msg = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.notEnoughDiskSpace.msg",
archiveFilePath, fileName);
- String details = NbBundle.getMessage(this.getClass(),
+ String details = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.notEnoughDiskSpace.details");
//MessageNotifyUtil.Notify.error(msg, details);
services.postMessage(IngestMessage.createErrorMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
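
The hunk above loosens the old behavior (skip any item with an unknown size) into a null-tolerant pre-check. Condensed into a helper, under the patch's own constants (the method name is invented for illustration):

```java
// Returns true when it is safe to attempt unpacking an item of the given
// declared size. A null size (e.g. for BZ2 entries) defers the check to
// UnpackStream, which meters the actual bytes as they are written.
private static boolean hasRoomToUnpack(Long size, long freeDiskSpace) {
    if (freeDiskSpace == IngestMonitor.DISK_FREE_SPACE_UNKNOWN || size == null || size <= 0) {
        return true; // nothing reliable to check against; proceed
    }
    return (freeDiskSpace - size) >= MIN_FREE_DISK_SPACE;
}
```
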
@@ -501,21 +498,31 @@ class SevenZipExtractor {
final long modtime = writeTime == null ? 0L : writeTime.getTime() / 1000;
final long accesstime = accessTime == null ? 0L : accessTime.getTime() / 1000;
- //record derived data in unode, to be traversed later after unpacking the archive
- unpackedNode.addDerivedInfo(size, !isDir,
- 0L, createtime, accesstime, modtime, localRelPath);
-
//unpack locally if a file
+ SevenZipExtractor.UnpackStream unpackStream = null;
if (!isDir) {
- SevenZipExtractor.UnpackStream unpackStream = null;
try {
- unpackStream = new SevenZipExtractor.UnpackStream(localAbsPath);
+ unpackStream = new SevenZipExtractor.UnpackStream(localAbsPath, freeDiskSpace, size == null);
item.extractSlow(unpackStream);
} catch (Exception e) {
//could be something unexpected with this file, move on
logger.log(Level.WARNING, "Could not extract file from archive: " + localAbsPath, e); //NON-NLS
} finally {
if (unpackStream != null) {
+ //record derived data in unode, to be traversed later after unpacking the archive
+ if (size != null) {
+                        // unpackStream.bytesWritten will not be set in
+                        // this case; use 'size', which was read from the
+                        // archive earlier.
+ unpackedNode.addDerivedInfo(size, !isDir,
+ 0L, createtime, accesstime, modtime, localRelPath);
+ } else {
+ // since size is unknown, use
+ // unpackStream.getNumberOfBytesWritten() to get
+ // the size.
+ unpackedNode.addDerivedInfo(unpackStream.getNumberOfBytesWritten(), !isDir,
+ 0L, createtime, accesstime, modtime, localRelPath);
+ }
unpackStream.close();
}
}
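
Since the two branches above differ only in which byte count they pass to addDerivedInfo(), they could equally be collapsed; a sketch under the same field names:

```java
// Prefer the size declared by the archive; fall back to the number of
// bytes UnpackStream actually wrote when the format did not declare one.
long derivedSize = (size != null) ? size : unpackStream.getNumberOfBytesWritten();
unpackedNode.addDerivedInfo(derivedSize, !isDir,
        0L, createtime, accesstime, modtime, localRelPath);
```
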
@@ -549,9 +556,9 @@ class SevenZipExtractor {
// print a message if the file is allocated
if (archiveFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC)) {
- String msg = NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.errUnpacking.msg",
+ String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.errUnpacking.msg",
archiveFile.getName());
- String details = NbBundle.getMessage(this.getClass(),
+ String details = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.errUnpacking.details",
archiveFilePath, ex.getMessage());
services.postMessage(IngestMessage.createErrorMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
@@ -590,8 +597,8 @@ class SevenZipExtractor {
logger.log(Level.SEVERE, "Error creating blackboard artifact for encryption detected for file: " + archiveFilePath, ex); //NON-NLS
}
- String msg = NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.msg");
- String details = NbBundle.getMessage(this.getClass(),
+ String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.msg");
+ String details = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.details",
archiveFile.getName(), EmbeddedFileExtractorModuleFactory.getModuleName());
services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
@@ -612,8 +619,15 @@ class SevenZipExtractor {
private OutputStream output;
private String localAbsPath;
+ private long freeDiskSpace;
+ private boolean sizeUnknown = false;
+ private boolean outOfSpace = false;
+ private long bytesWritten = 0;
- UnpackStream(String localAbsPath) {
+ UnpackStream(String localAbsPath, long freeDiskSpace, boolean sizeUnknown) {
+ this.sizeUnknown = sizeUnknown;
+ this.freeDiskSpace = freeDiskSpace;
+ this.localAbsPath = localAbsPath;
try {
output = new BufferedOutputStream(new FileOutputStream(localAbsPath));
} catch (FileNotFoundException ex) {
@@ -622,13 +636,38 @@ class SevenZipExtractor {
}
+ public long getNumberOfBytesWritten() {
+ return this.bytesWritten;
+ }
+
@Override
public int write(byte[] bytes) throws SevenZipException {
try {
- output.write(bytes);
+ if (!sizeUnknown) {
+ output.write(bytes);
+ } else {
+ // If the content size is unknown, cautiously write to disk.
+ // Write only if byte array is less than 80% of the current
+ // free disk space.
+ if (freeDiskSpace == IngestMonitor.DISK_FREE_SPACE_UNKNOWN || bytes.length < 0.8 * freeDiskSpace) {
+ output.write(bytes);
+ // NOTE: this method is called multiple times for a
+ // single extractSlow() call. Update bytesWritten and
+ // freeDiskSpace after every write operation.
+ this.bytesWritten += bytes.length;
+ this.freeDiskSpace -= bytes.length;
+ } else {
+ this.outOfSpace = true;
+ logger.log(Level.INFO, NbBundle.getMessage(
+ SevenZipExtractor.class,
+ "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.noSpace.msg"));
+ throw new SevenZipException(
+ NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.noSpace.msg"));
+ }
+ }
} catch (IOException ex) {
throw new SevenZipException(
- NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.exception.msg",
+ NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.exception.msg",
localAbsPath), ex);
}
return bytes.length;
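
The 80% heuristic inside write() is the core of the new safeguard; isolating it makes the intent, and the unit-test surface, clearer. A sketch with the guard pulled into an invented helper on the same fields:

```java
// True if writing this buffer is acceptable under the running estimate:
// either free space is unknown to the ingest monitor, or the buffer is
// comfortably below 80% of what the estimate says remains.
private boolean fitsOnDisk(byte[] bytes) {
    return freeDiskSpace == IngestMonitor.DISK_FREE_SPACE_UNKNOWN
            || bytes.length < 0.8 * freeDiskSpace;
}
```
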
@@ -639,6 +678,9 @@ class SevenZipExtractor {
try {
output.flush();
output.close();
+ if (this.outOfSpace) {
+ Files.delete(Paths.get(this.localAbsPath));
+ }
} catch (IOException e) {
logger.log(Level.SEVERE, "Error closing unpack stream for file: {0}", localAbsPath); //NON-NLS
}
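
One defensive variant of the cleanup above, not part of the patch: the constructor appears to swallow FileNotFoundException, so output may be null, and deleteIfExists() tolerates a file that was never created. A sketch assuming the patch's fields (output, outOfSpace, localAbsPath):

```java
public void close() {
    if (output == null) {
        return; // the stream never opened, so nothing was written
    }
    try {
        output.flush();
        output.close();
        if (outOfSpace) {
            // deleteIfExists() tolerates a missing file, where
            // Files.delete() would throw NoSuchFileException.
            Files.deleteIfExists(Paths.get(localAbsPath));
        }
    } catch (IOException ex) {
        logger.log(Level.SEVERE, "Error closing unpack stream for file: {0}", localAbsPath); //NON-NLS
    }
}
```
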
@@ -774,7 +816,7 @@ class SevenZipExtractor {
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error adding a derived file to db:" + fileName, ex); //NON-NLS
throw new TskCoreException(
- NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackedTree.exception.msg",
+ NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackedTree.exception.msg",
fileName), ex);
}
@@ -961,4 +1003,4 @@ class SevenZipExtractor {
}
}
-}
+}
\ No newline at end of file
diff --git a/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java
index 7c186e93b4..deb55fa2ab 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java
@@ -22,10 +22,21 @@ import com.drew.imaging.ImageMetadataReader;
import com.drew.imaging.ImageProcessingException;
import com.drew.lang.GeoLocation;
import com.drew.lang.Rational;
+import com.drew.metadata.Directory;
import com.drew.metadata.Metadata;
+import com.drew.metadata.MetadataException;
+import com.drew.metadata.exif.makernotes.CanonMakernoteDirectory;
import com.drew.metadata.exif.ExifIFD0Directory;
import com.drew.metadata.exif.ExifSubIFDDirectory;
import com.drew.metadata.exif.GpsDirectory;
+import com.drew.metadata.exif.makernotes.CasioType1MakernoteDirectory;
+import com.drew.metadata.exif.makernotes.FujifilmMakernoteDirectory;
+import com.drew.metadata.exif.makernotes.KodakMakernoteDirectory;
+import com.drew.metadata.exif.makernotes.NikonType2MakernoteDirectory;
+import com.drew.metadata.exif.makernotes.PanasonicMakernoteDirectory;
+import com.drew.metadata.exif.makernotes.PentaxMakernoteDirectory;
+import com.drew.metadata.exif.makernotes.SanyoMakernoteDirectory;
+import com.drew.metadata.exif.makernotes.SonyType1MakernoteDirectory;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
@@ -33,6 +44,8 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
+import java.util.List;
+import java.util.TimeZone;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
import org.openide.util.NbBundle;
@@ -47,6 +60,8 @@ import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
+import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@@ -63,10 +78,13 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
private final IngestServices services = IngestServices.getInstance();
private final AtomicInteger filesProcessed = new AtomicInteger(0);
private volatile boolean filesToFire = false;
+ private volatile boolean facesDetected = false;
+ private final List<BlackboardArtifact> listOfFacesDetectedArtifacts = new ArrayList<>();
private long jobId;
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
private FileTypeDetector fileTypeDetector;
 private final HashSet<String> supportedMimeTypes = new HashSet<>();
+ private TimeZone timeZone = null;
ExifParserFileIngestModule() {
supportedMimeTypes.add("audio/x-wav");
@@ -103,9 +121,16 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
 // update the tree every 1000 files if we have EXIF data that is not being displayed
final int filesProcessedValue = filesProcessed.incrementAndGet();
- if ((filesToFire) && (filesProcessedValue % 1000 == 0)) {
- services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
- filesToFire = false;
+ if ((filesProcessedValue % 1000 == 0)) {
+ if (filesToFire) {
+ services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
+ filesToFire = false;
+ }
+ if (facesDetected) {
+ services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_FACE_DETECTED, listOfFacesDetectedArtifacts));
+ listOfFacesDetectedArtifacts.clear();
+ facesDetected = false;
+ }
}
//skip unsupported
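
The hunk above batches UI notifications: rather than firing a ModuleDataEvent per file, the module flushes at most one event per artifact type every 1000 files. The pattern, reduced to its essentials with the patch's fields:

```java
// Flush pending notifications every 1000 processed files. Each flag marks
// "new artifacts of this type since the last flush"; clearing it (and the
// face-artifact list) resets the batch.
final int processed = filesProcessed.incrementAndGet();
if (processed % 1000 == 0) {
    if (filesToFire) {
        services.fireModuleDataEvent(new ModuleDataEvent(
                ExifParserModuleFactory.getModuleName(),
                BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
        filesToFire = false;
    }
    if (facesDetected) {
        services.fireModuleDataEvent(new ModuleDataEvent(
                ExifParserModuleFactory.getModuleName(),
                BlackboardArtifact.ARTIFACT_TYPE.TSK_FACE_DETECTED,
                listOfFacesDetectedArtifacts));
        listOfFacesDetectedArtifacts.clear();
        facesDetected = false;
    }
}
```
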
@@ -125,19 +150,32 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
bin = new BufferedInputStream(in);
 Collection<BlackboardAttribute> attributes = new ArrayList<>();
- Metadata metadata = ImageMetadataReader.readMetadata(bin, true);
+ Metadata metadata = ImageMetadataReader.readMetadata(bin);
// Date
- ExifSubIFDDirectory exifDir = metadata.getDirectory(ExifSubIFDDirectory.class);
+ ExifSubIFDDirectory exifDir = metadata.getFirstDirectoryOfType(ExifSubIFDDirectory.class);
if (exifDir != null) {
- Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL);
+
+ // set the timeZone for the current datasource.
+ if (timeZone == null) {
+ try {
+ Content dataSource = f.getDataSource();
+ if ((dataSource != null) && (dataSource instanceof Image)) {
+ Image image = (Image) dataSource;
+ timeZone = TimeZone.getTimeZone(image.getTimeZone());
+ }
+ } catch (TskCoreException ex) {
+ logger.log(Level.INFO, "Error getting time zones", ex); //NON-NLS
+ }
+ }
+ Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL, timeZone);
if (date != null) {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), ExifParserModuleFactory.getModuleName(), date.getTime() / 1000));
}
}
// GPS Stuff
- GpsDirectory gpsDir = metadata.getDirectory(GpsDirectory.class);
+ GpsDirectory gpsDir = metadata.getFirstDirectoryOfType(GpsDirectory.class);
if (gpsDir != null) {
GeoLocation loc = gpsDir.getGeoLocation();
if (loc != null) {
@@ -147,14 +185,14 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID(), ExifParserModuleFactory.getModuleName(), longitude));
}
- Rational altitude = gpsDir.getRational(GpsDirectory.TAG_GPS_ALTITUDE);
+ Rational altitude = gpsDir.getRational(GpsDirectory.TAG_ALTITUDE);
if (altitude != null) {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID(), ExifParserModuleFactory.getModuleName(), altitude.doubleValue()));
}
}
// Device info
- ExifIFD0Directory devDir = metadata.getDirectory(ExifIFD0Directory.class);
+ ExifIFD0Directory devDir = metadata.getFirstDirectoryOfType(ExifIFD0Directory.class);
if (devDir != null) {
String model = devDir.getString(ExifIFD0Directory.TAG_MODEL);
if (model != null && !model.isEmpty()) {
@@ -167,6 +205,11 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
}
}
+ if (containsFace(metadata)) {
+ listOfFacesDetectedArtifacts.add(f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_FACE_DETECTED));
+ facesDetected = true;
+ }
+
// Add the attributes, if there are any, to a new artifact
if (!attributes.isEmpty()) {
BlackboardArtifact bba = f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF);
@@ -199,6 +242,121 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
}
}
+ /**
+     * Checks whether this metadata contains any tags related to facial
+     * information. NOTE: metadata containing tags such as enabled red-eye
+     * reduction settings, portrait settings, etc. is also assumed to contain
+     * facial information, and the method returns true for it. The return
+     * value therefore does NOT guarantee the actual presence of a face.
+     *
+     * @param metadata the metadata to be parsed for possible facial
+     *                 information.
+     *
+     * @return true if the metadata contains any tags related to facial
+     *         information.
+ */
+ private boolean containsFace(Metadata metadata) {
+ Directory d = metadata.getFirstDirectoryOfType(CanonMakernoteDirectory.class);
+ if (d != null) {
+ if (d.containsTag(CanonMakernoteDirectory.TAG_FACE_DETECT_ARRAY_1)
+ && d.getString(CanonMakernoteDirectory.TAG_FACE_DETECT_ARRAY_1) != null) {
+ return true;
+ }
+ if (d.containsTag(CanonMakernoteDirectory.TAG_FACE_DETECT_ARRAY_2)
+ && d.getString(CanonMakernoteDirectory.TAG_FACE_DETECT_ARRAY_2) != null) {
+ return true;
+ }
+ }
+
+ d = metadata.getFirstDirectoryOfType(CasioType1MakernoteDirectory.class);
+ if (d != null) {
+ try {
+ if (d.containsTag(CasioType1MakernoteDirectory.TAG_FLASH_MODE)
+ && d.getInt(CasioType1MakernoteDirectory.TAG_FLASH_MODE) == 0x04) { //0x04 = "Red eye reduction"
+ return true;
+ }
+ } catch (MetadataException ex) {
+ // move on and check next directory
+ }
+ }
+
+ d = metadata.getFirstDirectoryOfType(FujifilmMakernoteDirectory.class);
+ if (d != null) {
+ if (d.containsTag(FujifilmMakernoteDirectory.TAG_FACES_DETECTED)
+ && d.getString(FujifilmMakernoteDirectory.TAG_FACES_DETECTED) != null) {
+ return true;
+ }
+ }
+
+ d = metadata.getFirstDirectoryOfType(KodakMakernoteDirectory.class);
+ if (d != null) {
+ try {
+ if (d.containsTag(KodakMakernoteDirectory.TAG_FLASH_MODE)
+ && d.getInt(KodakMakernoteDirectory.TAG_FLASH_MODE) == 0x03) { //0x03 = "Red Eye"
+ return true;
+ }
+ } catch (MetadataException ex) {
+ // move on and check next directory
+ }
+ }
+
+ d = metadata.getFirstDirectoryOfType(NikonType2MakernoteDirectory.class);
+ if (d != null) {
+ if (d.containsTag(NikonType2MakernoteDirectory.TAG_SCENE_MODE)
+ && d.getString(NikonType2MakernoteDirectory.TAG_SCENE_MODE) != null
+ && (d.getString(NikonType2MakernoteDirectory.TAG_SCENE_MODE).equals("BEST FACE") // NON-NLS
+ || (d.getString(NikonType2MakernoteDirectory.TAG_SCENE_MODE).equals("SMILE")))) { // NON-NLS
+ return true;
+ }
+ }
+
+ d = metadata.getFirstDirectoryOfType(PanasonicMakernoteDirectory.class);
+ if (d != null) {
+ if (d.containsTag(PanasonicMakernoteDirectory.TAG_FACES_DETECTED)
+ && d.getString(PanasonicMakernoteDirectory.TAG_FACES_DETECTED) != null) {
+ return true;
+ }
+ }
+
+ d = metadata.getFirstDirectoryOfType(PentaxMakernoteDirectory.class);
+ if (d != null) {
+ try {
+ if (d.containsTag(PentaxMakernoteDirectory.TAG_FLASH_MODE)
+ && d.getInt(PentaxMakernoteDirectory.TAG_FLASH_MODE) == 6) { // 6 = Red-eye Reduction
+ return true;
+ }
+ } catch (MetadataException ex) {
+ // move on and check next directory
+ }
+ }
+
+ d = metadata.getFirstDirectoryOfType(SanyoMakernoteDirectory.class);
+ if (d != null) {
+ if (d.containsTag(SanyoMakernoteDirectory.TAG_MANUAL_FOCUS_DISTANCE_OR_FACE_INFO)
+ && d.getString(SanyoMakernoteDirectory.TAG_MANUAL_FOCUS_DISTANCE_OR_FACE_INFO) != null) {
+ return true;
+ }
+ }
+
+ d = metadata.getFirstDirectoryOfType(SonyType1MakernoteDirectory.class);
+ if (d != null) {
+ try {
+ if (d.containsTag(SonyType1MakernoteDirectory.TAG_AF_MODE)
+ && d.getInt(SonyType1MakernoteDirectory.TAG_AF_MODE) == 15) { //15 = "Face Detected"
+ return true;
+ }
+ if (d.containsTag(SonyType1MakernoteDirectory.TAG_EXPOSURE_MODE)
+ && d.getInt(SonyType1MakernoteDirectory.TAG_EXPOSURE_MODE) == 14) { //14 = "Smile shutter"
+ return true;
+ }
+ } catch (MetadataException ex) {
+ // move on and check next directory
+ }
+ }
+
+ return false;
+ }
+
/**
* Checks if should try to attempt to extract exif. Currently checks if JPEG
* image (by signature)
@@ -225,10 +383,15 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
public void shutDown() {
// We only need to check for this final event on the last module per job
if (refCounter.decrementAndGet(jobId) == 0) {
+ timeZone = null;
if (filesToFire) {
//send the final new data event
services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
}
+ if (facesDetected) {
+ //send the final new data event
+ services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_FACE_DETECTED, listOfFacesDetectedArtifacts));
+ }
}
}
}
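
A standalone harness for exercising the containsFace() heuristic outside Autopsy; the input path is hypothetical, and only the Panasonic check is shown, but the other makernote directories follow the same containsTag()/getString() shape:

```java
import com.drew.imaging.ImageMetadataReader;
import com.drew.metadata.Metadata;
import com.drew.metadata.exif.makernotes.PanasonicMakernoteDirectory;
import java.io.File;

public class FaceTagCheck {
    public static void main(String[] args) throws Exception {
        // "photo.jpg" is a placeholder path for illustration.
        Metadata metadata = ImageMetadataReader.readMetadata(new File("photo.jpg"));
        PanasonicMakernoteDirectory d =
                metadata.getFirstDirectoryOfType(PanasonicMakernoteDirectory.class);
        boolean faceTags = d != null
                && d.containsTag(PanasonicMakernoteDirectory.TAG_FACES_DETECTED)
                && d.getString(PanasonicMakernoteDirectory.TAG_FACES_DETECTED) != null;
        System.out.println("face-related tags present: " + faceTags);
    }
}
```
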
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/Bundle.properties b/Core/src/org/sleuthkit/autopsy/timeline/Bundle.properties
index 7e3fb1cb56..bf26cc976e 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/Bundle.properties
+++ b/Core/src/org/sleuthkit/autopsy/timeline/Bundle.properties
@@ -1,4 +1,3 @@
-OpenIDE-Module-Name=Timeline
CTL_MakeTimeline="Timeline"
CTL_TimeLineTopComponentAction=TimeLineTopComponent
CTL_TimeLineTopComponent=Timeline Window
@@ -11,10 +10,6 @@ Timeline.goToButton.text=Go To\:
Timeline.yearBarChart.x.years=Years
Timeline.resultPanel.loading=Loading...
Timeline.node.root=Root
-Timeline.propChg.confDlg.timelineOOD.msg=The event data is out of date. Would you like to regenerate it?
-Timeline.propChg.confDlg.timelineOOD.details=Timeline
-Timeline.initTimeline.confDlg.genBeforeIngest.msg=You are trying to generate a timeline before ingest has been completed. The timeline may be incomplete. Do you want to continue?
-Timeline.initTimeline.confDlg.genBeforeIngest.details=Timeline
TimelineFrame.title=Timeline
TimelinePanel.jButton1.text=6m
TimelinePanel.jButton13.text=all
@@ -29,18 +24,10 @@ TimelinePanel.jButton7.text=3d
TimelinePanel.jButton2.text=1m
TimelinePanel.jButton3.text=3m
TimelinePanel.jButton4.text=2w
-ProgressWindow.progressHeader.text=\
TimeLineTopComponent.eventsTab.name=Events
TimeLineTopComponent.filterTab.name=Filters
-Timeline.showLastPopulatedWhileIngestingConf.confDlg.details=Timeline
-Timeline.do_repopulate.msg=The Timeline events database was previously populated while ingest was running.\nSome events may not have been populated or may have been populated inaccurately.\nDo you want to repopulate the events database now?
-Timeline.pushDescrLOD.confdlg.msg=You are about to show details for {0} events. This might be very slow or even crash Autopsy.\n\nDo you want to continue?
-Timeline.pushDescrLOD.confdlg.details=
OpenTimelineAction.title=Timeline
-Timeline.ProgressWindow.cancel.confdlg.msg=Do you want to cancel timeline creation?
-Timeline.ProgressWindow.cancel.confdlg.detail=Cancel timeline creation?
-Timeline.progressWindow.name=Timeline
-Timeline.progressWindow.title=Generating Timeline data
OpenTimeLineAction.msgdlg.text=Could not create timeline, there are no data sources.
TimeLineTopComponent.timeZonePanel.text=Display Times In\:
-datasource.missing.confirmation=The Timeline events database was previously populated with an old version of Autopsy.\nThe data source filter will be unavailable unless you update the events database.\nDo you want to update the events database now?
+ProgressWindow.progressHeader.text=\
+
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ProgressWindow.form b/Core/src/org/sleuthkit/autopsy/timeline/ProgressWindow.form
index 6c99e006c8..397abea1b4 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/ProgressWindow.form
+++ b/Core/src/org/sleuthkit/autopsy/timeline/ProgressWindow.form
@@ -54,7 +54,10 @@
+          <Properties>
+            <Property name="minimumSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
+              <Dimension value="[10, 14]"/>
+            </Property>
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ProgressWindow.java b/Core/src/org/sleuthkit/autopsy/timeline/ProgressWindow.java
index 772d1503a2..8f784263be 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/ProgressWindow.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/ProgressWindow.java
@@ -21,25 +21,19 @@ package org.sleuthkit.autopsy.timeline;
import java.awt.Component;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
-import java.awt.event.WindowAdapter;
-import java.awt.event.WindowEvent;
import javax.annotation.concurrent.Immutable;
import javax.swing.AbstractAction;
import javax.swing.ActionMap;
-import javax.swing.GroupLayout;
import javax.swing.InputMap;
import javax.swing.JComponent;
import javax.swing.JFrame;
-import javax.swing.JLabel;
import javax.swing.JOptionPane;
-import javax.swing.JProgressBar;
import javax.swing.KeyStroke;
-import javax.swing.LayoutStyle;
import javax.swing.SwingUtilities;
import javax.swing.SwingWorker;
-import org.openide.awt.Mnemonics;
import org.openide.util.NbBundle;
import org.openide.windows.WindowManager;
+import org.sleuthkit.autopsy.coreutils.ThreadConfined;
/**
* Dialog with progress bar that pops up when timeline is being generated
@@ -51,6 +45,8 @@ public class ProgressWindow extends JFrame {
/**
* Creates new form TimelineProgressDialog
*/
+ @NbBundle.Messages({"Timeline.progressWindow.name=Timeline",
+ "Timeline.progressWindow.title=Generating Timeline data"})
 public ProgressWindow(Component parent, boolean modal, SwingWorker<?, ?> worker) {
super();
initComponents();
@@ -64,9 +60,8 @@ public class ProgressWindow extends JFrame {
setIconImage(WindowManager.getDefault().getMainWindow().getIconImage());
});
- //progressBar.setIndeterminate(true);
- setName(NbBundle.getMessage(TimeLineTopComponent.class, "Timeline.progressWindow.name"));
- setTitle(NbBundle.getMessage(TimeLineTopComponent.class, "Timeline.progressWindow.title"));
+ setName(Bundle.Timeline_progressWindow_name());
+ setTitle(Bundle.Timeline_progressWindow_title());
// Close the dialog when Esc is pressed
String cancelName = "cancel"; // NON-NLS
InputMap inputMap = getRootPane().getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
@@ -83,46 +78,6 @@ public class ProgressWindow extends JFrame {
this.worker = worker;
}
- public void updateProgress(final int progress) {
- SwingUtilities.invokeLater(() -> {
- progressBar.setValue(progress);
- });
- }
-
- public void updateProgress(final int progress, final String message) {
- SwingUtilities.invokeLater(() -> {
- progressBar.setValue(progress);
- progressBar.setString(message);
- });
- }
-
- public void updateProgress(final String message) {
- SwingUtilities.invokeLater(() -> {
- progressBar.setString(message);
- });
- }
-
- public void setProgressTotal(final int total) {
- SwingUtilities.invokeLater(() -> {
- progressBar.setIndeterminate(false);
- progressBar.setMaximum(total);
- progressBar.setStringPainted(true);
- });
- }
-
- public void updateHeaderMessage(final String headerMessage) {
- SwingUtilities.invokeLater(() -> {
- progressHeader.setText(headerMessage);
- });
- }
-
- public void setIndeterminate() {
- SwingUtilities.invokeLater(() -> {
- progressBar.setIndeterminate(true);
- progressBar.setStringPainted(true);
- });
- }
-
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
@@ -132,38 +87,39 @@ public class ProgressWindow extends JFrame {
// //GEN-BEGIN:initComponents
private void initComponents() {
- progressBar = new JProgressBar();
- progressHeader = new JLabel();
+ progressBar = new javax.swing.JProgressBar();
+ progressHeader = new javax.swing.JLabel();
- addWindowListener(new WindowAdapter() {
- public void windowClosing(WindowEvent evt) {
+ addWindowListener(new java.awt.event.WindowAdapter() {
+ public void windowClosing(java.awt.event.WindowEvent evt) {
closeDialog(evt);
}
});
- Mnemonics.setLocalizedText(progressHeader, NbBundle.getMessage(ProgressWindow.class, "ProgressWindow.progressHeader.text")); // NOI18N
+ org.openide.awt.Mnemonics.setLocalizedText(progressHeader, NbBundle.getMessage(ProgressWindow.class, "ProgressWindow.progressHeader.text")); // NOI18N
+ progressHeader.setMinimumSize(new java.awt.Dimension(10, 14));
- GroupLayout layout = new GroupLayout(getContentPane());
+ javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
- layout.createParallelGroup(GroupLayout.Alignment.LEADING)
+ layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
- .addGroup(layout.createParallelGroup(GroupLayout.Alignment.LEADING)
- .addComponent(progressBar, GroupLayout.DEFAULT_SIZE, 504, Short.MAX_VALUE)
+ .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
+ .addComponent(progressBar, javax.swing.GroupLayout.DEFAULT_SIZE, 504, Short.MAX_VALUE)
.addGroup(layout.createSequentialGroup()
- .addComponent(progressHeader)
+ .addComponent(progressHeader, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(0, 0, Short.MAX_VALUE)))
.addContainerGap())
);
layout.setVerticalGroup(
- layout.createParallelGroup(GroupLayout.Alignment.LEADING)
+ layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
- .addComponent(progressHeader)
- .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED)
- .addComponent(progressBar, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
- .addContainerGap(GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
+ .addComponent(progressHeader, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
+ .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
+ .addComponent(progressBar, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
+ .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
pack();
@@ -176,14 +132,14 @@ public class ProgressWindow extends JFrame {
cancel();
}//GEN-LAST:event_closeDialog
+ @NbBundle.Messages({"Timeline.ProgressWindow.cancel.confdlg.msg=Do you want to cancel timeline creation?",
+ "Timeline.ProgressWindow.cancel.confdlg.detail=Cancel timeline creation?"})
public void cancel() {
SwingUtilities.invokeLater(() -> {
if (isVisible()) {
int showConfirmDialog = JOptionPane.showConfirmDialog(ProgressWindow.this,
- NbBundle.getMessage(TimeLineTopComponent.class,
- "Timeline.ProgressWindow.cancel.confdlg.msg"),
- NbBundle.getMessage(TimeLineTopComponent.class,
- "Timeline.ProgressWindow.cancel.confdlg.detail"),
+ Bundle.Timeline_ProgressWindow_cancel_confdlg_msg(),
+ Bundle.Timeline_ProgressWindow_cancel_confdlg_detail(),
JOptionPane.YES_NO_OPTION, JOptionPane.WARNING_MESSAGE);
if (showConfirmDialog == JOptionPane.YES_OPTION) {
close();
@@ -200,18 +156,23 @@ public class ProgressWindow extends JFrame {
dispose();
}
// Variables declaration - do not modify//GEN-BEGIN:variables
- private JProgressBar progressBar;
- private JLabel progressHeader;
+ private javax.swing.JProgressBar progressBar;
+ private javax.swing.JLabel progressHeader;
// End of variables declaration//GEN-END:variables
+ @ThreadConfined(type = ThreadConfined.ThreadType.AWT)
public void update(ProgressUpdate chunk) {
- updateHeaderMessage(chunk.getHeaderMessage());
+ progressHeader.setText(chunk.getHeaderMessage());
if (chunk.getTotal() >= 0) {
- setProgressTotal(chunk.getTotal());
- updateProgress(chunk.getProgress(), chunk.getDetailMessage());
+ progressBar.setIndeterminate(false);
+ progressBar.setMaximum(chunk.getTotal());
+ progressBar.setStringPainted(true);
+ progressBar.setValue(chunk.getProgress());
+ progressBar.setString(chunk.getDetailMessage());
} else {
- setIndeterminate();
- updateProgress(chunk.getDetailMessage());
+ progressBar.setIndeterminate(true);
+ progressBar.setStringPainted(true);
+ progressBar.setString(chunk.getDetailMessage());
}
}
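
With update() now @ThreadConfined to AWT, the natural caller is a SwingWorker whose process() already runs on the EDT. A sketch; the ProgressUpdate constructor arguments here are assumptions, not the class's documented signature:

```java
SwingWorker<Void, ProgressUpdate> worker = new SwingWorker<Void, ProgressUpdate>() {
    @Override
    protected Void doInBackground() {
        // publish() hands chunks to process() on the AWT thread,
        // satisfying update()'s @ThreadConfined contract.
        publish(new ProgressUpdate(0, 100, "header", "starting")); // hypothetical ctor
        return null;
    }

    @Override
    protected void process(java.util.List<ProgressUpdate> chunks) {
        progressWindow.update(chunks.get(chunks.size() - 1)); // latest chunk wins
    }
};
worker.execute();
```
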
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java
index ecf44a0268..eebf0475bb 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java
@@ -34,6 +34,7 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import javafx.application.Platform;
+import javafx.beans.InvalidationListener;
import javafx.beans.Observable;
import javafx.beans.property.ReadOnlyBooleanProperty;
import javafx.beans.property.ReadOnlyBooleanWrapper;
@@ -66,14 +67,15 @@ import static org.sleuthkit.autopsy.casemodule.Case.Events.DATA_SOURCE_ADDED;
import org.sleuthkit.autopsy.coreutils.History;
import org.sleuthkit.autopsy.coreutils.LoggedTask;
import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.coreutils.ThreadConfined;
import org.sleuthkit.autopsy.events.BlackBoardArtifactTagAddedEvent;
import org.sleuthkit.autopsy.events.BlackBoardArtifactTagDeletedEvent;
import org.sleuthkit.autopsy.events.ContentTagAddedEvent;
import org.sleuthkit.autopsy.events.ContentTagDeletedEvent;
import org.sleuthkit.autopsy.ingest.IngestManager;
-import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
-import org.sleuthkit.autopsy.timeline.events.db.EventsRepository;
-import org.sleuthkit.autopsy.timeline.events.type.EventType;
+import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
+import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
+import org.sleuthkit.autopsy.timeline.db.EventsRepository;
import org.sleuthkit.autopsy.timeline.filters.RootFilter;
import org.sleuthkit.autopsy.timeline.filters.TypeFilter;
import org.sleuthkit.autopsy.timeline.utils.IntervalUtils;
@@ -99,13 +101,11 @@ import org.sleuthkit.datamodel.TskCoreException;
*
*
*/
+@NbBundle.Messages({"Timeline.confirmation.dialogs.title=Update Timeline database?"})
public class TimeLineController {
private static final Logger LOGGER = Logger.getLogger(TimeLineController.class.getName());
- private static final String DO_REPOPULATE_MESSAGE = NbBundle.getMessage(TimeLineController.class,
- "Timeline.do_repopulate.msg");
-
 private static final ReadOnlyObjectWrapper<TimeZone> timeZone = new ReadOnlyObjectWrapper<>(TimeZone.getDefault());
public static ZoneId getTimeZoneID() {
@@ -189,6 +189,9 @@ public class TimeLineController {
@GuardedBy("this")
 private final History<ZoomParams> historyManager = new History<>();
+ @GuardedBy("this")
+ private final ReadOnlyObjectWrapper<ZoomParams> currentParams = new ReadOnlyObjectWrapper<>();
+
//all members should be access with the intrinsict lock of this object held
//selected events (ie shown in the result viewer)
@GuardedBy("this")
@@ -231,16 +234,30 @@ public class TimeLineController {
private final ReadOnlyBooleanWrapper newEventsFlag = new ReadOnlyBooleanWrapper(false);
public TimeLineController(Case autoCase) {
- this.autoCase = autoCase; //initalize repository and filteredEvents on creation
- eventsRepository = new EventsRepository(autoCase, historyManager.currentState());
+ this.autoCase = autoCase;
+ /*
+ * as the history manager's current state changes, modify the tags
+ * filter to be in sync, and expose that as a property from
+ * TimeLineController. Do we need to do this with datasource or hash hit
+ * filters?
+ */
+ historyManager.currentState().addListener(new InvalidationListener() {
+ public void invalidated(Observable observable) {
+ ZoomParams historyManagerParams = historyManager.getCurrentState();
+ eventsRepository.syncTagsFilter(historyManagerParams.getFilter().getTagsFilter());
+ currentParams.set(historyManagerParams);
+ }
+ });
+
+ eventsRepository = new EventsRepository(autoCase, currentParams.getReadOnlyProperty());
filteredEvents = eventsRepository.getEventsModel();
+
InitialZoomState = new ZoomParams(filteredEvents.getSpanningInterval(),
EventTypeZoomLevel.BASE_TYPE,
- filteredEvents.filter().get(),
+ filteredEvents.filterProperty().get(),
DescriptionLOD.SHORT);
historyManager.advance(InitialZoomState);
-
}
/**
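
The anonymous InvalidationListener in the constructor above is a single-method interface, so the same sync logic could equally be a lambda; a sketch:

```java
// Keep the repository's tags filter and the published currentParams in
// step with whatever state the history manager currently holds.
historyManager.currentState().addListener((Observable observable) -> {
    ZoomParams params = historyManager.getCurrentState();
    eventsRepository.syncTagsFilter(params.getFilter().getTagsFilter());
    currentParams.set(params);
});
```
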
@@ -256,7 +273,7 @@ public class TimeLineController {
public void zoomOutToActivity() {
Interval boundingEventsInterval = filteredEvents.getBoundingEventsInterval();
- advance(filteredEvents.getRequestedZoomParamters().get().withTimeRange(boundingEventsInterval));
+ advance(filteredEvents.zoomParametersProperty().get().withTimeRange(boundingEventsInterval));
}
/**
@@ -287,7 +304,6 @@ public class TimeLineController {
//TODO: verify this locking is correct? -jm
synchronized (eventsRepository) {
eventsRepository.rebuildRepository(() -> {
-
synchronized (eventsRepository) {
eventsRepository.recordLastObjID(lastObjId);
eventsRepository.recordLastArtifactID(lastArtfID);
@@ -303,7 +319,7 @@ public class TimeLineController {
Platform.runLater(() -> {
//TODO: should this be an event?
newEventsFlag.set(false);
- historyManager.reset(filteredEvents.getRequestedZoomParamters().get());
+ historyManager.reset(filteredEvents.zoomParametersProperty().get());
TimeLineController.this.showFullRange();
});
});
@@ -315,6 +331,28 @@ public class TimeLineController {
return true;
}
+ /**
+ * Since tags might have changed while TimeLine wasn't listening, drop the
+ * tags table and rebuild it by querying for all the tags and inserting them
+ * in to the TimeLine DB.
+ */
+ void rebuildTagsTable() {
+ LOGGER.log(Level.INFO, "starting to rebuild tags table"); // NON-NLS
+ SwingUtilities.invokeLater(() -> {
+ if (isWindowOpen()) {
+ mainFrame.close();
+ }
+ });
+ synchronized (eventsRepository) {
+ eventsRepository.rebuildTags(() -> {
+ showWindow();
+ Platform.runLater(() -> {
+ showFullRange();
+ });
+ });
+ }
+ }
+
public void showFullRange() {
synchronized (filteredEvents) {
pushTimeRange(filteredEvents.getSpanningInterval());
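
rebuildTagsTable() above threads through three contexts; the choreography, annotated (same calls as the patch):

```java
// 1. Swing (EDT): close the top component if it is showing.
SwingUtilities.invokeLater(() -> {
    if (isWindowOpen()) {
        mainFrame.close();
    }
});
// 2. Caller's thread: rebuild the tags table under the repository lock.
synchronized (eventsRepository) {
    eventsRepository.rebuildTags(() -> {
        // 3. Completion callback: reopen the window, then refresh the
        //    JavaFX view on the FX application thread.
        showWindow();
        Platform.runLater(this::showFullRange);
    });
}
```
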
@@ -337,7 +375,6 @@ public class TimeLineController {
* show the timeline window and prompt for rebuilding database if necessary.
*/
synchronized void openTimeLine() {
-
// listen for case changes (specifically images being added, and case changes).
if (Case.isCaseOpen() && !listeningToAutopsy) {
IngestManager.getInstance().addIngestModuleEventListener(ingestModuleListener);
@@ -347,13 +384,16 @@ public class TimeLineController {
}
try {
+ boolean repoRebuilt = false; //has the repo been rebuilt
long timeLineLastObjectId = eventsRepository.getLastObjID();
- boolean repoRebuilt = false;
+ //if the repo is empty, rebuild it
if (timeLineLastObjectId == -1) {
repoRebuilt = rebuildRepo();
}
+
if (repoRebuilt == false) {
+ //if ingest was running during the last rebuild, prompt to rebuild
if (eventsRepository.getWasIngestRunning()) {
if (confirmLastBuiltDuringIngestRebuild()) {
repoRebuilt = rebuildRepo();
@@ -363,6 +403,7 @@ public class TimeLineController {
if (repoRebuilt == false) {
final SleuthkitCase sleuthkitCase = autoCase.getSleuthkitCase();
+ //if the last artifact and object IDs don't match between the case database and the timeline database, prompt to rebuild
if (sleuthkitCase.getLastObjectId() != timeLineLastObjectId
|| getCaseLastArtifactID(sleuthkitCase) != eventsRepository.getLastArtfactID()) {
if (confirmOutOfDateRebuild()) {
@@ -372,8 +413,8 @@ public class TimeLineController {
}
if (repoRebuilt == false) {
- boolean hasDSInfo = eventsRepository.hasDataSourceInfo();
- if (hasDSInfo == false) {
+ // if the timeline db schema has been upgraded since the last time Timeline ran, prompt for rebuild
+ if (eventsRepository.hasNewColumns() == false) {
if (confirmDataSourceIDsMissingRebuild()) {
repoRebuilt = rebuildRepo();
}
@@ -381,12 +422,11 @@ public class TimeLineController {
}
/*
- * if the repo was not rebuilt show the UI. If the repo was rebuild
- * it will be displayed as part of that process
+ * if the repo was not rebuilt, at minimum rebuild the tags, which may
+ * have been updated without our knowledge.
*/
if (repoRebuilt == false) {
- showWindow();
- showFullRange();
+ rebuildTagsTable();
}
} catch (TskCoreException ex) {
@@ -419,13 +459,13 @@ public class TimeLineController {
*/
synchronized public void pushPeriod(ReadablePeriod period) {
synchronized (filteredEvents) {
- final DateTime middleOf = IntervalUtils.middleOf(filteredEvents.timeRange().get());
+ final DateTime middleOf = IntervalUtils.middleOf(filteredEvents.timeRangeProperty().get());
pushTimeRange(IntervalUtils.getIntervalAround(middleOf, period));
}
}
synchronized public void pushZoomOutTime() {
- final Interval timeRange = filteredEvents.timeRange().get();
+ final Interval timeRange = filteredEvents.timeRangeProperty().get();
long toDurationMillis = timeRange.toDurationMillis() / 4;
DateTime start = timeRange.getStart().minus(toDurationMillis);
DateTime end = timeRange.getEnd().plus(toDurationMillis);
@@ -433,7 +473,7 @@ public class TimeLineController {
}
synchronized public void pushZoomInTime() {
- final Interval timeRange = filteredEvents.timeRange().get();
+ final Interval timeRange = filteredEvents.timeRangeProperty().get();
long toDurationMillis = timeRange.toDurationMillis() / 4;
DateTime start = timeRange.getStart().plus(toDurationMillis);
DateTime end = timeRange.getEnd().minus(toDurationMillis);
@@ -481,7 +521,7 @@ public class TimeLineController {
if (mainFrame == null) {
LOGGER.log(Level.WARNING, "Tried to show timeline with invalid window. Rebuilding GUI."); // NON-NLS
mainFrame = (TimeLineTopComponent) WindowManager.getDefault().findTopComponent(
- NbBundle.getMessage(TimeLineTopComponent.class, "CTL_TimeLineTopComponentAction"));
+ NbBundle.getMessage(TimeLineController.class, "CTL_TimeLineTopComponentAction"));
if (mainFrame == null) {
mainFrame = new TimeLineTopComponent();
}
@@ -495,7 +535,7 @@ public class TimeLineController {
}
synchronized public void pushEventTypeZoom(EventTypeZoomLevel typeZoomeLevel) {
- ZoomParams currentZoom = filteredEvents.getRequestedZoomParamters().get();
+ ZoomParams currentZoom = filteredEvents.zoomParametersProperty().get();
if (currentZoom == null) {
advance(InitialZoomState.withTypeZoomLevel(typeZoomeLevel));
} else if (currentZoom.hasTypeZoomLevel(typeZoomeLevel) == false) {
@@ -505,7 +545,7 @@ public class TimeLineController {
synchronized public void pushTimeRange(Interval timeRange) {
// timeRange = this.filteredEvents.getSpanningInterval().overlap(timeRange);
- ZoomParams currentZoom = filteredEvents.getRequestedZoomParamters().get();
+ ZoomParams currentZoom = filteredEvents.zoomParametersProperty().get();
if (currentZoom == null) {
advance(InitialZoomState.withTimeRange(timeRange));
} else if (currentZoom.hasTimeRange(timeRange) == false) {
@@ -513,26 +553,27 @@ public class TimeLineController {
}
}
+ @NbBundle.Messages({"# {0} - the number of events",
+ "Timeline.pushDescrLOD.confdlg.msg=You are about to show details for {0} events."
+ + " This might be very slow or even crash Autopsy.\n\nDo you want to continue?"})
synchronized public boolean pushDescrLOD(DescriptionLOD newLOD) {
- Map<EventType, Long> eventCounts = filteredEvents.getEventCounts(filteredEvents.getRequestedZoomParamters().get().getTimeRange());
+ Map<EventType, Long> eventCounts = filteredEvents.getEventCounts(filteredEvents.zoomParametersProperty().get().getTimeRange());
final Long count = eventCounts.values().stream().reduce(0l, Long::sum);
boolean shouldContinue = true;
if (newLOD == DescriptionLOD.FULL && count > 10_000) {
+ String format = NumberFormat.getInstance().format(count);
int showConfirmDialog = JOptionPane.showConfirmDialog(mainFrame,
- NbBundle.getMessage(this.getClass(),
- "Timeline.pushDescrLOD.confdlg.msg",
- NumberFormat.getInstance().format(count)),
- NbBundle.getMessage(TimeLineTopComponent.class,
- "Timeline.pushDescrLOD.confdlg.details"),
+ Bundle.Timeline_pushDescrLOD_confdlg_msg(format),
+ Bundle.Timeline_confirmation_dialogs_title(),
JOptionPane.YES_NO_OPTION);
shouldContinue = (showConfirmDialog == JOptionPane.YES_OPTION);
}
if (shouldContinue) {
- ZoomParams currentZoom = filteredEvents.getRequestedZoomParamters().get();
+ ZoomParams currentZoom = filteredEvents.zoomParametersProperty().get();
if (currentZoom == null) {
advance(InitialZoomState.withDescrLOD(newLOD));
} else if (currentZoom.hasDescrLOD(newLOD) == false) {
@@ -544,7 +585,7 @@ public class TimeLineController {
synchronized public void pushTimeAndType(Interval timeRange, EventTypeZoomLevel typeZoom) {
// timeRange = this.filteredEvents.getSpanningInterval().overlap(timeRange);
- ZoomParams currentZoom = filteredEvents.getRequestedZoomParamters().get();
+ ZoomParams currentZoom = filteredEvents.zoomParametersProperty().get();
if (currentZoom == null) {
advance(InitialZoomState.withTimeAndType(timeRange, typeZoom));
} else if (currentZoom.hasTimeRange(timeRange) == false && currentZoom.hasTypeZoomLevel(typeZoom) == false) {
@@ -557,7 +598,7 @@ public class TimeLineController {
}
synchronized public void pushFilters(RootFilter filter) {
- ZoomParams currentZoom = filteredEvents.getRequestedZoomParamters().get();
+ ZoomParams currentZoom = filteredEvents.zoomParametersProperty().get();
if (currentZoom == null) {
advance(InitialZoomState.withFilter(filter.copyOf()));
} else if (currentZoom.hasFilter(filter) == false) {
@@ -565,17 +606,17 @@ public class TimeLineController {
}
}
- synchronized public ZoomParams advance() {
- return historyManager.advance();
-
+ synchronized public void advance() {
+ historyManager.advance();
}
- synchronized public ZoomParams retreat() {
- return historyManager.retreat();
+ synchronized public void retreat() {
+ historyManager.retreat();
}
synchronized private void advance(ZoomParams newState) {
historyManager.advance(newState);
+
}
public void selectTimeAndType(Interval interval, EventType type) {
@@ -683,16 +724,36 @@ public class TimeLineController {
return mainFrame != null && mainFrame.isOpened() && mainFrame.isVisible();
}
+ /**
+ * prompt the user to rebuild the db, because it is out of date and
+ * doesn't include things from subsequent ingests, ONLY IF THE TIMELINE
+ * WINDOW IS OPEN, and rebuild the db if they agree
+ */
+ @ThreadConfined(type = ThreadConfined.ThreadType.AWT)
+ private void confirmOutOfDateRebuildIfWindowOpen() throws MissingResourceException, HeadlessException {
+ if (isWindowOpen()) {
+ if (confirmOutOfDateRebuild()) {
+ rebuildRepo();
+ }
+ }
+ }
+
/**
 * prompt the user to rebuild the db because datasource_ids are missing
 * from the database and the datasource filter will not work
*
* @return true if they agree to rebuild
*/
+ @ThreadConfined(type = ThreadConfined.ThreadType.AWT)
+ @NbBundle.Messages({"datasource.missing.confirmation=The Timeline events database was previously populated with an old version of Autopsy."
+ + "\nThe data source filter will be unavailable unless you update the events database."
+ + "\nDo you want to update the events database now?"})
synchronized boolean confirmDataSourceIDsMissingRebuild() {
return JOptionPane.showConfirmDialog(mainFrame,
- NbBundle.getMessage(TimeLineController.class, "datasource.missing.confirmation"),
- "Update Timeline database?",
+ Bundle.datasource_missing_confirmation(),
+ Bundle.Timeline_confirmation_dialogs_title(),
JOptionPane.YES_NO_OPTION,
JOptionPane.QUESTION_MESSAGE) == JOptionPane.YES_OPTION;
}
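
The Bundle.* calls introduced above come from the @NbBundle.Messages annotations: the NetBeans annotation processor turns each "key=value" entry into a static method on a generated Bundle class, with dots in the key becoming underscores and {0}-style placeholders becoming parameters. A minimal sketch of the pattern, using a hypothetical key (it compiles only with the NetBeans annotation processor on the classpath):

    import org.openide.util.NbBundle;

    class BundleSketch {

        @NbBundle.Messages({"Timeline.example.msg=Processed {0} events."})
        String describe(long count) {
            // the processor generates Bundle.Timeline_example_msg(Object) from the key
            // above, the same way Bundle.Timeline_pushDescrLOD_confdlg_msg(format) and
            // Bundle.datasource_missing_confirmation() are produced in this patch
            return Bundle.Timeline_example_msg(count);
        }
    }
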
@@ -703,11 +764,14 @@ public class TimeLineController {
*
* @return true if they agree to rebuild
*/
+ @ThreadConfined(type = ThreadConfined.ThreadType.AWT)
+ @NbBundle.Messages({"Timeline.do_repopulate.msg=The Timeline events database was previously populated while ingest was running."
+ + "\nSome events may not have been populated or may have been populated inaccurately."
+ + "\nDo you want to repopulate the events database now?"})
synchronized boolean confirmLastBuiltDuringIngestRebuild() {
return JOptionPane.showConfirmDialog(mainFrame,
- DO_REPOPULATE_MESSAGE,
- NbBundle.getMessage(TimeLineTopComponent.class,
- "Timeline.showLastPopulatedWhileIngestingConf.confDlg.details"),
+ Bundle.Timeline_do_repopulate_msg(),
+ Bundle.Timeline_confirmation_dialogs_title(),
JOptionPane.YES_NO_OPTION,
JOptionPane.QUESTION_MESSAGE) == JOptionPane.YES_OPTION;
}
@@ -718,12 +782,12 @@ public class TimeLineController {
*
* @return true if they agree to rebuild
*/
+ @ThreadConfined(type = ThreadConfined.ThreadType.AWT)
+ @NbBundle.Messages({"Timeline.propChg.confDlg.timelineOOD.msg=The event data is out of date. Would you like to regenerate it?",})
synchronized boolean confirmOutOfDateRebuild() throws MissingResourceException, HeadlessException {
return JOptionPane.showConfirmDialog(mainFrame,
- NbBundle.getMessage(TimeLineController.class,
- "Timeline.propChg.confDlg.timelineOOD.msg"),
- NbBundle.getMessage(TimeLineController.class,
- "Timeline.propChg.confDlg.timelineOOD.details"),
+ Bundle.Timeline_propChg_confDlg_timelineOOD_msg(),
+ Bundle.Timeline_confirmation_dialogs_title(),
JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION;
}
@@ -733,12 +797,13 @@ public class TimeLineController {
*
* @return true if they want to continue anyways
*/
+ @ThreadConfined(type = ThreadConfined.ThreadType.AWT)
+ @NbBundle.Messages({"Timeline.initTimeline.confDlg.genBeforeIngest.msg=You are trying to generate a timeline before ingest has been completed. "
+ + "The timeline may be incomplete. Do you want to continue?"})
synchronized boolean confirmRebuildDuringIngest() throws MissingResourceException, HeadlessException {
return JOptionPane.showConfirmDialog(mainFrame,
- NbBundle.getMessage(TimeLineController.class,
- "Timeline.initTimeline.confDlg.genBeforeIngest.msg"),
- NbBundle.getMessage(TimeLineController.class,
- "Timeline.initTimeline.confDlg.genBeforeIngest.details"),
+ Bundle.Timeline_initTimeline_confDlg_genBeforeIngest_msg(),
+ Bundle.Timeline_confirmation_dialogs_title(),
JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION;
}
@@ -748,16 +813,9 @@ public class TimeLineController {
public void propertyChange(PropertyChangeEvent evt) {
switch (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName())) {
case CONTENT_CHANGED:
-// ((ModuleContentEvent)evt.getOldValue())????
- //ModuleContentEvent doesn't seem to provide any usefull information...
- break;
case DATA_ADDED:
-// Collection artifacts = ((ModuleDataEvent) evt.getOldValue()).getArtifacts();
- //new artifacts, insert them into db
break;
case FILE_DONE:
-// Long fileID = (Long) evt.getOldValue();
- //update file (known status) for file with id
Platform.runLater(() -> {
newEventsFlag.set(true);
});
@@ -774,14 +832,7 @@ public class TimeLineController {
switch (IngestManager.IngestJobEvent.valueOf(evt.getPropertyName())) {
case CANCELLED:
case COMPLETED:
- //if we are doing incremental updates, drop this
- SwingUtilities.invokeLater(() -> {
- if (isWindowOpen()) {
- if (confirmOutOfDateRebuild()) {
- rebuildRepo();
- }
- }
- });
+ SwingUtilities.invokeLater(TimeLineController.this::confirmOutOfDateRebuildIfWindowOpen);
}
}
}
@@ -793,30 +844,30 @@ public class TimeLineController {
public void propertyChange(PropertyChangeEvent evt) {
switch (Case.Events.valueOf(evt.getPropertyName())) {
case BLACKBOARD_ARTIFACT_TAG_ADDED:
- filteredEvents.handleTagAdded((BlackBoardArtifactTagAddedEvent) evt);
- break;
- case BLACKBOARD_ARTIFACT_TAG_DELETED:
- filteredEvents.handleTagDeleted((BlackBoardArtifactTagDeletedEvent) evt);
- break;
- case CONTENT_TAG_ADDED:
- filteredEvents.handleTagAdded((ContentTagAddedEvent) evt);
- break;
- case CONTENT_TAG_DELETED:
- filteredEvents.handleTagDeleted((ContentTagDeletedEvent) evt);
- break;
- case DATA_SOURCE_ADDED:
-// Content content = (Content) evt.getNewValue();
- //if we are doing incremental updates, drop this
- SwingUtilities.invokeLater(() -> {
- if (isWindowOpen()) {
- if (confirmOutOfDateRebuild()) {
- rebuildRepo();
- }
- }
+ executor.submit(() -> {
+ filteredEvents.handleArtifactTagAdded((BlackBoardArtifactTagAddedEvent) evt);
});
break;
+ case BLACKBOARD_ARTIFACT_TAG_DELETED:
+ executor.submit(() -> {
+ filteredEvents.handleArtifactTagDeleted((BlackBoardArtifactTagDeletedEvent) evt);
+ });
+ break;
+ case CONTENT_TAG_ADDED:
+ executor.submit(() -> {
+ filteredEvents.handleContentTagAdded((ContentTagAddedEvent) evt);
+ });
+ break;
+ case CONTENT_TAG_DELETED:
+ executor.submit(() -> {
+ filteredEvents.handleContentTagDeleted((ContentTagDeletedEvent) evt);
+ });
+ break;
+ case DATA_SOURCE_ADDED:
+ SwingUtilities.invokeLater(TimeLineController.this::confirmOutOfDateRebuildIfWindowOpen);
+ break;
case CURRENT_CASE:
- OpenTimelineAction.invalidateController();
+ OpenTimelineAction.invalidateController();
SwingUtilities.invokeLater(TimeLineController.this::closeTimeLine);
break;
}
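
The propertyChange listeners above split work by thread: tag bookkeeping, which does slow SQLite writes, is submitted to the controller's executor, while anything that may pop a dialog is marshalled onto the AWT event dispatch thread with SwingUtilities.invokeLater. A self-contained sketch of that dispatch pattern (class and member names here are hypothetical, not the controller's API):

    import java.beans.PropertyChangeEvent;
    import java.beans.PropertyChangeListener;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import javax.swing.SwingUtilities;

    class DispatchSketch implements PropertyChangeListener {

        // a single worker keeps db updates ordered, like the controller's executor
        private final ExecutorService executor = Executors.newSingleThreadExecutor();

        @Override
        public void propertyChange(PropertyChangeEvent evt) {
            switch (evt.getPropertyName()) {
                case "CONTENT_TAG_ADDED":
                    executor.submit(() -> updateEventsDB(evt)); // slow SQLite work off the EDT
                    break;
                case "DATA_SOURCE_ADDED":
                    SwingUtilities.invokeLater(this::promptUser); // dialogs must run on the EDT
                    break;
            }
        }

        private void updateEventsDB(PropertyChangeEvent evt) { /* write tag rows, post a bus event */ }

        private void promptUser() { /* JOptionPane confirm, then rebuild */ }
    }
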
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineView.java b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineView.java
index b6ea9b87db..4c880c9cf0 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineView.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineView.java
@@ -24,7 +24,7 @@ package org.sleuthkit.autopsy.timeline;
* Most implementations should install the relevant listeners in their
* {@link #setController} and {@link #setModel} methods
*/
-import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
+import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
public interface TimeLineView extends TimeLineUI {
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/actions/ResetFilters.java b/Core/src/org/sleuthkit/autopsy/timeline/actions/ResetFilters.java
index 23fe416f01..c869c871c8 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/actions/ResetFilters.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/actions/ResetFilters.java
@@ -23,7 +23,7 @@ import javafx.event.ActionEvent;
import org.controlsfx.control.action.Action;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.TimeLineController;
-import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
+import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
/**
* Action that resets the filters to their initial/default state.
@@ -37,12 +37,12 @@ public class ResetFilters extends Action {
eventsModel = controller.getEventsModel();
disabledProperty().bind(new BooleanBinding() {
{
- bind(eventsModel.getRequestedZoomParamters());
+ bind(eventsModel.zoomParametersProperty());
}
@Override
protected boolean computeValue() {
- return eventsModel.getRequestedZoomParamters().getValue().getFilter().equals(eventsModel.getDefaultFilter());
+ return eventsModel.zoomParametersProperty().getValue().getFilter().equals(eventsModel.getDefaultFilter());
}
});
setEventHandler((ActionEvent t) -> {
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/actions/SaveSnapshot.java b/Core/src/org/sleuthkit/autopsy/timeline/actions/SaveSnapshot.java
index 528a44bacf..62305b4c79 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/actions/SaveSnapshot.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/actions/SaveSnapshot.java
@@ -82,9 +82,9 @@ public class SaveSnapshot extends Action {
reportMetaData.add(new Pair<>("Case", Case.getCurrentCase().getName())); // NON-NLS
- ZoomParams get = controller.getEventsModel().getRequestedZoomParamters().get();
+ ZoomParams get = controller.getEventsModel().zoomParametersProperty().get();
reportMetaData.add(new Pair<>("Time Range", get.getTimeRange().toString())); // NON-NLS
- reportMetaData.add(new Pair<>("Description Level of Detail", get.getDescrLOD().getDisplayName())); // NON-NLS
+ reportMetaData.add(new Pair<>("Description Level of Detail", get.getDescriptionLOD().getDisplayName())); // NON-NLS
reportMetaData.add(new Pair<>("Event Type Zoom Level", get.getTypeZoomLevel().getDisplayName())); // NON-NLS
reportMetaData.add(new Pair<>("Filters", get.getFilter().getHTMLReportString())); // NON-NLS
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/actions/ZoomOut.java b/Core/src/org/sleuthkit/autopsy/timeline/actions/ZoomOut.java
index f5e9bd61cc..ee362dfc40 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/actions/ZoomOut.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/actions/ZoomOut.java
@@ -23,7 +23,7 @@ import javafx.event.ActionEvent;
import org.controlsfx.control.action.Action;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.TimeLineController;
-import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
+import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
/**
*
@@ -40,12 +40,12 @@ public class ZoomOut extends Action {
eventsModel = controller.getEventsModel();
disabledProperty().bind(new BooleanBinding() {
{
- bind(eventsModel.getRequestedZoomParamters());
+ bind(eventsModel.zoomParametersProperty());
}
@Override
protected boolean computeValue() {
- return eventsModel.getRequestedZoomParamters().getValue().getTimeRange().contains(eventsModel.getSpanningInterval());
+ return eventsModel.zoomParametersProperty().getValue().getTimeRange().contains(eventsModel.getSpanningInterval());
}
});
setEventHandler((ActionEvent t) -> {
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/AggregateEvent.java
similarity index 68%
rename from Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java
rename to Core/src/org/sleuthkit/autopsy/timeline/datamodel/AggregateEvent.java
index 91cfcb5aaa..6ca47c55be 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/AggregateEvent.java
@@ -16,14 +16,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.sleuthkit.autopsy.timeline.events;
+package org.sleuthkit.autopsy.timeline.datamodel;
import com.google.common.collect.Sets;
import java.util.Collections;
import java.util.Set;
import javax.annotation.concurrent.Immutable;
import org.joda.time.Interval;
-import org.sleuthkit.autopsy.timeline.events.type.EventType;
+import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
import org.sleuthkit.autopsy.timeline.utils.IntervalUtils;
import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD;
@@ -138,44 +138,4 @@ public class AggregateEvent {
return new AggregateEvent(IntervalUtils.span(aggEvent1.span, ag2.span), aggEvent1.getType(), idsUnion, hashHitsUnion, taggedUnion, aggEvent1.getDescription(), aggEvent1.lod);
}
-
- /**
- * get an AggregateEvent the same as this one but with the given eventIDs
- * removed from the list of tagged events
- *
- * @param unTaggedIDs
- *
- * @return a new Aggregate event that is the same as this one but with the
- * given event Ids removed from the list of tagged ids, or, this
- * AggregateEvent if no event ids would be removed
- */
- public AggregateEvent withTagsRemoved(Set<Long> unTaggedIDs) {
- Sets.SetView<Long> stillTagged = Sets.difference(tagged, unTaggedIDs);
- if (stillTagged.size() < tagged.size()) {
- return new AggregateEvent(span, type, eventIDs, hashHits, stillTagged.immutableCopy(), description, lod);
- }
- return this; //no change
- }
-
- /**
- * get an AggregateEvent the same as this one but with the given eventIDs
- * added to the list of tagged events if there are part of this Aggregate
- *
- * @param taggedIDs
- *
- * @return a new Aggregate event that is the same as this one but with the
- * given event Ids added to the list of tagged ids, or, this
- * AggregateEvent if no event ids would be added
- */
- public AggregateEvent withTagsAdded(Set<Long> taggedIDs) {
- Sets.SetView<Long> taggedIdsInAgg = Sets.intersection(eventIDs, taggedIDs);//events that are in this aggregate and (newly) marked as tagged
- if (taggedIdsInAgg.size() > 0) {
- Sets.SetView<Long> notYetIncludedTagged = Sets.difference(taggedIdsInAgg, tagged); // events that are tagged, but not already marked as tagged in this Agg
- if (notYetIncludedTagged.size() > 0) {
- return new AggregateEvent(span, type, eventIDs, hashHits, Sets.union(tagged, taggedIdsInAgg).immutableCopy(), description, lod);
- }
- }
-
- return this; //no change
- }
}
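
The withTagsRemoved/withTagsAdded helpers deleted above used Guava's lazy set views to build a new immutable event only when the tagged-id set actually changed (tag state now lives in the events database instead). A stand-alone sketch of that copy-on-write idiom, with hypothetical names:

    import com.google.common.collect.ImmutableSet;
    import com.google.common.collect.Sets;
    import java.util.Set;

    class TaggedSetSketch {

        private final ImmutableSet<Long> tagged;

        TaggedSetSketch(ImmutableSet<Long> tagged) {
            this.tagged = tagged;
        }

        TaggedSetSketch withTagsRemoved(Set<Long> unTaggedIDs) {
            Sets.SetView<Long> stillTagged = Sets.difference(tagged, unTaggedIDs); // lazy view, no copy yet
            if (stillTagged.size() < tagged.size()) {
                return new TaggedSetSketch(stillTagged.immutableCopy()); // copy only when something changed
            }
            return this; // nothing removed: reuse this immutable instance
        }

        public static void main(String[] args) {
            TaggedSetSketch sketch = new TaggedSetSketch(ImmutableSet.of(1L, 2L, 3L));
            System.out.println(sketch.withTagsRemoved(ImmutableSet.of(2L)).tagged); // [1, 3]
        }
    }
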
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/FilteredEventsModel.java
similarity index 71%
rename from Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java
rename to Core/src/org/sleuthkit/autopsy/timeline/datamodel/FilteredEventsModel.java
index 07915a15fa..3de6dacc61 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/FilteredEventsModel.java
@@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.sleuthkit.autopsy.timeline.events;
+package org.sleuthkit.autopsy.timeline.datamodel;
import com.google.common.eventbus.EventBus;
import java.util.Collection;
@@ -27,6 +27,7 @@ import java.util.logging.Level;
import javafx.beans.Observable;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.property.ReadOnlyObjectWrapper;
+import javafx.collections.ListChangeListener;
import javafx.collections.MapChangeListener;
import javax.annotation.concurrent.GuardedBy;
import org.joda.time.DateTimeZone;
@@ -38,9 +39,11 @@ import org.sleuthkit.autopsy.events.BlackBoardArtifactTagDeletedEvent;
import org.sleuthkit.autopsy.events.ContentTagAddedEvent;
import org.sleuthkit.autopsy.events.ContentTagDeletedEvent;
import org.sleuthkit.autopsy.timeline.TimeLineView;
-import org.sleuthkit.autopsy.timeline.events.db.EventsRepository;
-import org.sleuthkit.autopsy.timeline.events.type.EventType;
-import org.sleuthkit.autopsy.timeline.events.type.RootEventType;
+import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
+import org.sleuthkit.autopsy.timeline.datamodel.eventtype.RootEventType;
+import org.sleuthkit.autopsy.timeline.db.EventsRepository;
+import org.sleuthkit.autopsy.timeline.events.RefreshRequestedEvent;
+import org.sleuthkit.autopsy.timeline.events.TagsUpdatedEvent;
import org.sleuthkit.autopsy.timeline.filters.DataSourceFilter;
import org.sleuthkit.autopsy.timeline.filters.DataSourcesFilter;
import org.sleuthkit.autopsy.timeline.filters.Filter;
@@ -48,13 +51,18 @@ import org.sleuthkit.autopsy.timeline.filters.HashHitsFilter;
import org.sleuthkit.autopsy.timeline.filters.HashSetFilter;
import org.sleuthkit.autopsy.timeline.filters.HideKnownFilter;
import org.sleuthkit.autopsy.timeline.filters.RootFilter;
+import org.sleuthkit.autopsy.timeline.filters.TagNameFilter;
+import org.sleuthkit.autopsy.timeline.filters.TagsFilter;
import org.sleuthkit.autopsy.timeline.filters.TextFilter;
import org.sleuthkit.autopsy.timeline.filters.TypeFilter;
import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD;
import org.sleuthkit.autopsy.timeline.zooming.EventTypeZoomLevel;
import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
import org.sleuthkit.datamodel.BlackboardArtifact;
+import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.ContentTag;
+import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskCoreException;
/**
@@ -83,7 +91,6 @@ public final class FilteredEventsModel {
private static final Logger LOGGER = Logger.getLogger(FilteredEventsModel.class.getName());
-
/**
* time range that spans the filtered events
*/
@@ -113,40 +120,25 @@ public final class FilteredEventsModel {
private final EventsRepository repo;
private final Case autoCase;
- /**
- * @return the default filter used at startup
- */
- public RootFilter getDefaultFilter() {
- DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();
-
- repo.getDatasourcesMap().entrySet().stream().forEach((Map.Entry<Long, String> t) -> {
- DataSourceFilter dataSourceFilter = new DataSourceFilter(t.getValue(), t.getKey());
- dataSourceFilter.setSelected(Boolean.TRUE);
- dataSourcesFilter.addDataSourceFilter(dataSourceFilter);
- });
-
- HashHitsFilter hashHitsFilter = new HashHitsFilter();
- repo.getHashSetMap().entrySet().stream().forEach((Map.Entry<Long, String> t) -> {
- HashSetFilter hashSourceFilter = new HashSetFilter(t.getValue(), t.getKey());
- hashSourceFilter.setSelected(Boolean.TRUE);
- hashHitsFilter.addHashSetFilter(hashSourceFilter);
- });
- return new RootFilter(new HideKnownFilter(), hashHitsFilter, new TextFilter(), new TypeFilter(RootEventType.getInstance()), dataSourcesFilter);
- }
-
public FilteredEventsModel(EventsRepository repo, ReadOnlyObjectProperty<ZoomParams> currentStateProperty) {
this.repo = repo;
this.autoCase = repo.getAutoCase();
repo.getDatasourcesMap().addListener((MapChangeListener.Change<? extends Long, ? extends String> change) -> {
DataSourceFilter dataSourceFilter = new DataSourceFilter(change.getValueAdded(), change.getKey());
- RootFilter rootFilter = filter().get();
- rootFilter.getDataSourcesFilter().addDataSourceFilter(dataSourceFilter);
+ RootFilter rootFilter = filterProperty().get();
+ rootFilter.getDataSourcesFilter().addSubFilter(dataSourceFilter);
requestedFilter.set(rootFilter.copyOf());
});
repo.getHashSetMap().addListener((MapChangeListener.Change<? extends Long, ? extends String> change) -> {
HashSetFilter hashSetFilter = new HashSetFilter(change.getValueAdded(), change.getKey());
- RootFilter rootFilter = filter().get();
- rootFilter.getHashHitsFilter().addHashSetFilter(hashSetFilter);
+ RootFilter rootFilter = filterProperty().get();
+ rootFilter.getHashHitsFilter().addSubFilter(hashSetFilter);
+ requestedFilter.set(rootFilter.copyOf());
+ });
+ repo.getTagNames().addListener((ListChangeListener.Change<? extends TagName> c) -> {
+ RootFilter rootFilter = filterProperty().get();
+ TagsFilter tagsFilter = rootFilter.getTagsFilter();
+ repo.syncTagsFilter(tagsFilter);
requestedFilter.set(rootFilter.copyOf());
});
requestedFilter.set(getDefaultFilter());
@@ -156,14 +148,14 @@ public final class FilteredEventsModel {
if (zoomParams != null) {
if (zoomParams.getTypeZoomLevel().equals(requestedTypeZoom.get()) == false
- || zoomParams.getDescrLOD().equals(requestedLOD.get()) == false
+ || zoomParams.getDescriptionLOD().equals(requestedLOD.get()) == false
|| zoomParams.getFilter().equals(requestedFilter.get()) == false
|| zoomParams.getTimeRange().equals(requestedTimeRange.get()) == false) {
requestedTypeZoom.set(zoomParams.getTypeZoomLevel());
requestedFilter.set(zoomParams.getFilter().copyOf());
requestedTimeRange.set(zoomParams.getTimeRange());
- requestedLOD.set(zoomParams.getDescrLOD());
+ requestedLOD.set(zoomParams.getDescriptionLOD());
}
}
});
@@ -171,12 +163,75 @@ public final class FilteredEventsModel {
requestedZoomParamters.bind(currentStateProperty);
}
- public Interval getBoundingEventsInterval() {
- return repo.getBoundingEventsInterval(getRequestedZoomParamters().get().getTimeRange(), getRequestedZoomParamters().get().getFilter());
+ synchronized public ReadOnlyObjectProperty<ZoomParams> zoomParametersProperty() {
+ return requestedZoomParamters.getReadOnlyProperty();
}
- synchronized public ReadOnlyObjectProperty getRequestedZoomParamters() {
- return requestedZoomParamters.getReadOnlyProperty();
+ /**
+ * @return a read only view of the time range requested via
+ * {@link #requestTimeRange(org.joda.time.Interval)}
+ */
+ synchronized public ReadOnlyObjectProperty<Interval> timeRangeProperty() {
+ if (requestedTimeRange.get() == null) {
+ requestedTimeRange.set(getSpanningInterval());
+ }
+ return requestedTimeRange.getReadOnlyProperty();
+ }
+
+ synchronized public ReadOnlyObjectProperty<DescriptionLOD> descriptionLODProperty() {
+ return requestedLOD.getReadOnlyProperty();
+ }
+
+ synchronized public ReadOnlyObjectProperty<RootFilter> filterProperty() {
+ return requestedFilter.getReadOnlyProperty();
+ }
+
+ synchronized public ReadOnlyObjectProperty<EventTypeZoomLevel> eventTypeZoomProperty() {
+ return requestedTypeZoom.getReadOnlyProperty();
+ }
+
+ synchronized public DescriptionLOD getDescriptionLOD() {
+ return requestedLOD.get();
+ }
+
+ synchronized public RootFilter getFilter() {
+ return requestedFilter.get();
+ }
+
+ synchronized public EventTypeZoomLevel getEventTypeZoom() {
+ return requestedTypeZoom.get();
+ }
+
+ /**
+ * @return the default filter used at startup
+ */
+ public RootFilter getDefaultFilter() {
+ DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();
+
+ repo.getDatasourcesMap().entrySet().stream().forEach((Map.Entry<Long, String> t) -> {
+ DataSourceFilter dataSourceFilter = new DataSourceFilter(t.getValue(), t.getKey());
+ dataSourceFilter.setSelected(Boolean.TRUE);
+ dataSourcesFilter.addSubFilter(dataSourceFilter);
+ });
+
+ HashHitsFilter hashHitsFilter = new HashHitsFilter();
+ repo.getHashSetMap().entrySet().stream().forEach((Map.Entry<Long, String> t) -> {
+ HashSetFilter hashSetFilter = new HashSetFilter(t.getValue(), t.getKey());
+ hashSetFilter.setSelected(Boolean.TRUE);
+ hashHitsFilter.addSubFilter(hashSetFilter);
+ });
+
+ TagsFilter tagsFilter = new TagsFilter();
+ repo.getTagNames().stream().forEach(t -> {
+ TagNameFilter tagNameFilter = new TagNameFilter(t, autoCase);
+ tagNameFilter.setSelected(Boolean.TRUE);
+ tagsFilter.addSubFilter(tagNameFilter);
+ });
+ return new RootFilter(new HideKnownFilter(), tagsFilter, hashHitsFilter, new TextFilter(), new TypeFilter(RootEventType.getInstance()), dataSourcesFilter);
+ }
+
+ public Interval getBoundingEventsInterval() {
+ return repo.getBoundingEventsInterval(zoomParametersProperty().get().getTimeRange(), zoomParametersProperty().get().getFilter());
}
public TimeLineEvent getEventById(Long eventID) {
@@ -187,6 +242,18 @@ public final class FilteredEventsModel {
return repo.getEventsById(eventIDs);
}
+ /**
+ * get a count of the tag names applied to the given event ids, as a map
+ * from tag name display name to the number of tag applications
+ *
+ * @param eventIDsWithTags the event ids to get the tag counts map for
+ *
+ * @return a map from tag name display name to count of applications
+ */
+ public Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) {
+ return repo.getTagCountsByTagName(eventIDsWithTags);
+ }
+
public Set<Long> getEventIDs(Interval timeRange, Filter filter) {
final Interval overlap;
final RootFilter intersect;
@@ -219,25 +286,6 @@ public final class FilteredEventsModel {
return repo.countEvents(new ZoomParams(timeRange, typeZoom, filter, null));
}
- /**
- * @return a read only view of the time range requested via
- * {@link #requestTimeRange(org.joda.time.Interval)}
- */
- synchronized public ReadOnlyObjectProperty<Interval> timeRange() {
- if (requestedTimeRange.get() == null) {
- requestedTimeRange.set(getSpanningInterval());
- }
- return requestedTimeRange.getReadOnlyProperty();
- }
-
- synchronized public ReadOnlyObjectProperty<DescriptionLOD> descriptionLOD() {
- return requestedLOD.getReadOnlyProperty();
- }
-
- synchronized public ReadOnlyObjectProperty<RootFilter> filter() {
- return requestedFilter.getReadOnlyProperty();
- }
-
/**
* @return the smallest interval spanning all the events from the
* repository, ignoring any filters or requested ranges
@@ -303,58 +351,52 @@ public final class FilteredEventsModel {
return repo.getAggregatedEvents(params);
}
- synchronized public ReadOnlyObjectProperty<EventTypeZoomLevel> eventTypeZoom() {
- return requestedTypeZoom.getReadOnlyProperty();
+ synchronized public boolean handleContentTagAdded(ContentTagAddedEvent evt) {
+ ContentTag contentTag = evt.getTag();
+ Content content = contentTag.getContent();
+ Set<Long> updatedEventIDs = repo.addTag(content.getId(), null, contentTag);
+ return postTagsUpdated(updatedEventIDs);
}
- synchronized public EventTypeZoomLevel getEventTypeZoom() {
- return requestedTypeZoom.get();
+ synchronized public boolean handleArtifactTagAdded(BlackBoardArtifactTagAddedEvent evt) {
+ BlackboardArtifactTag artifactTag = evt.getTag();
+ BlackboardArtifact artifact = artifactTag.getArtifact();
+ Set<Long> updatedEventIDs = repo.addTag(artifact.getObjectID(), artifact.getArtifactID(), artifactTag);
+ return postTagsUpdated(updatedEventIDs);
}
- synchronized public DescriptionLOD getDescriptionLOD() {
- return requestedLOD.get();
- }
-
- synchronized public void handleTagAdded(BlackBoardArtifactTagAddedEvent e) {
- BlackboardArtifact artifact = e.getTag().getArtifact();
- Set<Long> updatedEventIDs = repo.markEventsTagged(artifact.getObjectID(), artifact.getArtifactID(), true);
- if (!updatedEventIDs.isEmpty()) {
- eventbus.post(new EventsTaggedEvent(updatedEventIDs));
- }
- }
-
- synchronized public void handleTagDeleted(BlackBoardArtifactTagDeletedEvent e) {
- BlackboardArtifact artifact = e.getTag().getArtifact();
- try {
- boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false;
- Set<Long> updatedEventIDs = repo.markEventsTagged(artifact.getObjectID(), artifact.getArtifactID(), tagged);
- if (!updatedEventIDs.isEmpty()) {
- eventbus.post(new EventsUnTaggedEvent(updatedEventIDs));
- }
- } catch (TskCoreException ex) {
- LOGGER.log(Level.SEVERE, "unable to determine tagged status of attribute.", ex);
- }
- }
-
- synchronized public void handleTagAdded(ContentTagAddedEvent e) {
- Content content = e.getTag().getContent();
- Set<Long> updatedEventIDs = repo.markEventsTagged(content.getId(), null, true);
- if (!updatedEventIDs.isEmpty()) {
- eventbus.post(new EventsTaggedEvent(updatedEventIDs));
- }
- }
-
- synchronized public void handleTagDeleted(ContentTagDeletedEvent e) {
- Content content = e.getTag().getContent();
+ synchronized public boolean handleContentTagDeleted(ContentTagDeletedEvent evt) {
+ ContentTag contentTag = evt.getTag();
+ Content content = contentTag.getContent();
try {
boolean tagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false;
- Set<Long> updatedEventIDs = repo.markEventsTagged(content.getId(), null, tagged);
- if (!updatedEventIDs.isEmpty()) {
- eventbus.post(new EventsUnTaggedEvent(updatedEventIDs));
- }
+ Set<Long> updatedEventIDs = repo.deleteTag(content.getId(), null, contentTag, tagged);
+ return postTagsUpdated(updatedEventIDs);
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "unable to determine tagged status of content.", ex);
}
+ return false;
+ }
+
+ synchronized public boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt) {
+ BlackboardArtifactTag artifactTag = evt.getTag();
+ BlackboardArtifact artifact = artifactTag.getArtifact();
+ try {
+ boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false;
+ Set<Long> updatedEventIDs = repo.deleteTag(artifact.getObjectID(), artifact.getArtifactID(), artifactTag, tagged);
+ return postTagsUpdated(updatedEventIDs);
+ } catch (TskCoreException ex) {
+ LOGGER.log(Level.SEVERE, "unable to determine tagged status of artifact.", ex);
+ }
+ return false;
+ }
+
+ private boolean postTagsUpdated(Set<Long> updatedEventIDs) {
+ boolean tagsUpdated = !updatedEventIDs.isEmpty();
+ if (tagsUpdated) {
+ eventbus.post(new TagsUpdatedEvent(updatedEventIDs));
+ }
+ return tagsUpdated;
}
synchronized public void registerForEvents(Object o) {
@@ -364,4 +406,9 @@ public final class FilteredEventsModel {
synchronized public void unRegisterForEvents(Object o) {
eventbus.unregister(o);
}
+
+ public void refresh() {
+ eventbus.post(new RefreshRequestedEvent());
+ }
+
}
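
FilteredEventsModel now publishes TagsUpdatedEvent and RefreshRequestedEvent on a Guava EventBus, and views subscribe via registerForEvents/unRegisterForEvents. A minimal sketch of the subscriber side; the nested event class is a stand-in for the real org.sleuthkit.autopsy.timeline.events.TagsUpdatedEvent:

    import com.google.common.eventbus.EventBus;
    import com.google.common.eventbus.Subscribe;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class TagListenerSketch {

        static class TagsUpdatedEvent { // stand-in for the real event class
            final Set<Long> updatedEventIDs;
            TagsUpdatedEvent(Set<Long> ids) {
                this.updatedEventIDs = ids;
            }
        }

        @Subscribe
        public void handleTagsUpdated(TagsUpdatedEvent event) {
            // a view would refresh just the aggregates containing these event ids
            System.out.println("tags changed for events: " + event.updatedEventIDs);
        }

        public static void main(String[] args) {
            EventBus eventbus = new EventBus();
            TagListenerSketch listener = new TagListenerSketch();
            eventbus.register(listener);   // what registerForEvents(o) wraps
            eventbus.post(new TagsUpdatedEvent(new HashSet<>(Arrays.asList(1L, 2L))));
            eventbus.unregister(listener); // what unRegisterForEvents(o) wraps
        }
    }
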
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/TimeLineEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/TimeLineEvent.java
similarity index 54%
rename from Core/src/org/sleuthkit/autopsy/timeline/events/TimeLineEvent.java
rename to Core/src/org/sleuthkit/autopsy/timeline/datamodel/TimeLineEvent.java
index 53ed2904eb..744311c321 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/events/TimeLineEvent.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/TimeLineEvent.java
@@ -16,47 +16,49 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.sleuthkit.autopsy.timeline.events;
+package org.sleuthkit.autopsy.timeline.datamodel;
+import com.google.common.collect.ImmutableMap;
import javax.annotation.Nullable;
-import org.sleuthkit.autopsy.timeline.events.type.EventType;
+import javax.annotation.concurrent.Immutable;
+import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
+import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD;
import org.sleuthkit.datamodel.TskData;
/**
- *
+ * A single event.
*/
+@Immutable
public class TimeLineEvent {
- private final Long eventID;
-
- private final Long fileID;
-
- private final Long time;
-
+ private final long eventID;
+ private final long fileID;
private final Long artifactID;
+ private final long dataSourceID;
+ private final long time;
private final EventType subType;
-
- private final String fullDescription, medDescription, shortDescription;
+ private final ImmutableMap<DescriptionLOD, String> descriptions;
private final TskData.FileKnown known;
-
private final boolean hashHit;
private final boolean tagged;
- public TimeLineEvent(Long eventID, Long objID, @Nullable Long artifactID, Long time, EventType type, String fullDescription, String medDescription, String shortDescription, TskData.FileKnown known, boolean hashHit, boolean tagged) {
+ public TimeLineEvent(long eventID, long dataSourceID, long objID, @Nullable Long artifactID, long time, EventType type, String fullDescription, String medDescription, String shortDescription, TskData.FileKnown known, boolean hashHit, boolean tagged) {
this.eventID = eventID;
this.fileID = objID;
- this.artifactID = artifactID;
+ this.artifactID = (artifactID == null || artifactID == 0) ? null : artifactID;
this.time = time;
this.subType = type;
+ descriptions = ImmutableMap.of(
+ DescriptionLOD.FULL, fullDescription,
+ DescriptionLOD.MEDIUM, medDescription,
+ DescriptionLOD.SHORT, shortDescription);
- this.fullDescription = fullDescription;
- this.medDescription = medDescription;
- this.shortDescription = shortDescription;
this.known = known;
this.hashHit = hashHit;
this.tagged = tagged;
+ this.dataSourceID = dataSourceID;
}
public boolean isTagged() {
@@ -72,18 +74,18 @@ public class TimeLineEvent {
return artifactID;
}
- public Long getEventID() {
+ public long getEventID() {
return eventID;
}
- public Long getFileID() {
+ public long getFileID() {
return fileID;
}
/**
* @return the time in seconds from unix epoch
*/
- public Long getTime() {
+ public long getTime() {
return time;
}
@@ -92,18 +94,26 @@ public class TimeLineEvent {
}
public String getFullDescription() {
- return fullDescription;
+ return getDescription(DescriptionLOD.FULL);
}
public String getMedDescription() {
- return medDescription;
+ return getDescription(DescriptionLOD.MEDIUM);
}
public String getShortDescription() {
- return shortDescription;
+ return getDescription(DescriptionLOD.SHORT);
}
public TskData.FileKnown getKnown() {
return known;
}
+
+ public String getDescription(DescriptionLOD lod) {
+ return descriptions.get(lod);
+ }
+
+ public long getDataSourceID() {
+ return dataSourceID;
+ }
}
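
The rewritten TimeLineEvent stores its three description strings in a single ImmutableMap keyed by DescriptionLOD, so getFullDescription/getMedDescription/getShortDescription all reduce to one getDescription(lod) lookup. A toy version of that structure (the enum and sample values are illustrative only):

    import com.google.common.collect.ImmutableMap;

    class DescriptionSketch {

        enum DescriptionLOD { SHORT, MEDIUM, FULL } // stand-in for the real enum

        private final ImmutableMap<DescriptionLOD, String> descriptions = ImmutableMap.of(
                DescriptionLOD.FULL, "/home/user/Documents/report.odt",
                DescriptionLOD.MEDIUM, "/home/user/Documents",
                DescriptionLOD.SHORT, "/home/user");

        String getDescription(DescriptionLOD lod) {
            return descriptions.get(lod); // one map lookup replaces three fields
        }

        public static void main(String[] args) {
            System.out.println(new DescriptionSketch().getDescription(DescriptionLOD.SHORT));
        }
    }
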
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/type/ArtifactEventType.java b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/ArtifactEventType.java
similarity index 99%
rename from Core/src/org/sleuthkit/autopsy/timeline/events/type/ArtifactEventType.java
rename to Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/ArtifactEventType.java
index 6ef21f15c9..fa17ab64e6 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/events/type/ArtifactEventType.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/ArtifactEventType.java
@@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.sleuthkit.autopsy.timeline.events.type;
+package org.sleuthkit.autopsy.timeline.datamodel.eventtype;
import java.util.HashMap;
import java.util.List;
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/type/BaseTypes.java b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/BaseTypes.java
similarity index 96%
rename from Core/src/org/sleuthkit/autopsy/timeline/events/type/BaseTypes.java
rename to Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/BaseTypes.java
index b4b6a8acc6..6ca1109f24 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/events/type/BaseTypes.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/BaseTypes.java
@@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.sleuthkit.autopsy.timeline.events.type;
+package org.sleuthkit.autopsy.timeline.datamodel.eventtype;
import java.util.Arrays;
import java.util.List;
@@ -70,8 +70,9 @@ public enum BaseTypes implements EventType {
private final String iconBase;
- private Image image;
+ private final Image image;
+ @Override
public Image getFXImage() {
return image;
}
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/type/Bundle.properties b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/Bundle.properties
similarity index 100%
rename from Core/src/org/sleuthkit/autopsy/timeline/events/type/Bundle.properties
rename to Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/Bundle.properties
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/type/Bundle_ja.properties b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/Bundle_ja.properties
similarity index 100%
rename from Core/src/org/sleuthkit/autopsy/timeline/events/type/Bundle_ja.properties
rename to Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/Bundle_ja.properties
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/type/EventType.java b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/EventType.java
similarity index 98%
rename from Core/src/org/sleuthkit/autopsy/timeline/events/type/EventType.java
rename to Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/EventType.java
index f89143cf91..0758d804ec 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/events/type/EventType.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/EventType.java
@@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.sleuthkit.autopsy.timeline.events.type;
+package org.sleuthkit.autopsy.timeline.datamodel.eventtype;
import java.util.ArrayList;
import java.util.Comparator;
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/type/FileSystemTypes.java b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/FileSystemTypes.java
similarity index 97%
rename from Core/src/org/sleuthkit/autopsy/timeline/events/type/FileSystemTypes.java
rename to Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/FileSystemTypes.java
index fae716c656..11584008fc 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/events/type/FileSystemTypes.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/FileSystemTypes.java
@@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.sleuthkit.autopsy.timeline.events.type;
+package org.sleuthkit.autopsy.timeline.datamodel.eventtype;
import java.util.Collections;
import java.util.List;
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/type/MiscTypes.java b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/MiscTypes.java
similarity index 99%
rename from Core/src/org/sleuthkit/autopsy/timeline/events/type/MiscTypes.java
rename to Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/MiscTypes.java
index 5cc019e7be..ef57868a1d 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/events/type/MiscTypes.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/MiscTypes.java
@@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.sleuthkit.autopsy.timeline.events.type;
+package org.sleuthkit.autopsy.timeline.datamodel.eventtype;
import java.util.Arrays;
import java.util.Collections;
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/type/RootEventType.java b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/RootEventType.java
similarity index 97%
rename from Core/src/org/sleuthkit/autopsy/timeline/events/type/RootEventType.java
rename to Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/RootEventType.java
index 1c797fc64c..1c2d051105 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/events/type/RootEventType.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/RootEventType.java
@@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.sleuthkit.autopsy.timeline.events.type;
+package org.sleuthkit.autopsy.timeline.datamodel.eventtype;
import java.util.Arrays;
import java.util.Collections;
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/type/WebTypes.java b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/WebTypes.java
similarity index 99%
rename from Core/src/org/sleuthkit/autopsy/timeline/events/type/WebTypes.java
rename to Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/WebTypes.java
index 40556c7e95..7de95beaed 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/events/type/WebTypes.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/datamodel/eventtype/WebTypes.java
@@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.sleuthkit.autopsy.timeline.events.type;
+package org.sleuthkit.autopsy.timeline.datamodel.eventtype;
import java.util.Collections;
import java.util.List;
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/Bundle_ja.properties b/Core/src/org/sleuthkit/autopsy/timeline/db/Bundle_ja.properties
similarity index 100%
rename from Core/src/org/sleuthkit/autopsy/timeline/events/db/Bundle_ja.properties
rename to Core/src/org/sleuthkit/autopsy/timeline/db/Bundle_ja.properties
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java b/Core/src/org/sleuthkit/autopsy/timeline/db/EventDB.java
similarity index 67%
rename from Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java
rename to Core/src/org/sleuthkit/autopsy/timeline/db/EventDB.java
index 7e682e7255..1b6c5943fb 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/db/EventDB.java
@@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.sleuthkit.autopsy.timeline.events.db;
+package org.sleuthkit.autopsy.timeline.db;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.SetMultimap;
@@ -32,6 +32,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
+import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
@@ -44,37 +45,30 @@ import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.logging.Level;
import java.util.stream.Collectors;
-import java.util.stream.Stream;
import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
import org.apache.commons.lang3.StringUtils;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
import org.joda.time.Period;
-import org.openide.util.Exceptions;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.TimeLineController;
-import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
-import org.sleuthkit.autopsy.timeline.events.TimeLineEvent;
-import org.sleuthkit.autopsy.timeline.events.type.BaseTypes;
-import org.sleuthkit.autopsy.timeline.events.type.EventType;
-import org.sleuthkit.autopsy.timeline.events.type.RootEventType;
+import org.sleuthkit.autopsy.timeline.datamodel.AggregateEvent;
+import org.sleuthkit.autopsy.timeline.datamodel.TimeLineEvent;
+import org.sleuthkit.autopsy.timeline.datamodel.eventtype.BaseTypes;
+import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
+import org.sleuthkit.autopsy.timeline.datamodel.eventtype.RootEventType;
+import static org.sleuthkit.autopsy.timeline.db.SQLHelper.useHashHitTablesHelper;
+import static org.sleuthkit.autopsy.timeline.db.SQLHelper.useTagTablesHelper;
import org.sleuthkit.autopsy.timeline.filters.RootFilter;
+import org.sleuthkit.autopsy.timeline.filters.TagsFilter;
import org.sleuthkit.autopsy.timeline.utils.RangeDivisionInfo;
import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD;
-import static org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD.FULL;
-import static org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD.MEDIUM;
-import static org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD.SHORT;
import org.sleuthkit.autopsy.timeline.zooming.EventTypeZoomLevel;
-import org.sleuthkit.autopsy.timeline.zooming.TimeUnits;
-import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.DAYS;
-import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.HOURS;
-import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.MINUTES;
-import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.MONTHS;
-import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.SECONDS;
-import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.YEARS;
import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.Tag;
import org.sleuthkit.datamodel.TskData;
import org.sqlite.SQLiteJDBCLoader;
@@ -88,7 +82,7 @@ import org.sqlite.SQLiteJDBCLoader;
public class EventDB {
/**
-
+ *
* enum to represent keys stored in db_info table
*/
private enum DBInfoKey {
@@ -150,17 +144,21 @@ public class EventDB {
private PreparedStatement getMaxTimeStmt;
private PreparedStatement getMinTimeStmt;
private PreparedStatement getDataSourceIDsStmt;
+ private PreparedStatement getHashSetNamesStmt;
private PreparedStatement insertRowStmt;
private PreparedStatement recordDBInfoStmt;
private PreparedStatement insertHashSetStmt;
private PreparedStatement insertHashHitStmt;
+ private PreparedStatement insertTagStmt;
+ private PreparedStatement deleteTagStmt;
private PreparedStatement selectHashSetStmt;
private PreparedStatement countAllEventsStmt;
private PreparedStatement dropEventsTableStmt;
private PreparedStatement dropHashSetHitsTableStmt;
private PreparedStatement dropHashSetsTableStmt;
+ private PreparedStatement dropTagsTableStmt;
private PreparedStatement dropDBInfoTableStmt;
- private PreparedStatement selectEventsFromOBjectAndArtifactStmt;
+ private PreparedStatement selectEventIDsFromOBjectAndArtifactStmt;
private final Set<PreparedStatement> preparedStatements = new HashSet<>();
@@ -196,9 +194,9 @@ public class EventDB {
public Interval getSpanningInterval(Collection<Long> eventIDs) {
DBLock.lock();
try (Statement stmt = con.createStatement();
- ResultSet rs = stmt.executeQuery("select Min(time), Max(time) from events where event_id in (" + StringUtils.join(eventIDs, ", ") + ")");) { // NON-NLS
+ ResultSet rs = stmt.executeQuery("SELECT Min(time), Max(time) FROM events WHERE event_id IN (" + StringUtils.join(eventIDs, ", ") + ")");) { // NON-NLS
while (rs.next()) {
- return new Interval(rs.getLong("Min(time)"), rs.getLong("Max(time)") + 1, DateTimeZone.UTC); // NON-NLS
+ return new Interval(rs.getLong("Min(time)") * 1000, (rs.getLong("Max(time)") + 1) * 1000, DateTimeZone.UTC); // NON-NLS
}
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "Error executing get spanning interval query.", ex); // NON-NLS
@@ -212,11 +210,11 @@ public class EventDB {
return new EventTransaction();
}
- void commitTransaction(EventTransaction tr, Boolean notify) {
+ void commitTransaction(EventTransaction tr) {
if (tr.isClosed()) {
throw new IllegalArgumentException("can't close already closed transaction"); // NON-NLS
}
- tr.commit(notify);
+ tr.commit();
}
/**
@@ -248,7 +246,7 @@ public class EventDB {
*/
Map<EventType, Long> countEventsByType(ZoomParams params) {
if (params.getTimeRange() != null) {
- return countEvents(params.getTimeRange().getStartMillis() / 1000,
+ return countEventsByType(params.getTimeRange().getStartMillis() / 1000,
params.getTimeRange().getEndMillis() / 1000,
params.getFilter(), params.getTypeZoomLevel());
} else {
@@ -256,6 +254,33 @@ public class EventDB {
}
}
+ /**
+ * get a count of the tag names applied to the given event ids, as a map
+ * from tag name display name to the number of tag applications
+ *
+ * @param eventIDsWithTags the event ids to get the tag counts map for
+ *
+ * @return a map from tag name display name to count of applications
+ */
+ Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) {
+ HashMap<String, Long> counts = new HashMap<>();
+ DBLock.lock();
+ try (Statement createStatement = con.createStatement();
+ ResultSet rs = createStatement.executeQuery("SELECT tag_name_display_name, COUNT(DISTINCT tag_id) AS count FROM tags"
+ + " WHERE event_id IN (" + StringUtils.join(eventIDsWithTags, ", ") + ")"
+ + " GROUP BY tag_name_id"
+ + " ORDER BY tag_name_display_name");) {
+ while (rs.next()) {
+ counts.put(rs.getString("tag_name_display_name"), rs.getLong("count"));
+ }
+ } catch (SQLException ex) {
+ LOGGER.log(Level.SEVERE, "Failed to get tag counts by tag name.", ex);
+ } finally {
+ DBLock.unlock();
+ }
+ return counts;
+ }
+
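
A runnable sketch of the tag-count query above against a throwaway in-memory SQLite database, using the tags schema this patch creates further down (requires the sqlite-jdbc driver on the classpath; all values are made up):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;
    import java.util.HashMap;
    import java.util.Map;

    public class TagCountSketch {

        public static void main(String[] args) throws SQLException {
            try (Connection con = DriverManager.getConnection("jdbc:sqlite::memory:");
                    Statement stmt = con.createStatement()) {
                stmt.execute("CREATE TABLE tags (tag_id INTEGER NOT NULL, tag_name_id INTEGER NOT NULL,"
                        + " tag_name_display_name TEXT NOT NULL, event_id INTEGER NOT NULL,"
                        + " PRIMARY KEY (event_id, tag_name_id))");
                stmt.execute("INSERT INTO tags VALUES (1, 10, 'Bookmark', 100), (2, 10, 'Bookmark', 101),"
                        + " (3, 20, 'Follow Up', 100)");
                Map<String, Long> counts = new HashMap<>();
                try (ResultSet rs = stmt.executeQuery(
                        "SELECT tag_name_display_name, COUNT(DISTINCT tag_id) AS count FROM tags"
                        + " WHERE event_id IN (100, 101) GROUP BY tag_name_id"
                        + " ORDER BY tag_name_display_name")) {
                    while (rs.next()) {
                        counts.put(rs.getString("tag_name_display_name"), rs.getLong("count"));
                    }
                }
                System.out.println(counts); // e.g. {Bookmark=2, Follow Up=1}
            }
        }
    }
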
/**
* drop the tables from this database and recreate them in order to start
* over.
@@ -266,10 +291,27 @@ public class EventDB {
dropEventsTableStmt.executeUpdate();
dropHashSetHitsTableStmt.executeUpdate();
dropHashSetsTableStmt.executeUpdate();
+ dropTagsTableStmt.executeUpdate();
dropDBInfoTableStmt.executeUpdate();
- initializeDB();;
+ initializeDB();
} catch (SQLException ex) {
- LOGGER.log(Level.SEVERE, "could not drop old tables table", ex); // NON-NLS
+ LOGGER.log(Level.SEVERE, "could not drop old tables", ex); // NON-NLS
+ } finally {
+ DBLock.unlock();
+ }
+ }
+
+ /**
+ * drop only the tags table and rebuild it in case the tags have changed
+ * while the timeline was not listening.
+ */
+ void reInitializeTags() {
+ DBLock.lock();
+ try {
+ dropTagsTableStmt.executeUpdate();
+ initializeTagsTable();
+ } catch (SQLException ex) {
+ LOGGER.log(Level.SEVERE, "could not drop old tags table", ex); // NON-NLS
} finally {
DBLock.unlock();
}
@@ -281,8 +323,8 @@ public class EventDB {
final String sqlWhere = SQLHelper.getSQLWhere(filter);
DBLock.lock();
try (Statement stmt = con.createStatement(); //can't use prepared statement because of complex where clause
- ResultSet rs = stmt.executeQuery(" select (select Max(time) from events" + useHashHitTablesHelper(filter) + " where time <=" + start + " and " + sqlWhere + ") as start,"
- + "(select Min(time) from from events" + useHashHitTablesHelper(filter) + " where time >= " + end + " and " + sqlWhere + ") as end")) { // NON-NLS
+ ResultSet rs = stmt.executeQuery(" SELECT (SELECT Max(time) FROM events " + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) + " WHERE time <=" + start + " AND " + sqlWhere + ") AS start,"
+ + "(SELECT Min(time) FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) + " WHERE time >= " + end + " AND " + sqlWhere + ") AS end")) { // NON-NLS
while (rs.next()) {
long start2 = rs.getLong("start"); // NON-NLS
@@ -332,10 +374,9 @@ public class EventDB {
Set<Long> resultIDs = new HashSet<>();
DBLock.lock();
- final String query = "select event_id from from events" + useHashHitTablesHelper(filter) + " where time >= " + startTime + " and time <" + endTime + " and " + SQLHelper.getSQLWhere(filter); // NON-NLS
+ final String query = "SELECT events.event_id AS event_id FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) + " WHERE time >= " + startTime + " AND time <" + endTime + " AND " + SQLHelper.getSQLWhere(filter); // NON-NLS
try (Statement stmt = con.createStatement();
ResultSet rs = stmt.executeQuery(query)) {
-
while (rs.next()) {
resultIDs.add(rs.getLong("event_id"));
}
@@ -357,11 +398,11 @@ public class EventDB {
return getDBInfo(DBInfoKey.LAST_OBJECT_ID, -1);
}
+ /**
+ * this relies on the fact that no tskObj has ID 0, but 0 is the default
+ * value for the datasource_id column in the events table.
+ */
boolean hasNewColumns() {
- /*
- * this relies on the fact that no tskObj has ID 0 but 0 is the default
- * value for the datasource_id column in the events table.
- */
return hasHashHitColumn() && hasDataSourceIDColumn() && hasTaggedColumn()
&& (getDataSourceIDs().isEmpty() == false);
}
@@ -388,7 +429,7 @@ public class EventDB {
Map<Long, String> getHashSetNames() {
Map<Long, String> hashSets = new HashMap<>();
DBLock.lock();
- try (ResultSet rs = con.createStatement().executeQuery("select * from hash_sets")) {
+ try (ResultSet rs = getHashSetNamesStmt.executeQuery();) {
while (rs.next()) {
long hashSetID = rs.getLong("hash_set_id");
String hashSetName = rs.getString("hash_set_name");
@@ -443,7 +484,6 @@ public class EventDB {
/**
* create the table and indices if they don't already exist
*
- *
* @return the number of rows in the table , count > 0 indicating an
* existing table
*/
@@ -488,8 +528,9 @@ public class EventDB {
+ " full_description TEXT, " // NON-NLS
+ " med_description TEXT, " // NON-NLS
+ " short_description TEXT, " // NON-NLS
- + " known_state INTEGER,"
- + " hash_hit INTEGER)"; //boolean // NON-NLS
+ + " known_state INTEGER," //boolean // NON-NLS
+ + " hash_hit INTEGER," //boolean // NON-NLS
+ + " tagged INTEGER)"; //boolean // NON-NLS
stmt.execute(sql);
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "problem creating database table", ex); // NON-NLS
@@ -500,7 +541,6 @@ public class EventDB {
String sql = "ALTER TABLE events ADD COLUMN datasource_id INTEGER"; // NON-NLS
stmt.execute(sql);
} catch (SQLException ex) {
-
LOGGER.log(Level.SEVERE, "problem upgrading events table", ex); // NON-NLS
}
}
@@ -509,7 +549,6 @@ public class EventDB {
String sql = "ALTER TABLE events ADD COLUMN tagged INTEGER"; // NON-NLS
stmt.execute(sql);
} catch (SQLException ex) {
-
LOGGER.log(Level.SEVERE, "problem upgrading events table", ex); // NON-NLS
}
}
@@ -542,8 +581,11 @@ public class EventDB {
LOGGER.log(Level.SEVERE, "problem creating hash_set_hits table", ex);
}
+ initializeTagsTable();
+
createIndex("events", Arrays.asList("file_id"));
createIndex("events", Arrays.asList("artifact_id"));
+ createIndex("events", Arrays.asList("time"));
createIndex("events", Arrays.asList("sub_type", "time"));
createIndex("events", Arrays.asList("base_type", "time"));
createIndex("events", Arrays.asList("known_state"));
@@ -552,7 +594,7 @@ public class EventDB {
insertRowStmt = prepareStatement(
"INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hash_hit, tagged) " // NON-NLS
+ "VALUES (?,?,?,?,?,?,?,?,?,?,?,?)"); // NON-NLS
-
+ getHashSetNamesStmt = prepareStatement("SELECT hash_set_id, hash_set_name FROM hash_sets"); // NON-NLS
getDataSourceIDsStmt = prepareStatement("SELECT DISTINCT datasource_id FROM events"); // NON-NLS
getMaxTimeStmt = prepareStatement("SELECT Max(time) AS max FROM events"); // NON-NLS
getMinTimeStmt = prepareStatement("SELECT Min(time) AS min FROM events"); // NON-NLS
@@ -562,21 +604,41 @@ public class EventDB {
insertHashSetStmt = prepareStatement("INSERT OR IGNORE INTO hash_sets (hash_set_name) values (?)");
selectHashSetStmt = prepareStatement("SELECT hash_set_id FROM hash_sets WHERE hash_set_name = ?");
insertHashHitStmt = prepareStatement("INSERT OR IGNORE INTO hash_set_hits (hash_set_id, event_id) values (?,?)");
+ insertTagStmt = prepareStatement("INSERT OR IGNORE INTO tags (tag_id, tag_name_id,tag_name_display_name, event_id) values (?,?,?,?)");
+ deleteTagStmt = prepareStatement("DELETE FROM tags WHERE tag_id = ?");
countAllEventsStmt = prepareStatement("SELECT count(*) AS count FROM events");
dropEventsTableStmt = prepareStatement("DROP TABLE IF EXISTS events");
dropHashSetHitsTableStmt = prepareStatement("DROP TABLE IF EXISTS hash_set_hits");
dropHashSetsTableStmt = prepareStatement("DROP TABLE IF EXISTS hash_sets");
+ dropTagsTableStmt = prepareStatement("DROP TABLE IF EXISTS tags");
dropDBInfoTableStmt = prepareStatement("DROP TABLE IF EXISTS db_info");
- selectEventsFromOBjectAndArtifactStmt = prepareStatement("SELECT event_id FROM events WHERE file_id == ? AND artifact_id IS ?");
+ selectEventIDsFromOBjectAndArtifactStmt = prepareStatement("SELECT event_id FROM events WHERE file_id == ? AND artifact_id IS ?");
} catch (SQLException sQLException) {
LOGGER.log(Level.SEVERE, "failed to prepareStatment", sQLException); // NON-NLS
}
-
} finally {
DBLock.unlock();
}
}
+ /**
+ * create the tags table if it doesn't already exist. This is broken out as
+ * a separate method so it can be used by {@link #reInitializeTags()}
+ */
+ private void initializeTagsTable() {
+ try (Statement stmt = con.createStatement()) {
+ String sql = "CREATE TABLE IF NOT EXISTS tags "
+ + "(tag_id INTEGER NOT NULL,"
+ + " tag_name_id INTEGER NOT NULL, "
+ + " tag_name_display_name TEXT NOT NULL, "
+ + " event_id INTEGER REFERENCES events(event_id) NOT NULL, "
+ + " PRIMARY KEY (event_id, tag_name_id))";
+ stmt.execute(sql);
+ } catch (SQLException ex) {
+ LOGGER.log(Level.SEVERE, "problem creating tags table", ex);
+ }
+ }
+
/**
*
* @param tableName the value of tableName
@@ -628,11 +690,11 @@ public class EventDB {
void insertEvent(long time, EventType type, long datasourceID, long objID,
Long artifactID, String fullDescription, String medDescription,
- String shortDescription, TskData.FileKnown known, Set<String> hashSets, boolean tagged) {
+ String shortDescription, TskData.FileKnown known, Set<String> hashSets, List<? extends Tag> tags) {
EventTransaction transaction = beginTransaction();
- insertEvent(time, type, datasourceID, objID, artifactID, fullDescription, medDescription, shortDescription, known, hashSets, tagged, transaction);
- commitTransaction(transaction, true);
+ insertEvent(time, type, datasourceID, objID, artifactID, fullDescription, medDescription, shortDescription, known, hashSets, tags, transaction);
+ commitTransaction(transaction);
}
/**
@@ -644,17 +706,13 @@ public class EventDB {
void insertEvent(long time, EventType type, long datasourceID, long objID,
Long artifactID, String fullDescription, String medDescription,
String shortDescription, TskData.FileKnown known, Set<String> hashSetNames,
- boolean tagged,
- EventTransaction transaction) {
+ List<? extends Tag> tags, EventTransaction transaction) {
if (transaction.isClosed()) {
throw new IllegalArgumentException("can't update database with closed transaction"); // NON-NLS
}
- int typeNum;
- int superTypeNum;
-
- typeNum = RootEventType.allTypes.indexOf(type);
- superTypeNum = type.getSuperType().ordinal();
+ int typeNum = RootEventType.allTypes.indexOf(type);
+ int superTypeNum = type.getSuperType().ordinal();
DBLock.lock();
try {
@@ -684,7 +742,7 @@ public class EventDB {
insertRowStmt.setByte(10, known == null ? TskData.FileKnown.UNKNOWN.getFileKnownValue() : known.getFileKnownValue());
insertRowStmt.setInt(11, hashSetNames.isEmpty() ? 0 : 1);
- insertRowStmt.setInt(12, tagged ? 1 : 0);
+ insertRowStmt.setInt(12, tags.isEmpty() ? 0 : 1);
insertRowStmt.executeUpdate();
@@ -697,7 +755,7 @@ public class EventDB {
insertHashSetStmt.setString(1, name);
insertHashSetStmt.executeUpdate();
- //TODO: use nested select to get hash_set_id rather than seperate statement/query
+ //TODO: use nested select to get hash_set_id rather than separate statement/query ?
//"select hash_set_id from hash_sets where hash_set_name = ?"
selectHashSetStmt.setString(1, name);
try (ResultSet rs = selectHashSetStmt.executeQuery()) {
@@ -711,9 +769,13 @@ public class EventDB {
}
}
}
+ for (Tag tag : tags) {
+ //could this be one insert? is there a performance win?
+ insertTag(tag, eventID);
+ }
break;
}
- };
+ }
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "failed to insert event", ex); // NON-NLS
@@ -722,33 +784,133 @@ public class EventDB {
}
}
- Set<Long> markEventsTagged(long objectID, Long artifactID, boolean tagged) {
- HashSet<Long> eventIDs = new HashSet<>();
-
+ /**
+ * Mark any events with the given object and artifact ids as tagged, and
+ * record the tag itself.
+ *
+ * @param objectID the obj_id that this tag applies to, the id of the
+ * content that the artifact is derived from for artifact
+ * tags
+ * @param artifactID the artifact_id that this tag applies to, or null if
+ * this is a content tag
+ * @param tag the tag that should be inserted
+ *
+ * @return the event ids that match the object/artifact pair
+ */
+ Set<Long> addTag(long objectID, @Nullable Long artifactID, Tag tag) {
DBLock.lock();
-
try {
- selectEventsFromOBjectAndArtifactStmt.clearParameters();
- selectEventsFromOBjectAndArtifactStmt.setLong(1, objectID);
- if (Objects.isNull(artifactID)) {
- selectEventsFromOBjectAndArtifactStmt.setNull(2, Types.NULL);
- } else {
- selectEventsFromOBjectAndArtifactStmt.setLong(2, artifactID);
- }
- try (ResultSet executeQuery = selectEventsFromOBjectAndArtifactStmt.executeQuery();) {
- while (executeQuery.next()) {
- eventIDs.add(executeQuery.getLong("event_id"));
- }
- try (Statement updateStatement = con.createStatement();) {
- updateStatement.executeUpdate("UPDATE events SET tagged = " + (tagged ? 1 : 0)
- + " WHERE event_id IN (" + StringUtils.join(eventIDs, ",") + ")");
- }
+ Set<Long> eventIDs = markEventsTagged(objectID, artifactID, true);
+ for (Long eventID : eventIDs) {
+ insertTag(tag, eventID);
}
+ return eventIDs;
} catch (SQLException ex) {
- LOGGER.log(Level.SEVERE, "failed to mark events as " + (tagged ? "" : "(un)") + tagged, ex); // NON-NLS
+ LOGGER.log(Level.SEVERE, "failed to add tag to event", ex); // NON-NLS
} finally {
DBLock.unlock();
}
+ return Collections.emptySet();
+ }
+
+ /**
+ * insert this tag into the db
+ *
+ * NOTE: does not lock the db, must be called from inside a
+ * DBLock.lock/unlock pair
+ *
+ * @param tag the tag to insert
+ * @param eventID the event id that this tag is applied to.
+ *
+ * @throws SQLException if there was a problem executing the insert
+ */
+ private void insertTag(Tag tag, long eventID) throws SQLException {
+
+ //"INSERT OR IGNORE INTO tags (tag_id, tag_name_id,tag_name_display_name, event_id) values (?,?,?,?)"
+ insertTagStmt.clearParameters();
+ insertTagStmt.setLong(1, tag.getId());
+ insertTagStmt.setLong(2, tag.getName().getId());
+ insertTagStmt.setString(3, tag.getName().getDisplayName());
+ insertTagStmt.setLong(4, eventID);
+ insertTagStmt.executeUpdate();
+ }
+
+ /**
+ * Remove the given tag from any events with the given object and artifact
+ * ids, and update whether those events are still marked as tagged.
+ *
+ * @param objectID the obj_id that this tag applies to, the id of the
+ * content that the artifact is derived from for artifact
+ * tags
+ * @param artifactID the artifact_id that this tag applies to, or null if
+ * this is a content tag
+ * @param tag the tag that should be deleted
+ * @param stillTagged true if there are other tags still applied to this
+ * event in autopsy
+ *
+ * @return the event ids that match the object/artifact pair
+ */
+ Set<Long> deleteTag(long objectID, @Nullable Long artifactID, Tag tag, boolean stillTagged) {
+ DBLock.lock();
+ try {
+ //"DELETE FROM tags WHERE tag_id = ?
+ deleteTagStmt.clearParameters();
+ deleteTagStmt.setLong(1, tag.getId());
+ deleteTagStmt.executeUpdate();
+
+ Set<Long> eventIDs = markEventsTagged(objectID, artifactID, stillTagged);
+ return eventIDs;
+ } catch (SQLException ex) {
+ LOGGER.log(Level.SEVERE, "failed to add tag to event", ex); // NON-NLS
+ } finally {
+ DBLock.unlock();
+ }
+ return Collections.emptySet();
+ }
+
+ /**
+ * Mark any events with the given object and artifact ids as tagged or
+ * untagged.
+ *
+ * NOTE: does not lock the db, must be called from inside a
+ * DBLock.lock/unlock pair
+ *
+ * @param objectID the obj_id that this tag applies to, the id of the
+ * content that the artifact is derived from for artifact
+ * tags
+ * @param artifactID the artifact_id that this tag applies to, or null if
+ * this is a content tag
+ * @param tagged true to mark the matching events tagged, false to mark
+ * them as untagged
+ *
+ * @return the event ids that match the object/artifact pair
+ *
+ * @throws SQLException if there is an error marking the events as
+ * (un)tagged
+ */
+ private Set<Long> markEventsTagged(long objectID, @Nullable Long artifactID, boolean tagged) throws SQLException {
+ //first select the matching event ids
+ selectEventIDsFromObjectAndArtifactStmt.clearParameters();
+ selectEventIDsFromObjectAndArtifactStmt.setLong(1, objectID);
+ if (Objects.isNull(artifactID)) {
+ selectEventIDsFromObjectAndArtifactStmt.setNull(2, Types.NULL);
+ } else {
+ selectEventIDsFromObjectAndArtifactStmt.setLong(2, artifactID);
+ }
+
+ HashSet<Long> eventIDs = new HashSet<>();
+ try (ResultSet executeQuery = selectEventIDsFromObjectAndArtifactStmt.executeQuery()) {
+ while (executeQuery.next()) {
+ eventIDs.add(executeQuery.getLong("event_id"));
+ }
+ }
+
+ //then update the tagged state for all events with the selected ids
+ try (Statement updateStatement = con.createStatement()) {
+ updateStatement.executeUpdate("UPDATE events SET tagged = " + (tagged ? 1 : 0)
+ + " WHERE event_id IN (" + StringUtils.join(eventIDs, ",") + ")");
+ }
+
return eventIDs;
}
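+
+ // Illustrative sketch (reviewer note, event ids assumed): for a content tag
+ // on object 5 the two statements above reduce to roughly
+ //
+ // SELECT event_id FROM events WHERE file_id == 5 AND artifact_id IS NULL;
+ // -- -> eventIDs = {101, 102}
+ // UPDATE events SET tagged = 1 WHERE event_id IN (101,102);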
@@ -768,19 +930,6 @@ public class EventDB {
trans.rollback();
}
- boolean tableExists() {
- //TODO: use prepared statement - jm
- try (Statement createStatement = con.createStatement();
- ResultSet executeQuery = createStatement.executeQuery("SELECT name FROM sqlite_master WHERE type='table' AND name='events'")) { // NON-NLS
- if (executeQuery.getString("name").equals("events") == false) { // NON-NLS
- return false;
- }
- } catch (SQLException ex) {
- Exceptions.printStackTrace(ex);
- }
- return true;
- }
-
private void closeStatements() throws SQLException {
for (PreparedStatement pStmt : preparedStatements) {
pStmt.close();
@@ -789,7 +938,7 @@ public class EventDB {
private void configureDB() throws SQLException {
DBLock.lock();
- //this should match Sleuthkit db setupt
+ //this should match Sleuthkit db setup
try (Statement statement = con.createStatement()) {
//reduce i/o operations, we have no OS crash recovery anyway
statement.execute("PRAGMA synchronous = OFF;"); // NON-NLS
@@ -813,11 +962,13 @@ public class EventDB {
SQLiteJDBCLoader.getVersion(), SQLiteJDBCLoader.isNativeMode()
? "native" : "pure-java")); // NON-NLS
} catch (Exception exception) {
+ LOGGER.log(Level.SEVERE, "Failed to determine if sqlite-jdbc is loaded in native or pure-java mode.", exception);
}
}
private TimeLineEvent constructTimeLineEvent(ResultSet rs) throws SQLException {
return new TimeLineEvent(rs.getLong("event_id"),
+ rs.getLong("datasource_id"),
rs.getLong("file_id"),
rs.getLong("artifact_id"),
rs.getLong("time"), RootEventType.allTypes.get(rs.getInt("sub_type")),
@@ -833,7 +984,6 @@ public class EventDB {
* count all the events with the given options and return a map organizing
* the counts in a hierarchy from date > event type > count
*
- *
* @param startTime events before this time will be excluded (seconds from
* unix epoch)
* @param endTime events at or after this time will be excluded (seconds
@@ -846,7 +996,7 @@ public class EventDB {
* @return a map organizing the counts in a hierarchy from date > event type >
* count
*/
- private Map<EventType, Long> countEvents(Long startTime, Long endTime, RootFilter filter, EventTypeZoomLevel zoomLevel) {
+ private Map<EventType, Long> countEventsByType(Long startTime, Long endTime, RootFilter filter, EventTypeZoomLevel zoomLevel) {
if (Objects.equals(startTime, endTime)) {
endTime++;
}
@@ -857,9 +1007,9 @@ public class EventDB {
final boolean useSubTypes = (zoomLevel == EventTypeZoomLevel.SUB_TYPE);
//get some info about the range of dates requested
- final String queryString = "select count(*), " + useSubTypeHelper(useSubTypes)
- + " from events" + useHashHitTablesHelper(filter) + " where time >= " + startTime + " and time < " + endTime + " and " + SQLHelper.getSQLWhere(filter) // NON-NLS
- + " GROUP BY " + useSubTypeHelper(useSubTypes); // NON-NLS
+ final String queryString = "SELECT count(DISTINCT events.event_id) AS count, " + typeColumnHelper(useSubTypes)
+ + " FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) + " WHERE time >= " + startTime + " AND time < " + endTime + " AND " + SQLHelper.getSQLWhere(filter) // NON-NLS
+ + " GROUP BY " + typeColumnHelper(useSubTypes); // NON-NLS
DBLock.lock();
try (Statement stmt = con.createStatement();
@@ -869,7 +1019,7 @@ public class EventDB {
? RootEventType.allTypes.get(rs.getInt("sub_type"))
: BaseTypes.values()[rs.getInt("base_type")];
- typeMap.put(type, rs.getLong("count(*)")); // NON-NLS
+ typeMap.put(type, rs.getLong("count")); // NON-NLS
}
} catch (Exception ex) {
@@ -880,119 +1030,140 @@ public class EventDB {
return typeMap;
}
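+
+ // Illustrative sketch (reviewer note, times and filter assumed): with
+ // zoomLevel == SUB_TYPE the composed query has the shape
+ //
+ // SELECT count(DISTINCT events.event_id) AS count, sub_type
+ // FROM events
+ // WHERE time >= 1420070400 AND time < 1435708800 AND 1
+ // GROUP BY sub_type
+ //
+ // and each returned row becomes one EventType -> count entry in typeMap.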
- List<AggregateEvent> getAggregatedEvents(ZoomParams params) {
- return getAggregatedEvents(params.getTimeRange(), params.getFilter(), params.getTypeZoomLevel(), params.getDescrLOD());
- }
-
/**
- * //TODO: update javadoc //TODO: split this into helper methods
- *
- * get a list of {@link AggregateEvent}s.
- *
- * General algorithm is as follows:
- *
- * 1)get all aggregate events, via one db query. 2) sort them into a map
- * from (type, description)-> aggevent 3) for each key in map, merge the
- * events and accumulate them in a list to return
- *
- *
- * @param timeRange the Interval within in which all returned aggregate
- * events will be.
- * @param filter only events that pass the filter will be included in
- * aggregates events returned
- * @param zoomLevel only events of this level will be included
- * @param lod description level of detail to use when grouping events
+ * get a list of {@link AggregateEvent}s, clustered according to the given
+ * zoom parameters.
*
+ * @param params the zoom params that determine the zooming, filtering and
+ * clustering.
*
* @return a list of aggregate events within the given time range that pass
* the supplied filter, aggregated according to the given event type
* and description zoom levels
*/
- private List<AggregateEvent> getAggregatedEvents(Interval timeRange, RootFilter filter, EventTypeZoomLevel zoomLevel, DescriptionLOD lod) {
- String descriptionColumn = getDescriptionColumn(lod);
- final boolean useSubTypes = (zoomLevel.equals(EventTypeZoomLevel.SUB_TYPE));
+ List<AggregateEvent> getAggregatedEvents(ZoomParams params) {
+ //unpack params
+ Interval timeRange = params.getTimeRange();
+ RootFilter filter = params.getFilter();
+ DescriptionLOD descriptionLOD = params.getDescriptionLOD();
+ EventTypeZoomLevel typeZoomLevel = params.getTypeZoomLevel();
- //get some info about the time range requested
- RangeDivisionInfo rangeInfo = RangeDivisionInfo.getRangeDivisionInfo(timeRange);
- //use 'rounded out' range
- long start = timeRange.getStartMillis() / 1000;//.getLowerBound();
- long end = timeRange.getEndMillis() / 1000;//Millis();//rangeInfo.getUpperBound();
- if (Objects.equals(start, end)) {
+ //ensure length of queried interval is not 0
+ long start = timeRange.getStartMillis() / 1000;
+ long end = timeRange.getEndMillis() / 1000;
+ if (start == end) {
end++;
}
+ //get some info about the time range requested
+ RangeDivisionInfo rangeInfo = RangeDivisionInfo.getRangeDivisionInfo(timeRange);
- //get a sqlite srtftime format string
- String strfTimeFormat = getStrfTimeFormat(rangeInfo.getPeriodSize());
+ //build dynamic parts of query
+ String strfTimeFormat = SQLHelper.getStrfTimeFormat(rangeInfo);
+ String descriptionColumn = SQLHelper.getDescriptionColumn(descriptionLOD);
+ final boolean useSubTypes = typeZoomLevel.equals(EventTypeZoomLevel.SUB_TYPE);
+ String timeZone = TimeLineController.getTimeZone().get().equals(TimeZone.getDefault()) ? ", 'localtime'" : ""; // NON-NLS
+ String typeColumn = typeColumnHelper(useSubTypes);
- //effectively map from type to (map from description to events)
- Map<EventType, SetMultimap<String, AggregateEvent>> typeMap = new HashMap<>();
+ //compose query string
+ String query = "SELECT strftime('" + strfTimeFormat + "',time , 'unixepoch'" + timeZone + ") AS interval," // NON-NLS
+ + " group_concat(events.event_id) as event_ids, min(time), max(time), " + typeColumn + ", " + descriptionColumn // NON-NLS
+ + "\n FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) // NON-NLS
+ + "\n WHERE time >= " + start + " AND time < " + end + " AND " + SQLHelper.getSQLWhere(filter) // NON-NLS
+ + "\n GROUP BY interval, " + typeColumn + " , " + descriptionColumn // NON-NLS
+ + "\n ORDER BY min(time)"; // NON-NLS
- //get all agregate events in this time unit
+ // perform query and map results to AggregateEvent objects
+ List<AggregateEvent> events = new ArrayList<>();
DBLock.lock();
- String query = "select strftime('" + strfTimeFormat + "',time , 'unixepoch'" + (TimeLineController.getTimeZone().get().equals(TimeZone.getDefault()) ? ", 'localtime'" : "") + ") as interval,"
- + " group_concat(events.event_id) as event_ids, Min(time), Max(time), " + descriptionColumn + ", " + useSubTypeHelper(useSubTypes)
- + " from events" + useHashHitTablesHelper(filter) + " where " + "time >= " + start + " and time < " + end + " and " + SQLHelper.getSQLWhere(filter) // NON-NLS
- + " group by interval, " + useSubTypeHelper(useSubTypes) + " , " + descriptionColumn // NON-NLS
- + " order by Min(time)"; // NON-NLS
- // scoop up requested events in groups organized by interval, type, and desription
- try (ResultSet rs = con.createStatement().executeQuery(query);) {
+
+ try (Statement createStatement = con.createStatement();
+ ResultSet rs = createStatement.executeQuery(query)) {
while (rs.next()) {
- Interval interval = new Interval(rs.getLong("Min(time)") * 1000, rs.getLong("Max(time)") * 1000, TimeLineController.getJodaTimeZone());
- String eventIDS = rs.getString("event_ids");
- EventType type = useSubTypes ? RootEventType.allTypes.get(rs.getInt("sub_type")) : BaseTypes.values()[rs.getInt("base_type")];
-
- HashSet<Long> hashHits = new HashSet<>();
- HashSet<Long> tagged = new HashSet<>();
- try (Statement st2 = con.createStatement();
- ResultSet hashQueryResults = st2.executeQuery("select event_id , tagged, hash_hit from events where event_id in (" + eventIDS + ")");) {
- while (hashQueryResults.next()) {
- long eventID = hashQueryResults.getLong("event_id");
- if (hashQueryResults.getInt("tagged") != 0) {
- tagged.add(eventID);
- }
- if (hashQueryResults.getInt("hash_hit") != 0) {
- hashHits.add(eventID);
- }
- }
- }
-
- AggregateEvent aggregateEvent = new AggregateEvent(
- interval, // NON-NLS
- type,
- Stream.of(eventIDS.split(",")).map(Long::valueOf).collect(Collectors.toSet()), // NON-NLS
- hashHits,
- tagged,
- rs.getString(descriptionColumn),
- lod);
-
- //put events in map from type/descrition -> event
- SetMultimap<String, AggregateEvent> descrMap = typeMap.get(type);
- if (descrMap == null) {
- descrMap = HashMultimap.create();
- typeMap.put(type, descrMap);
- }
- descrMap.put(aggregateEvent.getDescription(), aggregateEvent);
+ events.add(aggregateEventHelper(rs, useSubTypes, descriptionLOD, filter.getTagsFilter()));
}
-
} catch (SQLException ex) {
- Exceptions.printStackTrace(ex);
+ LOGGER.log(Level.SEVERE, "Failed to get aggregate events with query: " + query, ex); // NON-NLS
} finally {
DBLock.unlock();
}
+ return mergeAggregateEvents(rangeInfo.getPeriodSize().getPeriod(), events);
+ }
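+
+ // Illustrative sketch (reviewer note, start/end assumed): for a DAYS period,
+ // SUB_TYPE zoom, and FULL descriptions the composed query looks roughly like
+ //
+ // SELECT strftime('%Y-%m-%dT00:00:00', time, 'unixepoch') AS interval,
+ // group_concat(events.event_id) as event_ids, min(time), max(time),
+ // sub_type, full_description
+ // FROM events
+ // WHERE time >= 1420070400 AND time < 1435708800 AND 1
+ // GROUP BY interval, sub_type, full_description
+ // ORDER BY min(time)
+ //
+ // so each result row is one (day, type, description) cluster for aggregateEventHelper().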
+
+ /**
+ * map a single row in a ResultSet to an AggregateEvent
+ *
+ * @param rs the result set whose current row should be mapped
+ * @param useSubTypes use the sub_type column if true, else use the
+ * base_type column
+ * @param descriptionLOD the description level of detail for this event
+ * @param filter the tags filter from the active root filter
+ *
+ * @return an AggregateEvent corresponding to the current row in the given
+ * result set
+ *
+ * @throws SQLException
+ */
+ private AggregateEvent aggregateEventHelper(ResultSet rs, boolean useSubTypes, DescriptionLOD descriptionLOD, TagsFilter filter) throws SQLException {
+ Interval interval = new Interval(rs.getLong("min(time)") * 1000, rs.getLong("max(time)") * 1000, TimeLineController.getJodaTimeZone());// NON-NLS
+ String eventIDsString = rs.getString("event_ids");// NON-NLS
+ Set<Long> eventIDs = SQLHelper.unGroupConcat(eventIDsString, Long::valueOf);
+ String description = rs.getString(SQLHelper.getDescriptionColumn(descriptionLOD));
+ EventType type = useSubTypes ? RootEventType.allTypes.get(rs.getInt("sub_type")) : BaseTypes.values()[rs.getInt("base_type")];// NON-NLS
+
+ Set<Long> hashHits = new HashSet<>();
+ String hashHitQuery = "SELECT group_concat(event_id) FROM events WHERE event_id IN (" + eventIDsString + ") AND hash_hit = 1";// NON-NLS
+ try (Statement stmt = con.createStatement();
+ ResultSet hashHitsRS = stmt.executeQuery(hashHitQuery)) {
+ while (hashHitsRS.next()) {
+ hashHits = SQLHelper.unGroupConcat(hashHitsRS.getString("group_concat(event_id)"), Long::valueOf);// NON-NLS
+ }
+ }
+
+ Set<Long> tagged = new HashSet<>();
+ String taggedQuery = "SELECT group_concat(event_id) FROM events WHERE event_id IN (" + eventIDsString + ") AND tagged = 1";// NON-NLS
+ try (Statement stmt = con.createStatement();
+ ResultSet taggedRS = stmt.executeQuery(taggedQuery)) {
+ while (taggedRS.next()) {
+ tagged = SQLHelper.unGroupConcat(taggedRS.getString("group_concat(event_id)"), Long::valueOf);// NON-NLS
+ }
+ }
+
+ return new AggregateEvent(interval, type, eventIDs, hashHits, tagged,
+ description, descriptionLOD);
+ }
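+
+ // Illustrative sketch (reviewer note, values assumed): a row with
+ // event_ids = "7,9,12" unpacks via SQLHelper.unGroupConcat to
+ // eventIDs = {7, 9, 12}; if the hash-hit subquery then returns
+ // group_concat(event_id) = "9", hashHits ends up as {9}, and the tagged
+ // subquery fills the tagged set the same way.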
+
+ /**
+ * merge the events in the given list if they fall within the same period.
+ * The general algorithm is as follows:
+ *
+ * 1) sort them into a map from (type, description) -> List<AggregateEvent>
+ * 2) for each key in the map, merge the events and accumulate them in a
+ * list to return
+ *
+ * @param timeUnitLength the length of one time unit at the current zoom
+ * @param preMergedEvents the aggregate events to merge
+ *
+ * @return a list of merged aggregate events
+ */
+ static private List<AggregateEvent> mergeAggregateEvents(Period timeUnitLength, List<AggregateEvent> preMergedEvents) {
+
+ //effectively map from type to (map from description to events)
+ Map<EventType, SetMultimap<String, AggregateEvent>> typeMap = new HashMap<>();
+
+ for (AggregateEvent aggregateEvent : preMergedEvents) {
+ typeMap.computeIfAbsent(aggregateEvent.getType(), eventType -> HashMultimap.create())
+ .put(aggregateEvent.getDescription(), aggregateEvent);
+ }
//result list to return
ArrayList<AggregateEvent> aggEvents = new ArrayList<>();
- //save this for use when comparing gap size
- Period timeUnitLength = rangeInfo.getPeriodSize().getPeriod();
-
//For each (type, description) key, merge agg events
for (SetMultimap<String, AggregateEvent> descrMap : typeMap.values()) {
+ //for each description ...
for (String descr : descrMap.keySet()) {
//run through the sorted events, merging together adjacent events
Iterator<AggregateEvent> iterator = descrMap.get(descr).stream()
- .sorted((AggregateEvent o1, AggregateEvent o2)
- -> Long.compare(o1.getSpan().getStartMillis(), o2.getSpan().getStartMillis()))
+ .sorted(Comparator.comparing(event -> event.getSpan().getStartMillis()))
.iterator();
AggregateEvent current = iterator.next();
while (iterator.hasNext()) {
@@ -1013,17 +1184,10 @@ public class EventDB {
aggEvents.add(current);
}
}
-
- //at this point we should have a list of aggregate events.
- //one per type/description spanning consecutive time units as determined in rangeInfo
return aggEvents;
}
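+
+ // Illustrative worked example (reviewer note, data assumed, using the usual
+ // gap-no-longer-than-one-period merge rule): with a DAYS time unit, two
+ // clusters of the same (type, description) spanning Jul 1 and Jul 2 merge
+ // into one AggregateEvent covering Jul 1-2, while a third cluster on Jul 10
+ // stays separate because the gap exceeds one day.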
- private String useHashHitTablesHelper(RootFilter filter) {
- return SQLHelper.hasActiveHashFilter(filter) ? ", hash_set_hits" : "";
- }
-
- private static String useSubTypeHelper(final boolean useSubTypes) {
+ private static String typeColumnHelper(final boolean useSubTypes) {
return useSubTypes ? "sub_type" : "base_type";
}
@@ -1050,37 +1214,6 @@ public class EventDB {
return defaultValue;
}
- private String getDescriptionColumn(DescriptionLOD lod) {
- switch (lod) {
- case FULL:
- return "full_description";
- case MEDIUM:
- return "med_description";
- case SHORT:
- default:
- return "short_description";
- }
- }
-
- private String getStrfTimeFormat(TimeUnits info) {
- switch (info) {
- case DAYS:
- return "%Y-%m-%dT00:00:00"; // NON-NLS
- case HOURS:
- return "%Y-%m-%dT%H:00:00"; // NON-NLS
- case MINUTES:
- return "%Y-%m-%dT%H:%M:00"; // NON-NLS
- case MONTHS:
- return "%Y-%m-01T00:00:00"; // NON-NLS
- case SECONDS:
- return "%Y-%m-%dT%H:%M:%S"; // NON-NLS
- case YEARS:
- return "%Y-01-01T00:00:00"; // NON-NLS
- default:
- return "%Y-%m-%dT%H:%M:%S"; // NON-NLS
- }
- }
-
private PreparedStatement prepareStatement(String queryString) throws SQLException {
PreparedStatement prepareStatement = con.prepareStatement(queryString);
preparedStatements.add(prepareStatement);
@@ -1143,16 +1276,13 @@ public class EventDB {
}
}
- private void commit(Boolean notify) {
+ private void commit() {
if (!closed) {
try {
con.commit();
// make sure we close before we update, bc they'll need locks
close();
- if (notify) {
-// fireNewEvents(newEvents);
- }
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "Error commiting events.db.", ex); // NON-NLS
rollback();
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java b/Core/src/org/sleuthkit/autopsy/timeline/db/EventsRepository.java
similarity index 60%
rename from Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java
rename to Core/src/org/sleuthkit/autopsy/timeline/db/EventsRepository.java
index ecc202473d..7a537626fd 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/db/EventsRepository.java
@@ -16,13 +16,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.sleuthkit.autopsy.timeline.events.db;
+package org.sleuthkit.autopsy.timeline.db;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
@@ -34,6 +33,7 @@ import java.util.logging.Level;
import java.util.stream.Collectors;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.collections.FXCollections;
+import javafx.collections.ObservableList;
import javafx.collections.ObservableMap;
import javax.annotation.concurrent.GuardedBy;
import javax.swing.JOptionPane;
@@ -45,27 +45,34 @@ import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.TagsManager;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.ProgressWindow;
-import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
-import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
-import org.sleuthkit.autopsy.timeline.events.TimeLineEvent;
-import org.sleuthkit.autopsy.timeline.events.type.ArtifactEventType;
-import org.sleuthkit.autopsy.timeline.events.type.EventType;
-import org.sleuthkit.autopsy.timeline.events.type.FileSystemTypes;
-import org.sleuthkit.autopsy.timeline.events.type.RootEventType;
+import org.sleuthkit.autopsy.timeline.datamodel.AggregateEvent;
+import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
+import org.sleuthkit.autopsy.timeline.datamodel.TimeLineEvent;
+import org.sleuthkit.autopsy.timeline.datamodel.eventtype.ArtifactEventType;
+import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
+import org.sleuthkit.autopsy.timeline.datamodel.eventtype.FileSystemTypes;
+import org.sleuthkit.autopsy.timeline.datamodel.eventtype.RootEventType;
import org.sleuthkit.autopsy.timeline.filters.RootFilter;
+import org.sleuthkit.autopsy.timeline.filters.TagNameFilter;
+import org.sleuthkit.autopsy.timeline.filters.TagsFilter;
import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
+import org.sleuthkit.datamodel.BlackboardArtifactTag;
+import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.Tag;
+import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
/**
- * Provides public API (over EventsDB) to access events. In theory this
- * insulates the rest of the timeline module form the details of the db
+ * Provides higher-level public API (over EventsDB) to access events. In theory
+ * this insulates the rest of the timeline module from the details of the db
* implementation. Since there are no other implementations of the database or
* clients of this class, and no Java Interface defined yet, in practice this
- * just delegates everything to the eventDB
+ * just delegates everything to the eventDB. Some results are also cached by
+ * this layer.
*
* Concurrency Policy:
*
@@ -95,12 +102,17 @@ public class EventsRepository {
private final ObservableMap<Long, String> datasourcesMap = FXCollections.observableHashMap();
private final ObservableMap<Long, String> hashSetMap = FXCollections.observableHashMap();
+ private final ObservableList<TagName> tagNames = FXCollections.observableArrayList();
private final Case autoCase;
public Case getAutoCase() {
return autoCase;
}
+ public ObservableList<TagName> getTagNames() {
+ return tagNames;
+ }
+
synchronized public ObservableMap<Long, String> getDatasourcesMap() {
return datasourcesMap;
}
@@ -125,7 +137,7 @@ public class EventsRepository {
this.autoCase = autoCase;
//TODO: we should check that case is open, or get passed a case object/directory -jm
this.eventDB = EventDB.getEventDB(autoCase);
- populateFilterMaps(autoCase.getSleuthkitCase());
+ populateFilterData(autoCase.getSleuthkitCase());
idToEventCache = CacheBuilder.newBuilder()
.maximumSize(5000L)
.expireAfterAccess(10, TimeUnit.MINUTES)
@@ -218,6 +230,94 @@ public class EventsRepository {
return eventDB.getSpanningInterval(eventIDs);
}
+ public boolean hasNewColumns() {
+ return eventDB.hasNewColumns();
+ }
+
+ /**
+ * get a count of the tag names applied to the given event ids, as a map
+ * from tag name display name to the count of tag applications
+ *
+ * @param eventIDsWithTags the event ids to get the tag counts map for
+ *
+ * @return a map from tag name display name to count of applications
+ */
+ public Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) {
+ return eventDB.getTagCountsByTagName(eventIDsWithTags);
+ }
+
+ /**
+ * use the given SleuthkitCase to update the data used to determine the
+ * available filters.
+ *
+ * @param skCase
+ */
+ synchronized private void populateFilterData(SleuthkitCase skCase) {
+
+ for (Map.Entry<Long, String> hashSet : eventDB.getHashSetNames().entrySet()) {
+ hashSetMap.putIfAbsent(hashSet.getKey(), hashSet.getValue());
+ }
+ //because there is no way to remove a datasource we only add to this map.
+ for (Long id : eventDB.getDataSourceIDs()) {
+ try {
+ datasourcesMap.putIfAbsent(id, skCase.getContentById(id).getDataSource().getName());
+ } catch (TskCoreException ex) {
+ LOGGER.log(Level.SEVERE, "Failed to get datasource by ID.", ex);
+ }
+ }
+
+ try {
+ //should this only be tags applied to files or event bearing artifacts?
+ tagNames.setAll(skCase.getTagNamesInUse());
+ } catch (TskCoreException ex) {
+ LOGGER.log(Level.SEVERE, "Failed to get tag names in use.", ex);
+ }
+ }
+
+ synchronized public Set<Long> addTag(long objID, Long artifactID, Tag tag) {
+ Set<Long> updatedEventIDs = eventDB.addTag(objID, artifactID, tag);
+ if (!updatedEventIDs.isEmpty()) {
+ invalidateCaches(updatedEventIDs);
+ }
+ return updatedEventIDs;
+ }
+
+ synchronized public Set<Long> deleteTag(long objID, Long artifactID, Tag tag, boolean tagged) {
+ Set<Long> updatedEventIDs = eventDB.deleteTag(objID, artifactID, tag, tagged);
+ if (!updatedEventIDs.isEmpty()) {
+ invalidateCaches(updatedEventIDs);
+ }
+ return updatedEventIDs;
+ }
+
+ synchronized private void invalidateCaches(Set<Long> updatedEventIDs) {
+ eventCountsCache.invalidateAll();
+ aggregateEventsCache.invalidateAll();
+ idToEventCache.invalidateAll(updatedEventIDs);
+ try {
+ tagNames.setAll(autoCase.getSleuthkitCase().getTagNamesInUse());
+ } catch (TskCoreException ex) {
+ LOGGER.log(Level.SEVERE, "Failed to get tag names in use.", ex);
+ }
+ }
+
+ /**
+ * "sync" the given tags filter with the tagnames in use: Disable filters
+ * for tags that are not in use in the case, and add new filters for tags
+ * that don't have them. New filters are selected by default.
+ *
+ * @param tagsFilter the tags filter to modify so it is consistent with the
+ * tags in use in the case
+ */
+ public void syncTagsFilter(TagsFilter tagsFilter) {
+ for (TagName t : tagNames) {
+ tagsFilter.addSubFilter(new TagNameFilter(t, autoCase));
+ }
+ for (TagNameFilter t : tagsFilter.getSubFilters()) {
+ t.setDisabled(tagNames.contains(t.getTagName()) == false);
+ }
+ }
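+
+ // Illustrative sketch (reviewer note, case state assumed): if tagNames holds
+ // only "Bookmark" while tagsFilter already has sub-filters for "Bookmark"
+ // and "Follow Up", the first loop adds no duplicate for "Bookmark" (assuming
+ // TagsFilter.addSubFilter ignores duplicates) and the second loop disables
+ // the "Follow Up" filter because its tag name is no longer in use.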
+
synchronized public void rebuildRepository(Runnable r) {
if (dbPopulationWorker != null) {
dbPopulationWorker.cancel(true);
@@ -227,8 +327,110 @@ public class EventsRepository {
dbPopulationWorker.execute();
}
- public boolean hasDataSourceInfo() {
- return eventDB.hasNewColumns();
+ synchronized public void rebuildTags(Runnable r) {
+ if (dbPopulationWorker != null) {
+ dbPopulationWorker.cancel(true);
+
+ }
+ dbPopulationWorker = new RebuildTagsWorker(r);
+ dbPopulationWorker.execute();
+ }
+
+ private class RebuildTagsWorker extends SwingWorker<Void, ProgressWindow.ProgressUpdate> {
+
+ private final ProgressWindow progressDialog;
+
+ //TODO: can we avoid this with a state listener? does it amount to the same thing?
+ //post population operation to execute
+ private final Runnable postPopulationOperation;
+ private final SleuthkitCase skCase;
+ private final TagsManager tagsManager;
+
+ public RebuildTagsWorker(Runnable postPopulationOperation) {
+ progressDialog = new ProgressWindow(null, true, this);
+ progressDialog.setVisible(true);
+
+ skCase = autoCase.getSleuthkitCase();
+ tagsManager = autoCase.getServices().getTagsManager();
+
+ this.postPopulationOperation = postPopulationOperation;
+ }
+
+ @Override
+ protected Void doInBackground() throws Exception {
+
+ EventDB.EventTransaction trans = eventDB.beginTransaction();
+ LOGGER.log(Level.INFO, "dropping old tags"); // NON-NLS
+ eventDB.reInitializeTags();
+
+ LOGGER.log(Level.INFO, "updating content tags"); // NON-NLS
+ List<ContentTag> contentTags = tagsManager.getAllContentTags();
+ int size = contentTags.size();
+ for (int i = 0; i < size; i++) {
+ if (isCancelled()) {
+ break;
+ }
+ publish(new ProgressWindow.ProgressUpdate(i, size, "refreshing file tags", ""));
+ ContentTag contentTag = contentTags.get(i);
+ eventDB.addTag(contentTag.getContent().getId(), null, contentTag);
+ }
+ LOGGER.log(Level.INFO, "updating artifact tags"); // NON-NLS
+ List<BlackboardArtifactTag> artifactTags = tagsManager.getAllBlackboardArtifactTags();
+ size = artifactTags.size();
+ for (int i = 0; i < size; i++) {
+ if (isCancelled()) {
+ break;
+ }
+ publish(new ProgressWindow.ProgressUpdate(i, size, "refreshing result tags", ""));
+ BlackboardArtifactTag artifactTag = artifactTags.get(i);
+ eventDB.addTag(artifactTag.getContent().getId(), artifactTag.getArtifact().getArtifactID(), artifactTag);
+ }
+
+ LOGGER.log(Level.INFO, "committing tags"); // NON-NLS
+ publish(new ProgressWindow.ProgressUpdate(0, -1, "committing tag changes", ""));
+ if (isCancelled()) {
+ eventDB.rollBackTransaction(trans);
+ } else {
+ eventDB.commitTransaction(trans);
+ }
+
+ populateFilterData(skCase);
+ invalidateCaches();
+
+ return null;
+ }
+
+ /**
+ * handle intermediate 'results': just update progress dialog
+ *
+ * @param chunks
+ */
+ @Override
+ protected void process(List<ProgressWindow.ProgressUpdate> chunks) {
+ super.process(chunks);
+ ProgressWindow.ProgressUpdate chunk = chunks.get(chunks.size() - 1);
+ progressDialog.update(chunk);
+ }
+
+ @Override
+ @NbBundle.Messages("msgdlg.tagsproblem.text=There was a problem refreshing the tagged events."
+ + " Some events may have inacurate tags. See the log for details.")
+ protected void done() {
+ super.done();
+ try {
+ progressDialog.close();
+ get();
+ } catch (CancellationException ex) {
+ LOGGER.log(Level.WARNING, "Database population was cancelled by the user. Not all events may be present or accurate. See the log for details.", ex); // NON-NLS
+ } catch (InterruptedException | ExecutionException ex) {
+ LOGGER.log(Level.WARNING, "Exception while populating database.", ex); // NON-NLS
+ JOptionPane.showMessageDialog(null, Bundle.msgdlg_tagsproblem_text());
+ } catch (Exception ex) {
+ LOGGER.log(Level.WARNING, "Unexpected exception while populating database.", ex); // NON-NLS
+ JOptionPane.showMessageDialog(null, Bundle.msgdlg_tagsproblem_text());
+ }
+ postPopulationOperation.run(); //execute post db population operation
+ }
}
private class DBPopulationWorker extends SwingWorker<Void, ProgressWindow.ProgressUpdate> {
@@ -256,7 +458,7 @@ public class EventsRepository {
"progressWindow.msg.reinit_db=(re)initializing events database",
"progressWindow.msg.commitingDb=committing events db"})
protected Void doInBackground() throws Exception {
- process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, Bundle.progressWindow_msg_reinit_db(), "")));
+ publish(new ProgressWindow.ProgressUpdate(0, -1, Bundle.progressWindow_msg_reinit_db(), ""));
//reset database
//TODO: can we do more incremental updates? -jm
eventDB.reInitializeDB();
@@ -265,7 +467,7 @@ public class EventsRepository {
List<Long> files = skCase.findAllFileIdsWhere("name != '.' AND name != '..'");
final int numFiles = files.size();
- process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, numFiles, Bundle.progressWindow_msg_populateMacEventsFiles(), "")));
+ publish(new ProgressWindow.ProgressUpdate(0, numFiles, Bundle.progressWindow_msg_populateMacEventsFiles(), ""));
//insert file events into db
int i = 1;
@@ -289,26 +491,26 @@ public class EventsRepository {
String rootFolder = StringUtils.substringBetween(parentPath, "/", "/");
String shortDesc = datasourceName + "/" + StringUtils.defaultIfBlank(rootFolder, "");
String medD = datasourceName + parentPath;
- final TskData.FileKnown known = f.getKnown();
- Set<String> hashSets = f.getHashSetNames() ;
- boolean tagged = !tagsManager.getContentTagsByContent(f).isEmpty();
+ final TskData.FileKnown known = f.getKnown();
+ Set<String> hashSets = f.getHashSetNames();
+ List<ContentTag> tags = tagsManager.getContentTagsByContent(f);
//insert it into the db if time is > 0 => time is legitimate (drops logical files)
if (f.getAtime() > 0) {
- eventDB.insertEvent(f.getAtime(), FileSystemTypes.FILE_ACCESSED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans);
+ eventDB.insertEvent(f.getAtime(), FileSystemTypes.FILE_ACCESSED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tags, trans);
}
if (f.getMtime() > 0) {
- eventDB.insertEvent(f.getMtime(), FileSystemTypes.FILE_MODIFIED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans);
+ eventDB.insertEvent(f.getMtime(), FileSystemTypes.FILE_MODIFIED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tags, trans);
}
if (f.getCtime() > 0) {
- eventDB.insertEvent(f.getCtime(), FileSystemTypes.FILE_CHANGED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans);
+ eventDB.insertEvent(f.getCtime(), FileSystemTypes.FILE_CHANGED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tags, trans);
}
if (f.getCrtime() > 0) {
- eventDB.insertEvent(f.getCrtime(), FileSystemTypes.FILE_CREATED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans);
+ eventDB.insertEvent(f.getCrtime(), FileSystemTypes.FILE_CREATED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tags, trans);
}
- process(Arrays.asList(new ProgressWindow.ProgressUpdate(i, numFiles,
- Bundle.progressWindow_msg_populateMacEventsFiles(), f.getName())));
+ publish(new ProgressWindow.ProgressUpdate(i, numFiles,
+ Bundle.progressWindow_msg_populateMacEventsFiles(), f.getName()));
}
} catch (TskCoreException tskCoreException) {
LOGGER.log(Level.WARNING, "failed to insert mac event for file : " + fID, tskCoreException); // NON-NLS
@@ -329,14 +531,15 @@ public class EventsRepository {
}
}
- process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, Bundle.progressWindow_msg_commitingDb(), "")));
+ publish(new ProgressWindow.ProgressUpdate(0, -1, Bundle.progressWindow_msg_commitingDb(), ""));
+
if (isCancelled()) {
eventDB.rollBackTransaction(trans);
} else {
- eventDB.commitTransaction(trans, true);
+ eventDB.commitTransaction(trans);
}
- populateFilterMaps(skCase);
+ populateFilterData(skCase);
invalidateCaches();
return null;
@@ -363,7 +566,7 @@ public class EventsRepository {
progressDialog.close();
get();
} catch (CancellationException ex) {
- LOGGER.log(Level.INFO, "Database population was cancelled by the user. Not all events may be present or accurate. See the log for details.", ex); // NON-NLS
+ LOGGER.log(Level.WARNING, "Database population was cancelled by the user. Not all events may be present or accurate. See the log for details.", ex); // NON-NLS
} catch (InterruptedException | ExecutionException ex) {
LOGGER.log(Level.WARNING, "Exception while populating database.", ex); // NON-NLS
JOptionPane.showMessageDialog(null, Bundle.msgdlg_problem_text());
@@ -388,64 +591,32 @@ public class EventsRepository {
final ArrayList<BlackboardArtifact> blackboardArtifacts = skCase.getBlackboardArtifacts(type.getArtifactType());
final int numArtifacts = blackboardArtifacts.size();
- process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, numArtifacts,
- Bundle.progressWindow_populatingXevents(type.toString()), "")));
+ for (int i = 0; i < numArtifacts; i++) {
+ publish(new ProgressWindow.ProgressUpdate(i, numArtifacts,
+ Bundle.progressWindow_populatingXevents(type.getDisplayName()), ""));
- int i = 0;
- for (final BlackboardArtifact bbart : blackboardArtifacts) {
//for each artifact, extract the relevant information for the descriptions
+ BlackboardArtifact bbart = blackboardArtifacts.get(i);
ArtifactEventType.AttributeEventDescription eventDescription = ArtifactEventType.AttributeEventDescription.buildEventDescription(type, bbart);
- if (eventDescription != null && eventDescription.getTime() > 0L) { //insert it into the db if time is > 0 => time is legitimate
- long datasourceID = skCase.getContentById(bbart.getObjectID()).getDataSource().getId();
+ //insert it into the db if time is > 0 => time is legitimate
+ if (eventDescription != null && eventDescription.getTime() > 0L) {
+ long objectID = bbart.getObjectID();
+ AbstractFile f = skCase.getAbstractFileById(objectID);
+ long datasourceID = f.getDataSource().getId();
+ long artifactID = bbart.getArtifactID();
+ Set<String> hashSets = f.getHashSetNames();
+ List<BlackboardArtifactTag> tags = tagsManager.getBlackboardArtifactTagsByArtifact(bbart);
+ String fullDescription = eventDescription.getFullDescription();
+ String medDescription = eventDescription.getMedDescription();
+ String shortDescription = eventDescription.getShortDescription();
- AbstractFile f = skCase.getAbstractFileById(bbart.getObjectID());
- Set<String> hashSets = f.getHashSetNames();
- boolean tagged = tagsManager.getBlackboardArtifactTagsByArtifact(bbart).isEmpty() == false;
-
- eventDB.insertEvent(eventDescription.getTime(), type, datasourceID, bbart.getObjectID(), bbart.getArtifactID(), eventDescription.getFullDescription(), eventDescription.getMedDescription(), eventDescription.getShortDescription(), null, hashSets, tagged, trans);
+ eventDB.insertEvent(eventDescription.getTime(), type, datasourceID, objectID, artifactID, fullDescription, medDescription, shortDescription, null, hashSets, tags, trans);
}
-
- i++;
- process(Arrays.asList(new ProgressWindow.ProgressUpdate(i, numArtifacts,
- Bundle.progressWindow_populatingXevents(type), "")));
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "There was a problem getting events with sub type = " + type.toString() + ".", ex); // NON-NLS
}
}
}
-
- /**
- * use the given SleuthkitCase to look up the names for the datasources in
- * the events table.
- *
- * TODO: we could keep a table of id -> name in the eventdb but I am wary of
- * having too much redundant info.
- *
- * @param skCase
- */
- synchronized private void populateFilterMaps(SleuthkitCase skCase) {
-
- for (Map.Entry<Long, String> hashSet : eventDB.getHashSetNames().entrySet()) {
- hashSetMap.putIfAbsent(hashSet.getKey(), hashSet.getValue());
- }
- //because there is no way to remove a datasource we only add to this map.
- for (Long id : eventDB.getDataSourceIDs()) {
- try {
- datasourcesMap.putIfAbsent(id, skCase.getContentById(id).getDataSource().getName());
- } catch (TskCoreException ex) {
- LOGGER.log(Level.SEVERE, "Failed to get datasource by ID.", ex);
- }
- }
- }
-
- synchronized public Set<Long> markEventsTagged(long objID, Long artifactID, boolean tagged) {
- Set<Long> updatedEventIDs = eventDB.markEventsTagged(objID, artifactID, tagged);
- if (!updatedEventIDs.isEmpty()) {
- aggregateEventsCache.invalidateAll();
- idToEventCache.invalidateAll(updatedEventIDs);
- }
- return updatedEventIDs;
- }
}
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/MultipleTransactionException.java b/Core/src/org/sleuthkit/autopsy/timeline/db/MultipleTransactionException.java
similarity index 95%
rename from Core/src/org/sleuthkit/autopsy/timeline/events/db/MultipleTransactionException.java
rename to Core/src/org/sleuthkit/autopsy/timeline/db/MultipleTransactionException.java
index d4a0e999c7..197a920dea 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/MultipleTransactionException.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/db/MultipleTransactionException.java
@@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.sleuthkit.autopsy.timeline.events.db;
+package org.sleuthkit.autopsy.timeline.db;
/**
*
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/db/SQLHelper.java b/Core/src/org/sleuthkit/autopsy/timeline/db/SQLHelper.java
new file mode 100644
index 0000000000..d807bcc741
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/timeline/db/SQLHelper.java
@@ -0,0 +1,283 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2013-15 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.timeline.db;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import javax.annotation.Nonnull;
+import org.apache.commons.lang3.StringUtils;
+import org.sleuthkit.autopsy.timeline.datamodel.eventtype.RootEventType;
+import org.sleuthkit.autopsy.timeline.filters.AbstractFilter;
+import org.sleuthkit.autopsy.timeline.filters.DataSourceFilter;
+import org.sleuthkit.autopsy.timeline.filters.DataSourcesFilter;
+import org.sleuthkit.autopsy.timeline.filters.Filter;
+import org.sleuthkit.autopsy.timeline.filters.HashHitsFilter;
+import org.sleuthkit.autopsy.timeline.filters.HashSetFilter;
+import org.sleuthkit.autopsy.timeline.filters.HideKnownFilter;
+import org.sleuthkit.autopsy.timeline.filters.IntersectionFilter;
+import org.sleuthkit.autopsy.timeline.filters.RootFilter;
+import org.sleuthkit.autopsy.timeline.filters.TagNameFilter;
+import org.sleuthkit.autopsy.timeline.filters.TagsFilter;
+import org.sleuthkit.autopsy.timeline.filters.TextFilter;
+import org.sleuthkit.autopsy.timeline.filters.TypeFilter;
+import org.sleuthkit.autopsy.timeline.filters.UnionFilter;
+import org.sleuthkit.autopsy.timeline.utils.RangeDivisionInfo;
+import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD;
+import static org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD.FULL;
+import static org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD.MEDIUM;
+import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.DAYS;
+import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.HOURS;
+import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.MINUTES;
+import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.MONTHS;
+import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.SECONDS;
+import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.YEARS;
+import org.sleuthkit.datamodel.TskData;
+
+/**
+ * Static helper methods for converting between java data model objects and
+ * sqlite queries.
+ */
+public class SQLHelper {
+
+ static String useHashHitTablesHelper(RootFilter filter) {
+ HashHitsFilter hashHitFilter = filter.getHashHitsFilter();
+ return hashHitFilter.isSelected() && false == hashHitFilter.isDisabled() ? " LEFT JOIN hash_set_hits " : " ";
+ }
+
+ static String useTagTablesHelper(RootFilter filter) {
+ TagsFilter tagsFilter = filter.getTagsFilter();
+ return tagsFilter.isSelected() && false == tagsFilter.isDisabled() ? " LEFT JOIN tags " : " ";
+ }
+
+ /**
+ * take the result of a group_concat SQLite operation and split it into a
+ * set of X using the mapper to to convert from string to X
+ *
+ * @param the type of elements to return
+ * @param groupConcat a string containing the group_concat result ( a comma
+ * separated list)
+ * @param mapper a function from String to X
+ *
+ * @return a Set of X, each element mapped from one element of the original
+ * comma delimited string
+ */
+ static <X> Set<X> unGroupConcat(String groupConcat, Function<String, X> mapper) {
+ return StringUtils.isBlank(groupConcat) ? Collections.emptySet()
+ : Stream.of(groupConcat.split(","))
+ .map(mapper::apply)
+ .collect(Collectors.toSet());
+ }
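+
+ // Illustrative usage (reviewer note):
+ //
+ // Set<Long> ids = SQLHelper.unGroupConcat("1,2,3", Long::valueOf);
+ // // ids == {1L, 2L, 3L}; a null or blank input yields the empty set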
+
+ private static String getSQLWhere(IntersectionFilter<?> filter) {
+ return filter.getSubFilters().stream()
+ .filter(Filter::isSelected)
+ .map(SQLHelper::getSQLWhere)
+ .collect(Collectors.joining(" and ", "( ", ")"));
+ }
+
+ private static String getSQLWhere(UnionFilter<?> filter) {
+ return filter.getSubFilters().stream()
+ .filter(Filter::isSelected).map(SQLHelper::getSQLWhere)
+ .collect(Collectors.joining(" or ", "( ", ")"));
+ }
+
+ static String getSQLWhere(RootFilter filter) {
+ return getSQLWhere((IntersectionFilter) filter);
+ }
+
+ private static String getSQLWhere(Filter filter) {
+ String result = "";
+ if (filter == null) {
+ return "1";
+ } else if (filter instanceof TagsFilter) {
+ result = getSQLWhere((TagsFilter) filter);
+ } else if (filter instanceof HashHitsFilter) {
+ result = getSQLWhere((HashHitsFilter) filter);
+ } else if (filter instanceof DataSourceFilter) {
+ result = getSQLWhere((DataSourceFilter) filter);
+ } else if (filter instanceof DataSourcesFilter) {
+ result = getSQLWhere((DataSourcesFilter) filter);
+ } else if (filter instanceof HideKnownFilter) {
+ result = getSQLWhere((HideKnownFilter) filter);
+ } else if (filter instanceof HashHitsFilter) {
+ result = getSQLWhere((HashHitsFilter) filter);
+ } else if (filter instanceof TextFilter) {
+ result = getSQLWhere((TextFilter) filter);
+ } else if (filter instanceof TypeFilter) {
+ result = getSQLWhere((TypeFilter) filter);
+ } else if (filter instanceof IntersectionFilter) {
+ result = getSQLWhere((IntersectionFilter) filter);
+ } else if (filter instanceof UnionFilter) {
+ result = getSQLWhere((UnionFilter) filter);
+ } else {
+ return "1";
+ }
+ result = StringUtils.deleteWhitespace(result).equals("(1and1and1)") ? "1" : result;
+ result = StringUtils.deleteWhitespace(result).equals("()") ? "1" : result;
+ return result;
+ }
+
+ private static String getSQLWhere(HideKnownFilter filter) {
+ if (filter.isSelected()) {
+ return "(known_state IS NOT '" + TskData.FileKnown.KNOWN.getFileKnownValue() + "')"; // NON-NLS
+ } else {
+ return "1";
+ }
+ }
+
+ private static String getSQLWhere(TagsFilter filter) {
+ if (filter.isSelected()
+ && (false == filter.isDisabled())
+ && (filter.getSubFilters().isEmpty() == false)) {
+ String tagNameIDs = filter.getSubFilters().stream()
+ .filter((TagNameFilter t) -> t.isSelected() && !t.isDisabled())
+ .map((TagNameFilter t) -> String.valueOf(t.getTagName().getId()))
+ .collect(Collectors.joining(", ", "(", ")"));
+ return "(events.event_id == tags.event_id AND "
+ + "tags.tag_name_id IN " + tagNameIDs + ") ";
+ } else {
+ return "1";
+ }
+
+ }
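+
+ // Illustrative sketch (reviewer note, tag name ids assumed): with "Bookmark"
+ // (id 1) and "Notable" (id 3) selected this produces
+ //
+ // (events.event_id == tags.event_id AND tags.tag_name_id IN (1, 3))
+ //
+ // which only matches rows once useTagTablesHelper() has LEFT JOINed the
+ // tags table into the events query.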
+
+ private static String getSQLWhere(HashHitsFilter filter) {
+ if (filter.isSelected()
+ && (false == filter.isDisabled())
+ && (filter.getSubFilters().isEmpty() == false)) {
+ String hashSetIDs = filter.getSubFilters().stream()
+ .filter((HashSetFilter t) -> t.isSelected() && !t.isDisabled())
+ .map((HashSetFilter t) -> String.valueOf(t.getHashSetID()))
+ .collect(Collectors.joining(", ", "(", ")"));
+ return "(hash_set_hits.hash_set_id IN " + hashSetIDs + " AND hash_set_hits.event_id == events.event_id)";
+ } else {
+ return "1";
+ }
+ }
+
+ private static String getSQLWhere(DataSourceFilter filter) {
+ if (filter.isSelected()) {
+ return "(datasource_id = '" + filter.getDataSourceID() + "')";
+ } else {
+ return "1";
+ }
+ }
+
+ private static String getSQLWhere(DataSourcesFilter filter) {
+ return (filter.isSelected()) ? "(datasource_id in ("
+ + filter.getSubFilters().stream()
+ .filter(AbstractFilter::isSelected)
+ .map((dataSourceFilter) -> String.valueOf(dataSourceFilter.getDataSourceID()))
+ .collect(Collectors.joining(", ")) + "))" : "1";
+ }
+
+ private static String getSQLWhere(TextFilter filter) {
+ if (filter.isSelected()) {
+ if (StringUtils.isBlank(filter.getText())) {
+ return "1";
+ }
+ String strippedFilterText = StringUtils.strip(filter.getText());
+ return "((med_description like '%" + strippedFilterText + "%')"
+ + " or (full_description like '%" + strippedFilterText + "%')"
+ + " or (short_description like '%" + strippedFilterText + "%'))";
+ } else {
+ return "1";
+ }
+ }
+
+ /**
+ * generate a sql where clause for the given type filter, while trying to be
+ * as simple as possible to improve performance.
+ *
+ * @param typeFilter the type filter to generate a where clause for
+ *
+ * @return a SQL where clause (without the WHERE keyword) corresponding to
+ * the given type filter
+ */
+ private static String getSQLWhere(TypeFilter typeFilter) {
+ if (typeFilter.isSelected() == false) {
+ return "0";
+ } else if (typeFilter.getEventType() instanceof RootEventType) {
+ if (typeFilter.getSubFilters().stream()
+ .allMatch(subFilter -> subFilter.isSelected() && subFilter.getSubFilters().stream().allMatch(Filter::isSelected))) {
+ return "1"; //then collapse clause to true
+ }
+ }
+ return "(sub_type IN (" + StringUtils.join(getActiveSubTypes(typeFilter), ",") + "))";
+ }
+
+ private static List<Integer> getActiveSubTypes(TypeFilter filter) {
+ if (filter.isSelected()) {
+ if (filter.getSubFilters().isEmpty()) {
+ return Collections.singletonList(RootEventType.allTypes.indexOf(filter.getEventType()));
+ } else {
+ return filter.getSubFilters().stream().flatMap((Filter t) -> getActiveSubTypes((TypeFilter) t).stream()).collect(Collectors.toList());
+ }
+ } else {
+ return Collections.emptyList();
+ }
+ }
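+
+ // Illustrative sketch (reviewer note, type indices assumed): with only some
+ // sub-types selected, getSQLWhere(TypeFilter) emits e.g. "(sub_type IN (11,13))";
+ // when every filter under the root type is selected the clause collapses to
+ // "1" so sqlite can skip the IN test entirely.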
+
+ /**
+ * get a sqlite strftime format string that will allow us to group by the
+ * requested period size. That is, with all info more granular than that
+ * requested dropped (replaced with zeros).
+ *
+ * @param info the {@link RangeDivisionInfo} with the requested period size
+ *
+ * @return a String formatted according to the sqlite strftime spec
+ *
+ * @see https://www.sqlite.org/lang_datefunc.html
+ */
+ static String getStrfTimeFormat(@Nonnull RangeDivisionInfo info) {
+ switch (info.getPeriodSize()) {
+ case YEARS:
+ return "%Y-01-01T00:00:00"; // NON-NLS
+ case MONTHS:
+ return "%Y-%m-01T00:00:00"; // NON-NLS
+ case DAYS:
+ return "%Y-%m-%dT00:00:00"; // NON-NLS
+ case HOURS:
+ return "%Y-%m-%dT%H:00:00"; // NON-NLS
+ case MINUTES:
+ return "%Y-%m-%dT%H:%M:00"; // NON-NLS
+ case SECONDS:
+ default: //seconds - should never happen
+ return "%Y-%m-%dT%H:%M:%S"; // NON-NLS
+ }
+ }
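+
+ // Illustrative example (reviewer note): for a DAYS period the format is
+ // "%Y-%m-%dT00:00:00", so every event on the same (UTC) day maps to the same
+ // interval key:
+ //
+ // strftime('%Y-%m-%dT00:00:00', 1435708800, 'unixepoch') -> '2015-07-01T00:00:00'
+ // strftime('%Y-%m-%dT00:00:00', 1435751999, 'unixepoch') -> '2015-07-01T00:00:00'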
+
+ static String getDescriptionColumn(DescriptionLOD lod) {
+ switch (lod) {
+ case FULL:
+ return "full_description";
+ case MEDIUM:
+ return "med_description";
+ case SHORT:
+ default:
+ return "short_description";
+ }
+ }
+
+ private SQLHelper() {
+ }
+}
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/EventsTaggedEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/events/RefreshRequestedEvent.java
similarity index 64%
rename from Core/src/org/sleuthkit/autopsy/timeline/events/EventsTaggedEvent.java
rename to Core/src/org/sleuthkit/autopsy/timeline/events/RefreshRequestedEvent.java
index 114fe053a8..f8cdfe23e7 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/events/EventsTaggedEvent.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/events/RefreshRequestedEvent.java
@@ -18,22 +18,13 @@
*/
package org.sleuthkit.autopsy.timeline.events;
-import java.util.Collections;
-import java.util.Set;
-
/**
- * Posted to eventbus when a tag as been added to a file artifact that
- * corresponds to an event
+ * A "local" event published by filteredEventsModel to indicate that the user
+ * requested that the current visualization be refreshed without changing any
+ * of the parameters (to include more up-to-date tag data, for example).
+ *
+ * This event is not intended for use outside of the timeline module.
*/
-public class EventsTaggedEvent {
+public class RefreshRequestedEvent {
- private final Set<Long> eventIDs;
-
- public EventsTaggedEvent(Set<Long> eventIDs) {
- this.eventIDs = eventIDs;
- }
-
- public Set<Long> getEventIDs() {
- return Collections.unmodifiableSet(eventIDs);
- }
}
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/EventsUnTaggedEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/events/TagsUpdatedEvent.java
similarity index 60%
rename from Core/src/org/sleuthkit/autopsy/timeline/events/EventsUnTaggedEvent.java
rename to Core/src/org/sleuthkit/autopsy/timeline/events/TagsUpdatedEvent.java
index 474676b65a..167069837c 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/events/EventsUnTaggedEvent.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/events/TagsUpdatedEvent.java
@@ -18,23 +18,26 @@
*/
package org.sleuthkit.autopsy.timeline.events;
-import java.util.Collections;
+import com.google.common.collect.ImmutableSet;
import java.util.Set;
/**
- * Posted to eventbus when a tag as been removed from a file artifact that
- * corresponds to an event
+ * A "local" event published by filteredEventsModel to indicate that events have
+ * been (un)tagged. This event is not intended for use outside of the timeline
+ * module.
*/
-public class EventsUnTaggedEvent {
+public class TagsUpdatedEvent {
- private final Set<Long> eventIDs;
+ private final Set<Long> updatedEventIDs;
- public Set<Long> getEventIDs() {
- return Collections.unmodifiableSet(eventIDs);
+
+ public ImmutableSet<Long> getUpdatedEventIDs() {
+ return ImmutableSet.copyOf(updatedEventIDs);
}
- public EventsUnTaggedEvent(Set eventIDs) {
- this.eventIDs = eventIDs;
- }
+ public TagsUpdatedEvent(Set<Long> updatedEventIDs) {
+ this.updatedEventIDs = updatedEventIDs;
+
+ }
}
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/SQLHelper.java b/Core/src/org/sleuthkit/autopsy/timeline/events/db/SQLHelper.java
deleted file mode 100644
index 1c63f9f2d9..0000000000
--- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/SQLHelper.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * To change this license header, choose License Headers in Project Properties.
- * To change this template file, choose Tools | Templates
- * and open the template in the editor.
- */
-package org.sleuthkit.autopsy.timeline.events.db;
-
-import java.util.Collections;
-import java.util.List;
-import java.util.stream.Collectors;
-import org.apache.commons.lang3.StringUtils;
-import org.sleuthkit.autopsy.timeline.events.type.RootEventType;
-import org.sleuthkit.autopsy.timeline.filters.AbstractFilter;
-import org.sleuthkit.autopsy.timeline.filters.DataSourceFilter;
-import org.sleuthkit.autopsy.timeline.filters.DataSourcesFilter;
-import org.sleuthkit.autopsy.timeline.filters.Filter;
-import org.sleuthkit.autopsy.timeline.filters.HashHitsFilter;
-import org.sleuthkit.autopsy.timeline.filters.HashSetFilter;
-import org.sleuthkit.autopsy.timeline.filters.HideKnownFilter;
-import org.sleuthkit.autopsy.timeline.filters.IntersectionFilter;
-import org.sleuthkit.autopsy.timeline.filters.RootFilter;
-import org.sleuthkit.autopsy.timeline.filters.TextFilter;
-import org.sleuthkit.autopsy.timeline.filters.TypeFilter;
-import org.sleuthkit.autopsy.timeline.filters.UnionFilter;
-import org.sleuthkit.datamodel.TskData;
-
-/**
- *
- */
-public class SQLHelper {
-
- private static List<Integer> getActiveSubTypes(TypeFilter filter) {
- if (filter.isSelected()) {
- if (filter.getSubFilters().isEmpty()) {
- return Collections.singletonList(RootEventType.allTypes.indexOf(filter.getEventType()));
- } else {
- return filter.getSubFilters().stream().flatMap((Filter t) -> getActiveSubTypes((TypeFilter) t).stream()).collect(Collectors.toList());
- }
- } else {
- return Collections.emptyList();
- }
- }
-
- static boolean hasActiveHashFilter(RootFilter filter) {
- HashHitsFilter hashHitFilter = filter.getHashHitsFilter();
- return hashHitFilter.isSelected() && false == hashHitFilter.isDisabled();
- }
-
- private SQLHelper() {
- }
-
- static String getSQLWhere(IntersectionFilter<?> filter) {
- return filter.getSubFilters().stream().filter(Filter::isSelected).map(SQLHelper::getSQLWhere).collect(Collectors.joining(" and ", "( ", ")"));
- }
-
- static String getSQLWhere(UnionFilter<?> filter) {
- return filter.getSubFilters().stream().filter(Filter::isSelected).map(SQLHelper::getSQLWhere).collect(Collectors.joining(" or ", "( ", ")"));
- }
-
- static String getSQLWhere(Filter filter) {
- String result = "";
- if (filter == null) {
- return "1";
- } else if (filter instanceof HashHitsFilter) {
- result = getSQLWhere((HashHitsFilter) filter);
- } else if (filter instanceof DataSourceFilter) {
- result = getSQLWhere((DataSourceFilter) filter);
- } else if (filter instanceof DataSourcesFilter) {
- result = getSQLWhere((DataSourcesFilter) filter);
- } else if (filter instanceof HideKnownFilter) {
- result = getSQLWhere((HideKnownFilter) filter);
- } else if (filter instanceof HashHitsFilter) {
- result = getSQLWhere((HashHitsFilter) filter);
- } else if (filter instanceof TextFilter) {
- result = getSQLWhere((TextFilter) filter);
- } else if (filter instanceof TypeFilter) {
- result = getSQLWhere((TypeFilter) filter);
- } else if (filter instanceof IntersectionFilter) {
- result = getSQLWhere((IntersectionFilter) filter);
- } else if (filter instanceof UnionFilter) {
- result = getSQLWhere((UnionFilter) filter);
- } else {
- return "1";
- }
- result = StringUtils.deleteWhitespace(result).equals("(1and1and1)") ? "1" : result;
- result = StringUtils.deleteWhitespace(result).equals("()") ? "1" : result;
- return result;
- }
-
- static String getSQLWhere(HideKnownFilter filter) {
- if (filter.isSelected()) {
- return "(known_state IS NOT '" + TskData.FileKnown.KNOWN.getFileKnownValue() + "')"; // NON-NLS
- } else {
- return "1";
- }
- }
-
- static String getSQLWhere(HashHitsFilter filter) {
- if (filter.isSelected()
- && (false == filter.isDisabled())
- && (filter.getSubFilters().isEmpty() == false)) {
- return "(hash_set_hits.hash_set_id in " + filter.getSubFilters().stream()
- .filter((HashSetFilter t) -> t.isSelected() && !t.isDisabled())
- .map((HashSetFilter t) -> String.valueOf(t.getHashSetID()))
- .collect(Collectors.joining(", ", "(", ")")) + " and hash_set_hits.event_id == events.event_id)";
- } else {
- return "1";
- }
- }
-
- static String getSQLWhere(DataSourceFilter filter) {
- return (filter.isSelected()) ? "(datasource_id = '" + filter.getDataSourceID() + "')" : "1";
- }
-
- static String getSQLWhere(DataSourcesFilter filter) {
- return (filter.isSelected()) ? "(datasource_id in ("
- + filter.getSubFilters().stream()
- .filter(AbstractFilter::isSelected)
- .map((dataSourceFilter) -> String.valueOf(dataSourceFilter.getDataSourceID()))
- .collect(Collectors.joining(", ")) + "))" : "1";
- }
-
- static String getSQLWhere(TextFilter filter) {
- if (filter.isSelected()) {
- if (StringUtils.isBlank(filter.getText())) {
- return "1";
- }
- String strippedFilterText = StringUtils.strip(filter.getText());
- return "((med_description like '%" + strippedFilterText + "%')"
- + " or (full_description like '%" + strippedFilterText + "%')"
- + " or (short_description like '%" + strippedFilterText + "%'))";
- } else {
- return "1";
- }
- }
-
- /**
- * generate a sql where clause for the given type filter, while trying to be
- * as simple as possible to improve performance.
- *
- * @param typeFilter
- *
- * @return
- */
- static String getSQLWhere(TypeFilter typeFilter) {
- if (typeFilter.isSelected() == false) {
- return "0";
- } else if (typeFilter.getEventType() instanceof RootEventType) {
- if (typeFilter.getSubFilters().stream()
- .allMatch(subFilter -> subFilter.isSelected() && subFilter.getSubFilters().stream().allMatch(Filter::isSelected))) {
- return "1"; //then collapse clause to true
- }
- }
- return "(sub_type IN (" + StringUtils.join(getActiveSubTypes(typeFilter), ",") + "))";
- }
-
-}
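For anyone tracing the deleted helper: its core idea was to fold a filter tree into a SQL WHERE string, joining selected children with " and " / " or " and collapsing anything empty or inactive to "1" (SQL's always-true literal) so the surrounding query stays syntactically valid. A stripped-down, entirely hypothetical sketch of that folding pattern:

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class WhereClauseSketch {

        interface SqlFilter {
            boolean isSelected();
            String toWhereClause();
        }

        /** Leaf filter wrapping a fixed fragment, e.g. a known-state or text test. */
        static final class LeafFilter implements SqlFilter {
            private final boolean selected;
            private final String fragment;

            LeafFilter(boolean selected, String fragment) {
                this.selected = selected;
                this.fragment = fragment;
            }

            @Override
            public boolean isSelected() {
                return selected;
            }

            @Override
            public String toWhereClause() {
                return fragment;
            }
        }

        /** Conjunction: selected children joined with " and "; empty collapses to "1". */
        static final class AndFilter implements SqlFilter {
            private final List<SqlFilter> subFilters;

            AndFilter(SqlFilter... subFilters) {
                this.subFilters = Arrays.asList(subFilters);
            }

            @Override
            public boolean isSelected() {
                return true;
            }

            @Override
            public String toWhereClause() {
                List<String> clauses = subFilters.stream()
                        .filter(SqlFilter::isSelected)
                        .map(SqlFilter::toWhereClause)
                        .collect(Collectors.toList());
                return clauses.isEmpty() ? "1" : clauses.stream()
                        .collect(Collectors.joining(" and ", "( ", " )"));
            }
        }

        public static void main(String[] args) {
            SqlFilter root = new AndFilter(
                    new LeafFilter(true, "(sub_type IN (1, 2))"),
                    new LeafFilter(false, "(datasource_id = '42')"));
            // prints "WHERE ( (sub_type IN (1, 2)) )": the unselected filter contributed nothing
            System.out.println("WHERE " + root.toWhereClause());
        }
    }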
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/explorernodes/EventNode.java b/Core/src/org/sleuthkit/autopsy/timeline/explorernodes/EventNode.java
index 59850eb99e..83e8801532 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/explorernodes/EventNode.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/explorernodes/EventNode.java
@@ -36,7 +36,7 @@ import org.sleuthkit.autopsy.datamodel.DisplayableItemNode;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNodeVisitor;
import org.sleuthkit.autopsy.datamodel.NodeProperty;
import org.sleuthkit.autopsy.timeline.TimeLineController;
-import org.sleuthkit.autopsy.timeline.events.TimeLineEvent;
+import org.sleuthkit.autopsy.timeline.datamodel.TimeLineEvent;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content;
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/explorernodes/EventRootNode.java b/Core/src/org/sleuthkit/autopsy/timeline/explorernodes/EventRootNode.java
index 9633041221..255bb280a6 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/explorernodes/EventRootNode.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/explorernodes/EventRootNode.java
@@ -27,9 +27,9 @@ import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.util.NbBundle;
import org.openide.util.lookup.Lookups;
-import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
-import org.sleuthkit.autopsy.timeline.events.TimeLineEvent;
-import org.sleuthkit.autopsy.timeline.events.type.BaseTypes;
+import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
+import org.sleuthkit.autopsy.timeline.datamodel.TimeLineEvent;
+import org.sleuthkit.autopsy.timeline.datamodel.eventtype.BaseTypes;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNode;
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/filters/CompoundFilter.java b/Core/src/org/sleuthkit/autopsy/timeline/filters/CompoundFilter.java
index f4b7a3938e..297b0a3a1b 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/filters/CompoundFilter.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/filters/CompoundFilter.java
@@ -78,7 +78,7 @@ public abstract class CompoundFilter<SubFilterType extends Filter> extends Abstr
}
}
- static boolean hashEqualSubFilters(final CompoundFilter<?> oneFilter, final CompoundFilter<?> otherFilter) {
+ static boolean areSubFiltersEqual(final CompoundFilter<?> oneFilter, final CompoundFilter<?> otherFilter) {
if (oneFilter.getSubFilters().size() != otherFilter.getSubFilters().size()) {
return false;
}
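The renamed helper implements a size-then-pairwise-equals check over the two filters' children. A self-contained sketch of the same logic, under hypothetical names:

    import java.util.List;
    import java.util.Objects;

    public final class SubFilterEqualitySketch {

        private SubFilterEqualitySketch() {
        }

        /** True iff both lists have the same size and equal elements at every index. */
        static <T> boolean areSubFiltersEqual(List<T> one, List<T> other) {
            if (one.size() != other.size()) {
                return false;
            }
            for (int i = 0; i < one.size(); i++) {
                if (!Objects.equals(one.get(i), other.get(i))) {
                    return false;
                }
            }
            return true;
        }
    }

For plain java.util.List instances this is equivalent to one.equals(other); spelling it out keeps the comparison explicit and independent of the list implementation the filters happen to use.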
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/filters/DataSourceFilter.java b/Core/src/org/sleuthkit/autopsy/timeline/filters/DataSourceFilter.java
index 648eb34cc9..b75193ddeb 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/filters/DataSourceFilter.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/filters/DataSourceFilter.java
@@ -85,4 +85,5 @@ public class DataSourceFilter extends AbstractFilter {
return isSelected() == other.isSelected();
}
+
}
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/filters/DataSourcesFilter.java b/Core/src/org/sleuthkit/autopsy/timeline/filters/DataSourcesFilter.java
index a791edb9ac..06198052af 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/filters/DataSourcesFilter.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/filters/DataSourcesFilter.java
@@ -18,6 +18,7 @@
*/
package org.sleuthkit.autopsy.timeline.filters;
+import java.util.Comparator;
import java.util.stream.Collectors;
import javafx.beans.binding.Bindings;
import org.openide.util.NbBundle;
@@ -38,7 +39,7 @@ public class DataSourcesFilter extends UnionFilter {
filterCopy.setSelected(isSelected());
//add a copy of each subfilter
this.getSubFilters().forEach((DataSourceFilter t) -> {
- filterCopy.addDataSourceFilter(t.copyOf());
+ filterCopy.addSubFilter(t.copyOf());
});
return filterCopy;
@@ -63,13 +64,14 @@ public class DataSourcesFilter extends UnionFilter {
return string;
}
- public void addDataSourceFilter(DataSourceFilter dataSourceFilter) {
+ public void addSubFilter(DataSourceFilter dataSourceFilter) {
if (getSubFilters().stream().map(DataSourceFilter.class::cast)
.map(DataSourceFilter::getDataSourceID)
.filter(t -> t == dataSourceFilter.getDataSourceID())
.findAny().isPresent() == false) {
dataSourceFilter.getDisabledProperty().bind(getDisabledProperty());
getSubFilters().add(dataSourceFilter);
+ getSubFilters().sort(Comparator.comparing(DataSourceFilter::getDisplayName));
}
if (getSubFilters().size() > 1) {
setSelected(Boolean.TRUE);
@@ -90,7 +92,7 @@ public class DataSourcesFilter extends UnionFilter {
return false;
}
- return hashEqualSubFilters(this, other);
+ return areSubFiltersEqual(this, other);
}
@@ -98,5 +100,4 @@ public class DataSourcesFilter extends UnionFilter {
public int hashCode() {
return 9;
}
-
}
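The addSubFilter change above deduplicates by data source ID and then re-sorts after each successful insert, so sub-filters stay ordered by display name. A minimal, hypothetical sketch of that dedupe-then-sort idiom (anyMatch is the equivalent of the patch's filter(...).findAny().isPresent() == false):

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;

    public class SortedSubFiltersSketch {

        /** Hypothetical stand-in for DataSourceFilter: an ID plus a display name. */
        static final class SubFilter {
            final long dataSourceID;
            final String displayName;

            SubFilter(long dataSourceID, String displayName) {
                this.dataSourceID = dataSourceID;
                this.displayName = displayName;
            }
        }

        private final List<SubFilter> subFilters = new ArrayList<>();

        public void addSubFilter(SubFilter candidate) {
            // dedupe by ID so the same data source is never listed twice
            boolean alreadyPresent = subFilters.stream()
                    .anyMatch(f -> f.dataSourceID == candidate.dataSourceID);
            if (!alreadyPresent) {
                subFilters.add(candidate);
                // keep the list alphabetized so the filter UI shows a stable order
                subFilters.sort(Comparator.comparing((SubFilter f) -> f.displayName));
            }
        }
    }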
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/filters/HashHitsFilter.java b/Core/src/org/sleuthkit/autopsy/timeline/filters/HashHitsFilter.java
index 5b97f1f71e..79f705bab0 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/filters/HashHitsFilter.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/filters/HashHitsFilter.java
@@ -5,6 +5,7 @@
*/
package org.sleuthkit.autopsy.timeline.filters;
+import java.util.Comparator;
import java.util.stream.Collectors;
import javafx.beans.binding.Bindings;
import org.openide.util.NbBundle;
@@ -31,7 +32,7 @@ public class HashHitsFilter extends UnionFilter {
filterCopy.setSelected(isSelected());
//add a copy of each subfilter
this.getSubFilters().forEach((HashSetFilter t) -> {
- filterCopy.addHashSetFilter(t.copyOf());
+ filterCopy.addSubFilter(t.copyOf());
});
return filterCopy;
}
@@ -68,15 +69,16 @@ public class HashHitsFilter extends UnionFilter {
return false;
}
- return hashEqualSubFilters(this, other);
+ return areSubFiltersEqual(this, other);
}
- public void addHashSetFilter(HashSetFilter hashSetFilter) {
- if (getSubFilters().stream().map(HashSetFilter.class::cast)
+ public void addSubFilter(HashSetFilter hashSetFilter) {
+ if (getSubFilters().stream()
.map(HashSetFilter::getHashSetID)
.filter(t -> t == hashSetFilter.getHashSetID())
.findAny().isPresent() == false) {
getSubFilters().add(hashSetFilter);
+ getSubFilters().sort(Comparator.comparing(HashSetFilter::getDisplayName));
}
}
}
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/filters/HashSetFilter.java b/Core/src/org/sleuthkit/autopsy/timeline/filters/HashSetFilter.java
index 9edfbc3ee1..ad6df42d85 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/filters/HashSetFilter.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/filters/HashSetFilter.java
@@ -21,7 +21,7 @@ package org.sleuthkit.autopsy.timeline.filters;
import java.util.Objects;
/**
- * Filter for an individual datasource
+ * Filter for an individual hash set
*/
public class HashSetFilter extends AbstractFilter {
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/filters/IntersectionFilter.java b/Core/src/org/sleuthkit/autopsy/timeline/filters/IntersectionFilter.java
index a6d6204978..f99249dcf0 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/filters/IntersectionFilter.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/filters/IntersectionFilter.java
@@ -24,7 +24,7 @@ import javafx.collections.FXCollections;
import org.openide.util.NbBundle;
/**
- * Intersection(And) filter
+ * Intersection (And) filter
*/
public class IntersectionFilter extends CompoundFilter {
@@ -60,7 +60,10 @@ public class IntersectionFilter extends CompoundFilter {
@Override
public String getHTMLReportString() {
- return getSubFilters().stream().filter(Filter::isSelected).map(Filter::getHTMLReportString).collect(Collectors.joining("