Mirror of https://github.com/overcuriousity/autopsy-flatpak.git, synced 2025-07-17 18:17:43 +00:00.

Merge remote-tracking branch 'sleuthkit/develop' into develop

Commit 36a9017c03
@@ -2,7 +2,8 @@ file.reference.jdom-2.0.5-contrib.jar=release/modules/ext/jdom-2.0.5-contrib.jar
file.reference.jdom-2.0.5.jar=release/modules/ext/jdom-2.0.5.jar
file.reference.jython-standalone-2.7.0.jar=release/modules/ext/jython-standalone-2.7.0.jar
file.reference.jython.jar-1=release/modules/ext/jython.jar
file.reference.metadata-extractor-2.6.2.jar=release/modules/ext/metadata-extractor-2.6.2.jar
file.reference.metadata-extractor-2.8.1.jar=release/modules/ext/metadata-extractor-2.8.1.jar
file.reference.opencv-248.jar=release/modules/ext/opencv-248.jar
file.reference.Rejistry-1.0-SNAPSHOT.jar=release/modules/ext/Rejistry-1.0-SNAPSHOT.jar
file.reference.sevenzipjbinding-AllPlatforms.jar=release/modules/ext/sevenzipjbinding-AllPlatforms.jar
file.reference.sevenzipjbinding.jar=release/modules/ext/sevenzipjbinding.jar
@@ -10,12 +11,13 @@ file.reference.sqlite-jdbc-3.8.11.jar=release/modules/ext/sqlite-jdbc-3.8.11.jar
file.reference.StixLib.jar=release/modules/ext/StixLib.jar
file.reference.tika-core-1.2.jar=release/modules/ext/tika-core-1.2.jar
file.reference.Tsk_DataModel.jar=release/modules/ext/Tsk_DataModel.jar
file.reference.xmpcore.jar=release/modules/ext/xmpcore.jar
file.reference.xmpcore-5.1.2.jar=release/modules/ext/xmpcore-5.1.2.jar
javac.source=1.8
javac.compilerargs=-Xlint -Xlint:-serial
license.file=../LICENSE-2.0.txt
nbm.homepage=http://www.sleuthkit.org/
nbm.module.author=Brian Carrier
nbm.needs.restart=true
source.reference.metadata-extractor-2.8.1.jar=release/modules/ext/metadata-extractor-2.8.1-src.zip!/Source/
spec.version.base=10.3
@@ -203,10 +203,26 @@
            <package>org.sleuthkit.autopsy.report</package>
            <package>org.sleuthkit.datamodel</package>
        </public-packages>
        <class-path-extension>
            <runtime-relative-path>ext/xmpcore-5.1.2.jar</runtime-relative-path>
            <binary-origin>release/modules/ext/xmpcore-5.1.2.jar</binary-origin>
        </class-path-extension>
        <class-path-extension>
            <runtime-relative-path>ext/jdom-2.0.5.jar</runtime-relative-path>
            <binary-origin>release/modules/ext/jdom-2.0.5.jar</binary-origin>
        </class-path-extension>
        <class-path-extension>
            <runtime-relative-path>ext/StixLib.jar</runtime-relative-path>
            <binary-origin>release/modules/ext/StixLib.jar</binary-origin>
        </class-path-extension>
        <class-path-extension>
            <runtime-relative-path>ext/sqlite-jdbc-3.8.11.jar</runtime-relative-path>
            <binary-origin>release/modules/ext/sqlite-jdbc-3.8.11.jar</binary-origin>
        </class-path-extension>
        <class-path-extension>
            <runtime-relative-path>ext/opencv-248.jar</runtime-relative-path>
            <binary-origin>release/modules/ext/opencv-248.jar</binary-origin>
        </class-path-extension>
        <class-path-extension>
            <runtime-relative-path>ext/Rejistry-1.0-SNAPSHOT.jar</runtime-relative-path>
            <binary-origin>release/modules/ext/Rejistry-1.0-SNAPSHOT.jar</binary-origin>
@@ -219,34 +235,18 @@
            <runtime-relative-path>ext/jython-standalone-2.7.0.jar</runtime-relative-path>
            <binary-origin>release/modules/ext/jython-standalone-2.7.0.jar</binary-origin>
        </class-path-extension>
        <class-path-extension>
            <runtime-relative-path>ext/StixLib.jar</runtime-relative-path>
            <binary-origin>release/modules/ext/StixLib.jar</binary-origin>
        </class-path-extension>
        <class-path-extension>
            <runtime-relative-path>ext/opencv-248.jar</runtime-relative-path>
            <binary-origin>release/modules/ext/opencv-248.jar</binary-origin>
        </class-path-extension>
        <class-path-extension>
            <runtime-relative-path>ext/sqlite-jdbc-3.8.11.jar</runtime-relative-path>
            <binary-origin>release/modules/ext/sqlite-jdbc-3.8.11.jar</binary-origin>
        </class-path-extension>
        <class-path-extension>
            <runtime-relative-path>ext/sevenzipjbinding-AllPlatforms.jar</runtime-relative-path>
            <binary-origin>release/modules/ext/sevenzipjbinding-AllPlatforms.jar</binary-origin>
        </class-path-extension>
        <class-path-extension>
            <runtime-relative-path>ext/metadata-extractor-2.6.2.jar</runtime-relative-path>
            <binary-origin>release/modules/ext/metadata-extractor-2.6.2.jar</binary-origin>
        </class-path-extension>
        <class-path-extension>
            <runtime-relative-path>ext/xmpcore.jar</runtime-relative-path>
            <binary-origin>release/modules/ext/xmpcore.jar</binary-origin>
        </class-path-extension>
        <class-path-extension>
            <runtime-relative-path>ext/tika-core-1.2.jar</runtime-relative-path>
            <binary-origin>release/modules/ext/tika-core-1.2.jar</binary-origin>
        </class-path-extension>
        <class-path-extension>
            <runtime-relative-path>ext/metadata-extractor-2.8.1.jar</runtime-relative-path>
            <binary-origin>release/modules/ext/metadata-extractor-2.8.1.jar</binary-origin>
        </class-path-extension>
        <class-path-extension>
            <runtime-relative-path>ext/jdom-2.0.5-contrib.jar</runtime-relative-path>
            <binary-origin>release/modules/ext/jdom-2.0.5-contrib.jar</binary-origin>
Binary file not shown.
BIN  Core/release/modules/ext/metadata-extractor-2.8.1.jar  (executable file)
Binary file not shown.
BIN  Core/release/modules/ext/xmpcore-5.1.2.jar  (executable file)
Binary file not shown.
Binary file not shown.
@@ -37,7 +37,7 @@ public class FileTypeExtensions {
    private final static List<String> TEXT_EXTENSIONS = Arrays.asList(".txt", ".rtf", ".log", ".text", ".xml"); //NON-NLS
    private final static List<String> WEB_EXTENSIONS = Arrays.asList(".html", ".htm", ".css", ".js", ".php", ".aspx"); //NON-NLS
    private final static List<String> PDF_EXTENSIONS = Arrays.asList(".pdf"); //NON-NLS
    private final static List<String> ARCHIVE_EXTENSIONS = Arrays.asList(".zip", ".rar", ".7zip", ".7z", ".arj", ".tar", ".gzip", ".bzip", ".bzip2", ".cab", ".jar", ".cpio", ".ar", ".gz", ".tgz"); //NON-NLS
    private final static List<String> ARCHIVE_EXTENSIONS = Arrays.asList(".zip", ".rar", ".7zip", ".7z", ".arj", ".tar", ".gzip", ".bzip", ".bzip2", ".cab", ".jar", ".cpio", ".ar", ".gz", ".tgz", ".bz2"); //NON-NLS

    public static List<String> getImageExtensions() {
        return IMAGE_EXTENSIONS;
@@ -37,3 +37,5 @@ EmbeddedFileExtractorIngestModule.ImageExtractor.xlsContainer.init.err=Xls container could not be initialized while reading: {0}
EmbeddedFileExtractorIngestModule.ImageExtractor.xlsxContainer.init.err=Xlsx container could not be initialized while reading: {0}
EmbeddedFileExtractorIngestModule.ImageExtractor.extractImage.addToDB.exception.msg=Unable to add the derived files to the database.
EmbeddedFileExtractorIngestModule.ImageExtractor.getOutputFolderPath.exception.msg=Could not get path for image extraction from Abstract File: {0}
EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.noSpace.msg=Unable to write content to disk. Not enough space.
@@ -24,6 +24,8 @@ import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
@@ -40,7 +42,6 @@ import net.sf.sevenzipjbinding.simple.ISimpleInArchive;
import net.sf.sevenzipjbinding.simple.ISimpleInArchiveItem;
import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.progress.ProgressHandleFactory;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
@@ -98,7 +99,8 @@ class SevenZipExtractor {
        GZIP("application/gzip"),
        XGZIP("application/x-gzip"),
        XBZIP2("application/x-bzip2"),
        XTAR("application/x-tar");
        XTAR("application/x-tar"),
        XGTAR("application/x-gtar");

        private final String mimeType;

@@ -121,9 +123,9 @@
            logger.log(Level.INFO, "7-Zip-JBinding library was initialized on supported platform: {0}", platform); //NON-NLS
        } catch (SevenZipNativeInitializationException e) {
            logger.log(Level.SEVERE, "Error initializing 7-Zip-JBinding library", e); //NON-NLS
            String msg = NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.init.errInitModule.msg",
            String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.init.errInitModule.msg",
                    EmbeddedFileExtractorModuleFactory.getModuleName());
            String details = NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.init.errCantInitLib",
            String details = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.init.errCantInitLib",
                    e.getMessage());
            services.postMessage(IngestMessage.createErrorMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
            throw new IngestModuleException(e.getMessage());
@@ -204,7 +206,7 @@
            if (cRatio >= MAX_COMPRESSION_RATIO) {
                String itemName = archiveFileItem.getPath();
                logger.log(Level.INFO, "Possible zip bomb detected, compression ration: {0} for in archive item: {1}", new Object[]{cRatio, itemName}); //NON-NLS
                String msg = NbBundle.getMessage(this.getClass(),
                String msg = NbBundle.getMessage(SevenZipExtractor.class,
                        "EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg", archiveFile.getName(), itemName);
                String path;
                try {
@@ -212,7 +214,7 @@
                } catch (TskCoreException ex) {
                    path = archiveFile.getParentPath() + archiveFile.getName();
                }
                String details = NbBundle.getMessage(this.getClass(),
                String details = NbBundle.getMessage(SevenZipExtractor.class,
                        "EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnDetails", cRatio, path);
                //MessageNotifyUtil.Notify.error(msg, details);
                services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
@@ -311,9 +313,9 @@
        if (parentAr == null) {
            parentAr = archiveDepthCountTree.addArchive(null, archiveId);
        } else if (parentAr.getDepth() == MAX_DEPTH) {
            String msg = NbBundle.getMessage(this.getClass(),
            String msg = NbBundle.getMessage(SevenZipExtractor.class,
                    "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.warnMsg.zipBomb", archiveFile.getName());
            String details = NbBundle.getMessage(this.getClass(),
            String details = NbBundle.getMessage(SevenZipExtractor.class,
                    "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.warnDetails.zipBomb",
                    parentAr.getDepth(), archiveFilePath);
            //MessageNotifyUtil.Notify.error(msg, details);
@@ -328,7 +330,7 @@
        SevenZipContentReadStream stream = null;

        final ProgressHandle progress = ProgressHandleFactory.createHandle(
                NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.moduleName"));
                NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.moduleName"));
        int processedItems = 0;

        boolean progressStarted = false;
@@ -400,7 +402,7 @@
                        pathInArchive = "/" + useName;
                    }

                    String msg = NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.unknownPath.msg",
                    String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.unknownPath.msg",
                            archiveFilePath, pathInArchive);
                    logger.log(Level.WARNING, msg);

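For context, the cRatio test in the hunk above is the zip-bomb guard, but this commit does not show how the ratio itself is derived. A minimal sketch of such a check follows; the threshold constant, class name, and size parameters are illustrative assumptions, not code from this commit:

// Illustrative sketch only; MAX_COMPRESSION_RATIO and the accessors are assumptions.
class ZipBombHeuristic {

    private static final int MAX_COMPRESSION_RATIO = 600; // assumed threshold

    /**
     * Flags an archive entry whose uncompressed size is implausibly large
     * relative to its packed size, the classic zip-bomb signature.
     */
    static boolean looksLikeZipBomb(long packedSize, long unpackedSize) {
        if (packedSize <= 0) {
            return false; // ratio undefined; let other checks handle it
        }
        long ratio = unpackedSize / packedSize;
        return ratio >= MAX_COMPRESSION_RATIO;
    }
}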
@@ -432,24 +434,19 @@
                    fullEncryption = false;
                }

                final Long size = item.getSize();
                if (size == null) {
                    // If the size property cannot be determined, out-of-disk-space
                    // situations cannot be ascertained.
                    // Hence skip this file.
                    logger.log(Level.WARNING, "Size cannot be determined. Skipping file in archive: {0}", pathInArchive); //NON-NLS
                    continue;
                }
                // NOTE: item.getSize() may return null in case of certain
                // archiving formats. Eg: BZ2
                Long size = item.getSize();

                //check if unpacking this file will result in out of disk space
                //this is additional to zip bomb prevention mechanism
                if (freeDiskSpace != IngestMonitor.DISK_FREE_SPACE_UNKNOWN && size > 0) { //if known free space and file not empty
                if (freeDiskSpace != IngestMonitor.DISK_FREE_SPACE_UNKNOWN && size != null && size > 0) { //if free space is known and file is not empty.
                    long newDiskSpace = freeDiskSpace - size;
                    if (newDiskSpace < MIN_FREE_DISK_SPACE) {
                        String msg = NbBundle.getMessage(this.getClass(),
                        String msg = NbBundle.getMessage(SevenZipExtractor.class,
                                "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.notEnoughDiskSpace.msg",
                                archiveFilePath, fileName);
                        String details = NbBundle.getMessage(this.getClass(),
                        String details = NbBundle.getMessage(SevenZipExtractor.class,
                                "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.notEnoughDiskSpace.details");
                        //MessageNotifyUtil.Notify.error(msg, details);
                        services.postMessage(IngestMessage.createErrorMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
@@ -501,21 +498,31 @@
                final long modtime = writeTime == null ? 0L : writeTime.getTime() / 1000;
                final long accesstime = accessTime == null ? 0L : accessTime.getTime() / 1000;

                //record derived data in unode, to be traversed later after unpacking the archive
                unpackedNode.addDerivedInfo(size, !isDir,
                        0L, createtime, accesstime, modtime, localRelPath);

                //unpack locally if a file
                if (!isDir) {
                SevenZipExtractor.UnpackStream unpackStream = null;
                if (!isDir) {
                    try {
                        unpackStream = new SevenZipExtractor.UnpackStream(localAbsPath);
                        unpackStream = new SevenZipExtractor.UnpackStream(localAbsPath, freeDiskSpace, size == null);
                        item.extractSlow(unpackStream);
                    } catch (Exception e) {
                        //could be something unexpected with this file, move on
                        logger.log(Level.WARNING, "Could not extract file from archive: " + localAbsPath, e); //NON-NLS
                    } finally {
                        if (unpackStream != null) {
                            //record derived data in unode, to be traversed later after unpacking the archive
                            if (size != null) {
                                // unpackedNode.bytesWritten will not be set in
                                // this case. Use 'size' which has been set
                                // previously.
                                unpackedNode.addDerivedInfo(size, !isDir,
                                        0L, createtime, accesstime, modtime, localRelPath);
                            } else {
                                // since size is unknown, use
                                // unpackStream.getNumberOfBytesWritten() to get
                                // the size.
                                unpackedNode.addDerivedInfo(unpackStream.getNumberOfBytesWritten(), !isDir,
                                        0L, createtime, accesstime, modtime, localRelPath);
                            }
                            unpackStream.close();
                        }
                    }
@@ -549,9 +556,9 @@

            // print a message if the file is allocated
            if (archiveFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC)) {
                String msg = NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.errUnpacking.msg",
                String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.errUnpacking.msg",
                        archiveFile.getName());
                String details = NbBundle.getMessage(this.getClass(),
                String details = NbBundle.getMessage(SevenZipExtractor.class,
                        "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.errUnpacking.details",
                        archiveFilePath, ex.getMessage());
                services.postMessage(IngestMessage.createErrorMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
@@ -590,8 +597,8 @@
                logger.log(Level.SEVERE, "Error creating blackboard artifact for encryption detected for file: " + archiveFilePath, ex); //NON-NLS
            }

            String msg = NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.msg");
            String details = NbBundle.getMessage(this.getClass(),
            String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.msg");
            String details = NbBundle.getMessage(SevenZipExtractor.class,
                    "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.details",
                    archiveFile.getName(), EmbeddedFileExtractorModuleFactory.getModuleName());
            services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
@@ -612,8 +619,15 @@

        private OutputStream output;
        private String localAbsPath;
        private long freeDiskSpace;
        private boolean sizeUnknown = false;
        private boolean outOfSpace = false;
        private long bytesWritten = 0;

        UnpackStream(String localAbsPath) {
        UnpackStream(String localAbsPath, long freeDiskSpace, boolean sizeUnknown) {
            this.sizeUnknown = sizeUnknown;
            this.freeDiskSpace = freeDiskSpace;
            this.localAbsPath = localAbsPath;
            try {
                output = new BufferedOutputStream(new FileOutputStream(localAbsPath));
            } catch (FileNotFoundException ex) {
@@ -622,13 +636,38 @@

        }

        public long getNumberOfBytesWritten() {
            return this.bytesWritten;
        }

        @Override
        public int write(byte[] bytes) throws SevenZipException {
            try {
                if (!sizeUnknown) {
                    output.write(bytes);
                } else {
                    // If the content size is unknown, cautiously write to disk.
                    // Write only if byte array is less than 80% of the current
                    // free disk space.
                    if (freeDiskSpace == IngestMonitor.DISK_FREE_SPACE_UNKNOWN || bytes.length < 0.8 * freeDiskSpace) {
                        output.write(bytes);
                        // NOTE: this method is called multiple times for a
                        // single extractSlow() call. Update bytesWritten and
                        // freeDiskSpace after every write operation.
                        this.bytesWritten += bytes.length;
                        this.freeDiskSpace -= bytes.length;
                    } else {
                        this.outOfSpace = true;
                        logger.log(Level.INFO, NbBundle.getMessage(
                                SevenZipExtractor.class,
                                "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.noSpace.msg"));
                        throw new SevenZipException(
                                NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.noSpace.msg"));
                    }
                }
            } catch (IOException ex) {
                throw new SevenZipException(
                        NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.exception.msg",
                        NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.exception.msg",
                                localAbsPath), ex);
            }
            return bytes.length;
@@ -639,6 +678,9 @@
            try {
                output.flush();
                output.close();
                if (this.outOfSpace) {
                    Files.delete(Paths.get(this.localAbsPath));
                }
            } catch (IOException e) {
                logger.log(Level.SEVERE, "Error closing unpack stream for file: {0}", localAbsPath); //NON-NLS
            }
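Taken together, the UnpackStream hunks implement a guarded-write pattern: when the archive format cannot report an entry size up front (e.g. BZ2), each chunk is admitted only while it stays safely under the remaining free-space estimate, and a truncated partial file is deleted on close. A self-contained sketch of that pattern, with hypothetical class and field names rather than the Autopsy types:

import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

// Hypothetical standalone version of the guarded-write idea; not the Autopsy
// UnpackStream, just the same control flow in isolation.
class GuardedUnpackStream {

    private final OutputStream out;
    private final String path;
    private long freeDiskSpace;    // running estimate of space left
    private long bytesWritten = 0;
    private boolean outOfSpace = false;

    GuardedUnpackStream(String path, long freeDiskSpace) throws IOException {
        this.path = path;
        this.freeDiskSpace = freeDiskSpace;
        this.out = new BufferedOutputStream(new FileOutputStream(path));
    }

    int write(byte[] bytes) throws IOException {
        // Admit the chunk only while it is well under the remaining budget
        // (the commit uses an 80% guard band).
        if (bytes.length < 0.8 * freeDiskSpace) {
            out.write(bytes);
            bytesWritten += bytes.length;   // updated on every call
            freeDiskSpace -= bytes.length;
            return bytes.length;
        }
        outOfSpace = true;
        throw new IOException("not enough space to keep unpacking " + path);
    }

    void close() throws IOException {
        out.flush();
        out.close();
        if (outOfSpace) {
            Files.delete(Paths.get(path)); // drop the truncated partial file
        }
    }

    long getBytesWritten() {
        return bytesWritten;
    }
}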
@@ -774,7 +816,7 @@
            } catch (TskCoreException ex) {
                logger.log(Level.SEVERE, "Error adding a derived file to db:" + fileName, ex); //NON-NLS
                throw new TskCoreException(
                        NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackedTree.exception.msg",
                        NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackedTree.exception.msg",
                                fileName), ex);
            }

@@ -22,10 +22,21 @@ import com.drew.imaging.ImageMetadataReader;
import com.drew.imaging.ImageProcessingException;
import com.drew.lang.GeoLocation;
import com.drew.lang.Rational;
import com.drew.metadata.Directory;
import com.drew.metadata.Metadata;
import com.drew.metadata.MetadataException;
import com.drew.metadata.exif.makernotes.CanonMakernoteDirectory;
import com.drew.metadata.exif.ExifIFD0Directory;
import com.drew.metadata.exif.ExifSubIFDDirectory;
import com.drew.metadata.exif.GpsDirectory;
import com.drew.metadata.exif.makernotes.CasioType1MakernoteDirectory;
import com.drew.metadata.exif.makernotes.FujifilmMakernoteDirectory;
import com.drew.metadata.exif.makernotes.KodakMakernoteDirectory;
import com.drew.metadata.exif.makernotes.NikonType2MakernoteDirectory;
import com.drew.metadata.exif.makernotes.PanasonicMakernoteDirectory;
import com.drew.metadata.exif.makernotes.PentaxMakernoteDirectory;
import com.drew.metadata.exif.makernotes.SanyoMakernoteDirectory;
import com.drew.metadata.exif.makernotes.SonyType1MakernoteDirectory;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
@@ -33,6 +44,8 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.TimeZone;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
import org.openide.util.NbBundle;
@@ -47,6 +60,8 @@ import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@@ -63,10 +78,13 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
    private final IngestServices services = IngestServices.getInstance();
    private final AtomicInteger filesProcessed = new AtomicInteger(0);
    private volatile boolean filesToFire = false;
    private volatile boolean facesDetected = false;
    private final List<BlackboardArtifact> listOfFacesDetectedArtifacts = new ArrayList<>();
    private long jobId;
    private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
    private FileTypeDetector fileTypeDetector;
    private final HashSet<String> supportedMimeTypes = new HashSet<>();
    private TimeZone timeZone = null;

    ExifParserFileIngestModule() {
        supportedMimeTypes.add("audio/x-wav");
@@ -103,10 +121,17 @@

        // update the tree every 1000 files if we have EXIF data that is not being being displayed
        final int filesProcessedValue = filesProcessed.incrementAndGet();
        if ((filesToFire) && (filesProcessedValue % 1000 == 0)) {
        if ((filesProcessedValue % 1000 == 0)) {
            if (filesToFire) {
                services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
                filesToFire = false;
            }
            if (facesDetected) {
                services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_FACE_DETECTED, listOfFacesDetectedArtifacts));
                listOfFacesDetectedArtifacts.clear();
                facesDetected = false;
            }
        }

        //skip unsupported
        if (!parsableFormat(content)) {
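The hunk above batches UI notifications: an AtomicInteger counts processed files across concurrent ingest threads, and events fire only on every 1000th file. A minimal sketch of that batching pattern, where the fireEvent() hook is a placeholder standing in for the real services.fireModuleDataEvent(...) call:

import java.util.concurrent.atomic.AtomicInteger;

// Placeholder batching sketch; fireEvent() is hypothetical.
class BatchedNotifier {

    private final AtomicInteger filesProcessed = new AtomicInteger(0);
    private volatile boolean dirty = false; // results pending since last fire

    void onFileProcessed(boolean producedResults) {
        if (producedResults) {
            dirty = true;
        }
        // incrementAndGet is safe under concurrent file ingest threads.
        if (filesProcessed.incrementAndGet() % 1000 == 0 && dirty) {
            fireEvent();
            dirty = false;
        }
    }

    void fireEvent() {
        // stand-in for posting the module-data event to listeners
    }
}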
@@ -125,19 +150,32 @@
            bin = new BufferedInputStream(in);

            Collection<BlackboardAttribute> attributes = new ArrayList<>();
            Metadata metadata = ImageMetadataReader.readMetadata(bin, true);
            Metadata metadata = ImageMetadataReader.readMetadata(bin);

            // Date
            ExifSubIFDDirectory exifDir = metadata.getDirectory(ExifSubIFDDirectory.class);
            ExifSubIFDDirectory exifDir = metadata.getFirstDirectoryOfType(ExifSubIFDDirectory.class);
            if (exifDir != null) {
                Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL);

                // set the timeZone for the current datasource.
                if (timeZone == null) {
                    try {
                        Content dataSource = f.getDataSource();
                        if ((dataSource != null) && (dataSource instanceof Image)) {
                            Image image = (Image) dataSource;
                            timeZone = TimeZone.getTimeZone(image.getTimeZone());
                        }
                    } catch (TskCoreException ex) {
                        logger.log(Level.INFO, "Error getting time zones", ex); //NON-NLS
                    }
                }
                Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL, timeZone);
                if (date != null) {
                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), ExifParserModuleFactory.getModuleName(), date.getTime() / 1000));
                }
            }

            // GPS Stuff
            GpsDirectory gpsDir = metadata.getDirectory(GpsDirectory.class);
            GpsDirectory gpsDir = metadata.getFirstDirectoryOfType(GpsDirectory.class);
            if (gpsDir != null) {
                GeoLocation loc = gpsDir.getGeoLocation();
                if (loc != null) {
@@ -147,14 +185,14 @@
                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID(), ExifParserModuleFactory.getModuleName(), longitude));
                }

                Rational altitude = gpsDir.getRational(GpsDirectory.TAG_GPS_ALTITUDE);
                Rational altitude = gpsDir.getRational(GpsDirectory.TAG_ALTITUDE);
                if (altitude != null) {
                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID(), ExifParserModuleFactory.getModuleName(), altitude.doubleValue()));
                }
            }

            // Device info
            ExifIFD0Directory devDir = metadata.getDirectory(ExifIFD0Directory.class);
            ExifIFD0Directory devDir = metadata.getFirstDirectoryOfType(ExifIFD0Directory.class);
            if (devDir != null) {
                String model = devDir.getString(ExifIFD0Directory.TAG_MODEL);
                if (model != null && !model.isEmpty()) {
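The metadata-extractor 2.8.1 calls used above (readMetadata without the boolean flag, getFirstDirectoryOfType replacing getDirectory, and the TimeZone-aware getDate) can be exercised on their own. A small sketch using only the API calls visible in this diff, with the file path and time zone as placeholder choices:

import com.drew.imaging.ImageMetadataReader;
import com.drew.lang.GeoLocation;
import com.drew.metadata.Metadata;
import com.drew.metadata.exif.ExifSubIFDDirectory;
import com.drew.metadata.exif.GpsDirectory;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.util.Date;
import java.util.TimeZone;

class ExifSketch {
    public static void main(String[] args) throws Exception {
        // "photo.jpg" is a placeholder path.
        try (BufferedInputStream in = new BufferedInputStream(new FileInputStream("photo.jpg"))) {
            Metadata metadata = ImageMetadataReader.readMetadata(in);

            // 2.8.x API: getFirstDirectoryOfType replaced getDirectory.
            ExifSubIFDDirectory exifDir = metadata.getFirstDirectoryOfType(ExifSubIFDDirectory.class);
            if (exifDir != null) {
                // Passing a TimeZone resolves the zone-less EXIF timestamp.
                Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL,
                        TimeZone.getTimeZone("UTC"));
                System.out.println("taken: " + date);
            }

            GpsDirectory gpsDir = metadata.getFirstDirectoryOfType(GpsDirectory.class);
            if (gpsDir != null) {
                GeoLocation loc = gpsDir.getGeoLocation();
                if (loc != null) {
                    System.out.println(loc.getLatitude() + ", " + loc.getLongitude());
                }
            }
        }
    }
}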
@@ -167,6 +205,11 @@
                }
            }

            if (containsFace(metadata)) {
                listOfFacesDetectedArtifacts.add(f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_FACE_DETECTED));
                facesDetected = true;
            }

            // Add the attributes, if there are any, to a new artifact
            if (!attributes.isEmpty()) {
                BlackboardArtifact bba = f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF);
@@ -199,6 +242,121 @@
        }
    }

    /**
     * Checks if this metadata contains any tags related to facial information.
     * NOTE: Cases with this metadata containing tags like enabled red-eye
     * reduction settings, portrait settings, etc are also assumed to contain
     * facial information. The method returns true. The return value of this
     * method does NOT guarantee actual presence of face.
     *
     * @param metadata the metadata which needs to be parsed for possible facial
     *                 information.
     *
     * @return returns true if the metadata contains any tags related to facial
     *         information.
     */
    private boolean containsFace(Metadata metadata) {
        Directory d = metadata.getFirstDirectoryOfType(CanonMakernoteDirectory.class);
        if (d != null) {
            if (d.containsTag(CanonMakernoteDirectory.TAG_FACE_DETECT_ARRAY_1)
                    && d.getString(CanonMakernoteDirectory.TAG_FACE_DETECT_ARRAY_1) != null) {
                return true;
            }
            if (d.containsTag(CanonMakernoteDirectory.TAG_FACE_DETECT_ARRAY_2)
                    && d.getString(CanonMakernoteDirectory.TAG_FACE_DETECT_ARRAY_2) != null) {
                return true;
            }
        }

        d = metadata.getFirstDirectoryOfType(CasioType1MakernoteDirectory.class);
        if (d != null) {
            try {
                if (d.containsTag(CasioType1MakernoteDirectory.TAG_FLASH_MODE)
                        && d.getInt(CasioType1MakernoteDirectory.TAG_FLASH_MODE) == 0x04) { //0x04 = "Red eye reduction"
                    return true;
                }
            } catch (MetadataException ex) {
                // move on and check next directory
            }
        }

        d = metadata.getFirstDirectoryOfType(FujifilmMakernoteDirectory.class);
        if (d != null) {
            if (d.containsTag(FujifilmMakernoteDirectory.TAG_FACES_DETECTED)
                    && d.getString(FujifilmMakernoteDirectory.TAG_FACES_DETECTED) != null) {
                return true;
            }
        }

        d = metadata.getFirstDirectoryOfType(KodakMakernoteDirectory.class);
        if (d != null) {
            try {
                if (d.containsTag(KodakMakernoteDirectory.TAG_FLASH_MODE)
                        && d.getInt(KodakMakernoteDirectory.TAG_FLASH_MODE) == 0x03) { //0x03 = "Red Eye"
                    return true;
                }
            } catch (MetadataException ex) {
                /// move on and check next directory
            }
        }

        d = metadata.getFirstDirectoryOfType(NikonType2MakernoteDirectory.class);
        if (d != null) {
            if (d.containsTag(NikonType2MakernoteDirectory.TAG_SCENE_MODE)
                    && d.getString(NikonType2MakernoteDirectory.TAG_SCENE_MODE) != null
                    && (d.getString(NikonType2MakernoteDirectory.TAG_SCENE_MODE).equals("BEST FACE") // NON-NLS
                    || (d.getString(NikonType2MakernoteDirectory.TAG_SCENE_MODE).equals("SMILE")))) { // NON-NLS
                return true;
            }
        }

        d = metadata.getFirstDirectoryOfType(PanasonicMakernoteDirectory.class);
        if (d != null) {
            if (d.containsTag(PanasonicMakernoteDirectory.TAG_FACES_DETECTED)
                    && d.getString(PanasonicMakernoteDirectory.TAG_FACES_DETECTED) != null) {
                return true;
            }
        }

        d = metadata.getFirstDirectoryOfType(PentaxMakernoteDirectory.class);
        if (d != null) {
            try {
                if (d.containsTag(PentaxMakernoteDirectory.TAG_FLASH_MODE)
                        && d.getInt(PentaxMakernoteDirectory.TAG_FLASH_MODE) == 6) { // 6 = Red-eye Reduction
                    return true;
                }
            } catch (MetadataException ex) {
                // move on and check next directory
            }
        }

        d = metadata.getFirstDirectoryOfType(SanyoMakernoteDirectory.class);
        if (d != null) {
            if (d.containsTag(SanyoMakernoteDirectory.TAG_MANUAL_FOCUS_DISTANCE_OR_FACE_INFO)
                    && d.getString(SanyoMakernoteDirectory.TAG_MANUAL_FOCUS_DISTANCE_OR_FACE_INFO) != null) {
                return true;
            }
        }

        d = metadata.getFirstDirectoryOfType(SonyType1MakernoteDirectory.class);
        if (d != null) {
            try {
                if (d.containsTag(SonyType1MakernoteDirectory.TAG_AF_MODE)
                        && d.getInt(SonyType1MakernoteDirectory.TAG_AF_MODE) == 15) { //15 = "Face Detected"
                    return true;
                }
                if (d.containsTag(SonyType1MakernoteDirectory.TAG_EXPOSURE_MODE)
                        && d.getInt(SonyType1MakernoteDirectory.TAG_EXPOSURE_MODE) == 14) { //14 = "Smile shutter"
                    return true;
                }
            } catch (MetadataException ex) {
                // move on and check next directory
            }
        }

        return false;
    }

    /**
     * Checks if should try to attempt to extract exif. Currently checks if JPEG
     * image (by signature)
@@ -225,10 +383,15 @@
    public void shutDown() {
        // We only need to check for this final event on the last module per job
        if (refCounter.decrementAndGet(jobId) == 0) {
            timeZone = null;
            if (filesToFire) {
                //send the final new data event
                services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
            }
            if (facesDetected) {
                //send the final new data event
                services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_FACE_DETECTED, listOfFacesDetectedArtifacts));
            }
        }
    }
}
@@ -1,4 +1,3 @@
OpenIDE-Module-Name=Timeline
CTL_MakeTimeline="Timeline"
CTL_TimeLineTopComponentAction=TimeLineTopComponent
CTL_TimeLineTopComponent=Timeline Window
@@ -11,10 +10,6 @@ Timeline.goToButton.text=Go To\:
Timeline.yearBarChart.x.years=Years
Timeline.resultPanel.loading=Loading...
Timeline.node.root=Root
Timeline.propChg.confDlg.timelineOOD.msg=The event data is out of date. Would you like to regenerate it?
Timeline.propChg.confDlg.timelineOOD.details=Timeline
Timeline.initTimeline.confDlg.genBeforeIngest.msg=You are trying to generate a timeline before ingest has been completed. The timeline may be incomplete. Do you want to continue?
Timeline.initTimeline.confDlg.genBeforeIngest.details=Timeline
TimelineFrame.title=Timeline
TimelinePanel.jButton1.text=6m
TimelinePanel.jButton13.text=all
@@ -29,18 +24,10 @@ TimelinePanel.jButton7.text=3d
TimelinePanel.jButton2.text=1m
TimelinePanel.jButton3.text=3m
TimelinePanel.jButton4.text=2w
ProgressWindow.progressHeader.text=\
TimeLineTopComponent.eventsTab.name=Events
TimeLineTopComponent.filterTab.name=Filters
Timeline.showLastPopulatedWhileIngestingConf.confDlg.details=Timeline
Timeline.do_repopulate.msg=The Timeline events database was previously populated while ingest was running.\nSome events may not have been populated or may have been populated inaccurately.\nDo you want to repopulate the events database now?
Timeline.pushDescrLOD.confdlg.msg=You are about to show details for {0} events. This might be very slow or even crash Autopsy.\n\nDo you want to continue?
Timeline.pushDescrLOD.confdlg.details=
OpenTimelineAction.title=Timeline
Timeline.ProgressWindow.cancel.confdlg.msg=Do you want to cancel timeline creation?
Timeline.ProgressWindow.cancel.confdlg.detail=Cancel timeline creation?
Timeline.progressWindow.name=Timeline
Timeline.progressWindow.title=Generating Timeline data
OpenTimeLineAction.msgdlg.text=Could not create timeline, there are no data sources.
TimeLineTopComponent.timeZonePanel.text=Display Times In\:
datasource.missing.confirmation=The Timeline events database was previously populated with an old version of Autopsy.\nThe data source filter will be unavailable unless you update the events database.\nDo you want to update the events database now?
ProgressWindow.progressHeader.text=\
@@ -54,7 +54,10 @@
    <Component class="javax.swing.JLabel" name="progressHeader">
        <Properties>
            <Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
                <ResourceString bundle="org/sleuthkit/autopsy/advancedtimeline/Bundle.properties" key="ProgressWindow.progressHeader.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
                <ResourceString bundle="org/sleuthkit/autopsy/timeline/Bundle.properties" key="ProgressWindow.progressHeader.text" replaceFormat="NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
            </Property>
            <Property name="minimumSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
                <Dimension value="[10, 14]"/>
            </Property>
        </Properties>
    </Component>
@@ -21,25 +21,19 @@ package org.sleuthkit.autopsy.timeline;
import java.awt.Component;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import javax.annotation.concurrent.Immutable;
import javax.swing.AbstractAction;
import javax.swing.ActionMap;
import javax.swing.GroupLayout;
import javax.swing.InputMap;
import javax.swing.JComponent;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JProgressBar;
import javax.swing.KeyStroke;
import javax.swing.LayoutStyle;
import javax.swing.SwingUtilities;
import javax.swing.SwingWorker;
import org.openide.awt.Mnemonics;
import org.openide.util.NbBundle;
import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.coreutils.ThreadConfined;

/**
 * Dialog with progress bar that pops up when timeline is being generated
@@ -51,6 +45,8 @@ public class ProgressWindow extends JFrame {
    /**
     * Creates new form TimelineProgressDialog
     */
    @NbBundle.Messages({"Timeline.progressWindow.name=Timeline",
        "Timeline.progressWindow.title=Generating Timeline data"})
    public ProgressWindow(Component parent, boolean modal, SwingWorker<?, ?> worker) {
        super();
        initComponents();
@@ -64,9 +60,8 @@
            setIconImage(WindowManager.getDefault().getMainWindow().getIconImage());
        });

        //progressBar.setIndeterminate(true);
        setName(NbBundle.getMessage(TimeLineTopComponent.class, "Timeline.progressWindow.name"));
        setTitle(NbBundle.getMessage(TimeLineTopComponent.class, "Timeline.progressWindow.title"));
        setName(Bundle.Timeline_progressWindow_name());
        setTitle(Bundle.Timeline_progressWindow_title());
        // Close the dialog when Esc is pressed
        String cancelName = "cancel"; // NON-NLS
        InputMap inputMap = getRootPane().getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
@@ -83,46 +78,6 @@
        this.worker = worker;
    }

    public void updateProgress(final int progress) {
        SwingUtilities.invokeLater(() -> {
            progressBar.setValue(progress);
        });
    }

    public void updateProgress(final int progress, final String message) {
        SwingUtilities.invokeLater(() -> {
            progressBar.setValue(progress);
            progressBar.setString(message);
        });
    }

    public void updateProgress(final String message) {
        SwingUtilities.invokeLater(() -> {
            progressBar.setString(message);
        });
    }

    public void setProgressTotal(final int total) {
        SwingUtilities.invokeLater(() -> {
            progressBar.setIndeterminate(false);
            progressBar.setMaximum(total);
            progressBar.setStringPainted(true);
        });
    }

    public void updateHeaderMessage(final String headerMessage) {
        SwingUtilities.invokeLater(() -> {
            progressHeader.setText(headerMessage);
        });
    }

    public void setIndeterminate() {
        SwingUtilities.invokeLater(() -> {
            progressBar.setIndeterminate(true);
            progressBar.setStringPainted(true);
        });
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
@@ -132,38 +87,39 @@
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        progressBar = new JProgressBar();
        progressHeader = new JLabel();
        progressBar = new javax.swing.JProgressBar();
        progressHeader = new javax.swing.JLabel();

        addWindowListener(new WindowAdapter() {
            public void windowClosing(WindowEvent evt) {
        addWindowListener(new java.awt.event.WindowAdapter() {
            public void windowClosing(java.awt.event.WindowEvent evt) {
                closeDialog(evt);
            }
        });

        Mnemonics.setLocalizedText(progressHeader, NbBundle.getMessage(ProgressWindow.class, "ProgressWindow.progressHeader.text")); // NOI18N
        org.openide.awt.Mnemonics.setLocalizedText(progressHeader, NbBundle.getMessage(ProgressWindow.class, "ProgressWindow.progressHeader.text")); // NOI18N
        progressHeader.setMinimumSize(new java.awt.Dimension(10, 14));

        GroupLayout layout = new GroupLayout(getContentPane());
        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(GroupLayout.Alignment.LEADING)
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(GroupLayout.Alignment.LEADING)
                    .addComponent(progressBar, GroupLayout.DEFAULT_SIZE, 504, Short.MAX_VALUE)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(progressBar, javax.swing.GroupLayout.DEFAULT_SIZE, 504, Short.MAX_VALUE)
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(progressHeader)
                        .addComponent(progressHeader, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addGap(0, 0, Short.MAX_VALUE)))
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(GroupLayout.Alignment.LEADING)
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(progressHeader)
                .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(progressBar, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
                .addContainerGap(GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                .addComponent(progressHeader, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(progressBar, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
        );

        pack();
@@ -176,14 +132,14 @@
        cancel();
    }//GEN-LAST:event_closeDialog

    @NbBundle.Messages({"Timeline.ProgressWindow.cancel.confdlg.msg=Do you want to cancel timeline creation?",
        "Timeline.ProgressWindow.cancel.confdlg.detail=Cancel timeline creation?"})
    public void cancel() {
        SwingUtilities.invokeLater(() -> {
            if (isVisible()) {
                int showConfirmDialog = JOptionPane.showConfirmDialog(ProgressWindow.this,
                        NbBundle.getMessage(TimeLineTopComponent.class,
                                "Timeline.ProgressWindow.cancel.confdlg.msg"),
                        NbBundle.getMessage(TimeLineTopComponent.class,
                                "Timeline.ProgressWindow.cancel.confdlg.detail"),
                        Bundle.Timeline_ProgressWindow_cancel_confdlg_msg(),
                        Bundle.Timeline_ProgressWindow_cancel_confdlg_detail(),
                        JOptionPane.YES_NO_OPTION, JOptionPane.WARNING_MESSAGE);
                if (showConfirmDialog == JOptionPane.YES_OPTION) {
                    close();
@@ -200,18 +156,23 @@
        dispose();
    }
    // Variables declaration - do not modify//GEN-BEGIN:variables
    private JProgressBar progressBar;
    private JLabel progressHeader;
    private javax.swing.JProgressBar progressBar;
    private javax.swing.JLabel progressHeader;
    // End of variables declaration//GEN-END:variables

    @ThreadConfined(type = ThreadConfined.ThreadType.AWT)
    public void update(ProgressUpdate chunk) {
        updateHeaderMessage(chunk.getHeaderMessage());
        progressHeader.setText(chunk.getHeaderMessage());
        if (chunk.getTotal() >= 0) {
            setProgressTotal(chunk.getTotal());
            updateProgress(chunk.getProgress(), chunk.getDetailMessage());
            progressBar.setIndeterminate(false);
            progressBar.setMaximum(chunk.getTotal());
            progressBar.setStringPainted(true);
            progressBar.setValue(chunk.getProgress());
            progressBar.setString(chunk.getDetailMessage());
        } else {
            setIndeterminate();
            updateProgress(chunk.getDetailMessage());
            progressBar.setIndeterminate(true);
            progressBar.setStringPainted(true);
            progressBar.setString(chunk.getDetailMessage());
        }
    }

@@ -34,6 +34,7 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import javafx.application.Platform;
import javafx.beans.InvalidationListener;
import javafx.beans.Observable;
import javafx.beans.property.ReadOnlyBooleanProperty;
import javafx.beans.property.ReadOnlyBooleanWrapper;
@@ -66,14 +67,15 @@ import static org.sleuthkit.autopsy.casemodule.Case.Events.DATA_SOURCE_ADDED;
import org.sleuthkit.autopsy.coreutils.History;
import org.sleuthkit.autopsy.coreutils.LoggedTask;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.ThreadConfined;
import org.sleuthkit.autopsy.events.BlackBoardArtifactTagAddedEvent;
import org.sleuthkit.autopsy.events.BlackBoardArtifactTagDeletedEvent;
import org.sleuthkit.autopsy.events.ContentTagAddedEvent;
import org.sleuthkit.autopsy.events.ContentTagDeletedEvent;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.events.db.EventsRepository;
import org.sleuthkit.autopsy.timeline.events.type.EventType;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
import org.sleuthkit.autopsy.timeline.db.EventsRepository;
import org.sleuthkit.autopsy.timeline.filters.RootFilter;
import org.sleuthkit.autopsy.timeline.filters.TypeFilter;
import org.sleuthkit.autopsy.timeline.utils.IntervalUtils;
@@ -99,13 +101,11 @@ import org.sleuthkit.datamodel.TskCoreException;
 * </li>
 * <ul>
 */
@NbBundle.Messages({"Timeline.confirmation.dialogs.title=Update Timeline database?"})
public class TimeLineController {

    private static final Logger LOGGER = Logger.getLogger(TimeLineController.class.getName());

    private static final String DO_REPOPULATE_MESSAGE = NbBundle.getMessage(TimeLineController.class,
            "Timeline.do_repopulate.msg");

    private static final ReadOnlyObjectWrapper<TimeZone> timeZone = new ReadOnlyObjectWrapper<>(TimeZone.getDefault());

    public static ZoneId getTimeZoneID() {
@@ -189,6 +189,9 @@
    @GuardedBy("this")
    private final History<ZoomParams> historyManager = new History<>();

    @GuardedBy("this")
    private final ReadOnlyObjectWrapper<ZoomParams> currentParams = new ReadOnlyObjectWrapper<>();

    //all members should be access with the intrinsict lock of this object held
    //selected events (ie shown in the result viewer)
    @GuardedBy("this")
@@ -231,16 +234,30 @@
    private final ReadOnlyBooleanWrapper newEventsFlag = new ReadOnlyBooleanWrapper(false);

    public TimeLineController(Case autoCase) {
        this.autoCase = autoCase; //initalize repository and filteredEvents on creation
        eventsRepository = new EventsRepository(autoCase, historyManager.currentState());
        this.autoCase = autoCase;

        /*
         * as the history manager's current state changes, modify the tags
         * filter to be in sync, and expose that as propery from
         * TimeLineController. Do we need to do this with datasource or hash hit
         * filters?
         */
        historyManager.currentState().addListener(new InvalidationListener() {
            public void invalidated(Observable observable) {
                ZoomParams historyManagerParams = historyManager.getCurrentState();
                eventsRepository.syncTagsFilter(historyManagerParams.getFilter().getTagsFilter());
                currentParams.set(historyManagerParams);
            }
        });

        eventsRepository = new EventsRepository(autoCase, currentParams.getReadOnlyProperty());
        filteredEvents = eventsRepository.getEventsModel();

        InitialZoomState = new ZoomParams(filteredEvents.getSpanningInterval(),
                EventTypeZoomLevel.BASE_TYPE,
                filteredEvents.filter().get(),
                filteredEvents.filterProperty().get(),
                DescriptionLOD.SHORT);
        historyManager.advance(InitialZoomState);

    }

    /**
@@ -256,7 +273,7 @@

    public void zoomOutToActivity() {
        Interval boundingEventsInterval = filteredEvents.getBoundingEventsInterval();
        advance(filteredEvents.getRequestedZoomParamters().get().withTimeRange(boundingEventsInterval));
        advance(filteredEvents.zoomParametersProperty().get().withTimeRange(boundingEventsInterval));
    }

    /**
@@ -287,7 +304,6 @@
        //TODO: verify this locking is correct? -jm
        synchronized (eventsRepository) {
            eventsRepository.rebuildRepository(() -> {

                synchronized (eventsRepository) {
                    eventsRepository.recordLastObjID(lastObjId);
                    eventsRepository.recordLastArtifactID(lastArtfID);
@@ -303,7 +319,7 @@
                Platform.runLater(() -> {
                    //TODO: should this be an event?
                    newEventsFlag.set(false);
                    historyManager.reset(filteredEvents.getRequestedZoomParamters().get());
                    historyManager.reset(filteredEvents.zoomParametersProperty().get());
                    TimeLineController.this.showFullRange();
                });
            });
@@ -315,6 +331,28 @@
        return true;
    }

    /**
     * Since tags might have changed while TimeLine wasn't listening, drop the
     * tags table and rebuild it by querying for all the tags and inserting them
     * in to the TimeLine DB.
     */
    void rebuildTagsTable() {
        LOGGER.log(Level.INFO, "starting to rebuild tags table"); // NON-NLS
        SwingUtilities.invokeLater(() -> {
            if (isWindowOpen()) {
                mainFrame.close();
            }
        });
        synchronized (eventsRepository) {
            eventsRepository.rebuildTags(() -> {
                showWindow();
                Platform.runLater(() -> {
                    showFullRange();
                });
            });
        }
    }

    public void showFullRange() {
        synchronized (filteredEvents) {
            pushTimeRange(filteredEvents.getSpanningInterval());
@@ -337,7 +375,6 @@
     * show the timeline window and prompt for rebuilding database if necessary.
     */
    synchronized void openTimeLine() {

        // listen for case changes (specifically images being added, and case changes).
        if (Case.isCaseOpen() && !listeningToAutopsy) {
            IngestManager.getInstance().addIngestModuleEventListener(ingestModuleListener);
@@ -347,13 +384,16 @@
        }

        try {
            boolean repoRebuilt = false; //has the repo been rebuilt
            long timeLineLastObjectId = eventsRepository.getLastObjID();

            boolean repoRebuilt = false;
            //if the repo is empty rebuild it
            if (timeLineLastObjectId == -1) {
                repoRebuilt = rebuildRepo();
            }

            if (repoRebuilt == false) {
                //if ingest was running uring last rebuild, prompt to rebuild
                if (eventsRepository.getWasIngestRunning()) {
                    if (confirmLastBuiltDuringIngestRebuild()) {
                        repoRebuilt = rebuildRepo();
@@ -363,6 +403,7 @@

            if (repoRebuilt == false) {
                final SleuthkitCase sleuthkitCase = autoCase.getSleuthkitCase();
                //if the last artifact and object ids don't match between skc and tldb, prompt to rebuild
                if (sleuthkitCase.getLastObjectId() != timeLineLastObjectId
                        || getCaseLastArtifactID(sleuthkitCase) != eventsRepository.getLastArtfactID()) {
                    if (confirmOutOfDateRebuild()) {
@@ -372,8 +413,8 @@
            }

            if (repoRebuilt == false) {
                boolean hasDSInfo = eventsRepository.hasDataSourceInfo();
                if (hasDSInfo == false) {
                // if the TLDB schema has been upgraded since last time TL ran, prompt for rebuild
                if (eventsRepository.hasNewColumns() == false) {
                    if (confirmDataSourceIDsMissingRebuild()) {
                        repoRebuilt = rebuildRepo();
                    }
@@ -381,12 +422,11 @@
            }

            /*
             * if the repo was not rebuilt show the UI. If the repo was rebuild
             * it will be displayed as part of that process
             * if the repo was not rebuilt at minimum rebuild the tags which may
             * have been updated without our knowing it.
             */
            if (repoRebuilt == false) {
                showWindow();
                showFullRange();
                rebuildTagsTable();
            }

        } catch (TskCoreException ex) {
@@ -419,13 +459,13 @@
     */
    synchronized public void pushPeriod(ReadablePeriod period) {
        synchronized (filteredEvents) {
            final DateTime middleOf = IntervalUtils.middleOf(filteredEvents.timeRange().get());
            final DateTime middleOf = IntervalUtils.middleOf(filteredEvents.timeRangeProperty().get());
            pushTimeRange(IntervalUtils.getIntervalAround(middleOf, period));
        }
    }

    synchronized public void pushZoomOutTime() {
        final Interval timeRange = filteredEvents.timeRange().get();
        final Interval timeRange = filteredEvents.timeRangeProperty().get();
        long toDurationMillis = timeRange.toDurationMillis() / 4;
        DateTime start = timeRange.getStart().minus(toDurationMillis);
        DateTime end = timeRange.getEnd().plus(toDurationMillis);
@@ -433,7 +473,7 @@
    }

    synchronized public void pushZoomInTime() {
        final Interval timeRange = filteredEvents.timeRange().get();
        final Interval timeRange = filteredEvents.timeRangeProperty().get();
        long toDurationMillis = timeRange.toDurationMillis() / 4;
        DateTime start = timeRange.getStart().plus(toDurationMillis);
        DateTime end = timeRange.getEnd().minus(toDurationMillis);
@@ -481,7 +521,7 @@
        if (mainFrame == null) {
            LOGGER.log(Level.WARNING, "Tried to show timeline with invalid window. Rebuilding GUI."); // NON-NLS
            mainFrame = (TimeLineTopComponent) WindowManager.getDefault().findTopComponent(
                    NbBundle.getMessage(TimeLineTopComponent.class, "CTL_TimeLineTopComponentAction"));
                    NbBundle.getMessage(TimeLineController.class, "CTL_TimeLineTopComponentAction"));
            if (mainFrame == null) {
                mainFrame = new TimeLineTopComponent();
            }
@@ -495,7 +535,7 @@
    }

    synchronized public void pushEventTypeZoom(EventTypeZoomLevel typeZoomeLevel) {
        ZoomParams currentZoom = filteredEvents.getRequestedZoomParamters().get();
        ZoomParams currentZoom = filteredEvents.zoomParametersProperty().get();
        if (currentZoom == null) {
            advance(InitialZoomState.withTypeZoomLevel(typeZoomeLevel));
        } else if (currentZoom.hasTypeZoomLevel(typeZoomeLevel) == false) {
@@ -505,7 +545,7 @@

    synchronized public void pushTimeRange(Interval timeRange) {
//        timeRange = this.filteredEvents.getSpanningInterval().overlap(timeRange);
        ZoomParams currentZoom = filteredEvents.getRequestedZoomParamters().get();
        ZoomParams currentZoom = filteredEvents.zoomParametersProperty().get();
        if (currentZoom == null) {
            advance(InitialZoomState.withTimeRange(timeRange));
        } else if (currentZoom.hasTimeRange(timeRange) == false) {
@@ -513,26 +553,27 @@
        }
    }

    @NbBundle.Messages({"# {0} - the number of events",
        "Timeline.pushDescrLOD.confdlg.msg=You are about to show details for {0} events."
        + " This might be very slow or even crash Autopsy.\n\nDo you want to continue?"})
    synchronized public boolean pushDescrLOD(DescriptionLOD newLOD) {
        Map<EventType, Long> eventCounts = filteredEvents.getEventCounts(filteredEvents.getRequestedZoomParamters().get().getTimeRange());
        Map<EventType, Long> eventCounts = filteredEvents.getEventCounts(filteredEvents.zoomParametersProperty().get().getTimeRange());
        final Long count = eventCounts.values().stream().reduce(0l, Long::sum);

        boolean shouldContinue = true;
        if (newLOD == DescriptionLOD.FULL && count > 10_000) {
            String format = NumberFormat.getInstance().format(count);

            int showConfirmDialog = JOptionPane.showConfirmDialog(mainFrame,
                    NbBundle.getMessage(this.getClass(),
                            "Timeline.pushDescrLOD.confdlg.msg",
                            NumberFormat.getInstance().format(count)),
                    NbBundle.getMessage(TimeLineTopComponent.class,
                            "Timeline.pushDescrLOD.confdlg.details"),
                    Bundle.Timeline_pushDescrLOD_confdlg_msg(format),
                    Bundle.Timeline_confirmation_dialogs_title(),
                    JOptionPane.YES_NO_OPTION);

            shouldContinue = (showConfirmDialog == JOptionPane.YES_OPTION);
        }

        if (shouldContinue) {
            ZoomParams currentZoom = filteredEvents.getRequestedZoomParamters().get();
            ZoomParams currentZoom = filteredEvents.zoomParametersProperty().get();
            if (currentZoom == null) {
                advance(InitialZoomState.withDescrLOD(newLOD));
            } else if (currentZoom.hasDescrLOD(newLOD) == false) {
@ -544,7 +585,7 @@ public class TimeLineController {
|
||||
|
||||
synchronized public void pushTimeAndType(Interval timeRange, EventTypeZoomLevel typeZoom) {
|
||||
// timeRange = this.filteredEvents.getSpanningInterval().overlap(timeRange);
|
||||
ZoomParams currentZoom = filteredEvents.getRequestedZoomParamters().get();
|
||||
ZoomParams currentZoom = filteredEvents.zoomParametersProperty().get();
|
||||
if (currentZoom == null) {
|
||||
advance(InitialZoomState.withTimeAndType(timeRange, typeZoom));
|
||||
} else if (currentZoom.hasTimeRange(timeRange) == false && currentZoom.hasTypeZoomLevel(typeZoom) == false) {
|
||||
@ -557,7 +598,7 @@ public class TimeLineController {
|
||||
}
|
||||
|
||||
synchronized public void pushFilters(RootFilter filter) {
|
||||
ZoomParams currentZoom = filteredEvents.getRequestedZoomParamters().get();
|
||||
ZoomParams currentZoom = filteredEvents.zoomParametersProperty().get();
|
||||
if (currentZoom == null) {
|
||||
advance(InitialZoomState.withFilter(filter.copyOf()));
|
||||
} else if (currentZoom.hasFilter(filter) == false) {
|
||||
@ -565,17 +606,17 @@ public class TimeLineController {
|
||||
}
|
||||
}
|
||||
|
||||
synchronized public ZoomParams advance() {
|
||||
return historyManager.advance();
|
||||
|
||||
synchronized public void advance() {
|
||||
historyManager.advance();
|
||||
}
|
||||
|
||||
synchronized public ZoomParams retreat() {
|
||||
return historyManager.retreat();
|
||||
synchronized public void retreat() {
|
||||
historyManager.retreat();
|
||||
}
|
||||
|
||||
synchronized private void advance(ZoomParams newState) {
|
||||
historyManager.advance(newState);
|
||||
|
||||
}
|
||||
|
||||
public void selectTimeAndType(Interval interval, EventType type) {
|
||||
@ -683,16 +724,36 @@ public class TimeLineController {
|
||||
return mainFrame != null && mainFrame.isOpened() && mainFrame.isVisible();
|
||||
}
|
||||
|
||||
/**
|
||||
* prompt the user to rebuild the db because the db is out of date and
|
||||
* doesn't include things from subsequent ingests ONLY IF THE TIMELINE
|
||||
* WINDOW IS OPEN
|
||||
*
|
||||
* @return true if they agree to rebuild
|
||||
*/
|
||||
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
|
||||
private void confirmOutOfDateRebuildIfWindowOpen() throws MissingResourceException, HeadlessException {
|
||||
if (isWindowOpen()) {
|
||||
if (confirmOutOfDateRebuild()) {
|
||||
rebuildRepo();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* prompt the user to rebuild the db because that datasource_ids are missing
|
||||
* from the database and that the datasource filter will not work
|
||||
*
|
||||
* @return true if they agree to rebuild
|
||||
*/
|
||||
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
|
||||
@NbBundle.Messages({"datasource.missing.confirmation=The Timeline events database was previously populated with an old version of Autopsy."
|
||||
+ "\nThe data source filter will be unavailable unless you update the events database."
|
||||
+ "\nDo you want to update the events database now?"})
|
||||
synchronized boolean confirmDataSourceIDsMissingRebuild() {
|
||||
return JOptionPane.showConfirmDialog(mainFrame,
|
||||
NbBundle.getMessage(TimeLineController.class, "datasource.missing.confirmation"),
|
||||
"Update Timeline database?",
|
||||
Bundle.datasource_missing_confirmation(),
|
||||
Bundle.Timeline_confirmation_dialogs_title(),
|
||||
JOptionPane.YES_NO_OPTION,
|
||||
JOptionPane.QUESTION_MESSAGE) == JOptionPane.YES_OPTION;
|
||||
}
|
||||
@ -703,11 +764,14 @@ public class TimeLineController {
|
||||
*
|
||||
* @return true if they agree to rebuild
|
||||
*/
|
||||
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
|
||||
@NbBundle.Messages({"Timeline.do_repopulate.msg=The Timeline events database was previously populated while ingest was running."
|
||||
+ "\nSome events may not have been populated or may have been populated inaccurately."
|
||||
+ "\nDo you want to repopulate the events database now?"})
|
||||
synchronized boolean confirmLastBuiltDuringIngestRebuild() {
|
||||
return JOptionPane.showConfirmDialog(mainFrame,
|
||||
DO_REPOPULATE_MESSAGE,
|
||||
NbBundle.getMessage(TimeLineTopComponent.class,
|
||||
"Timeline.showLastPopulatedWhileIngestingConf.confDlg.details"),
|
||||
Bundle.Timeline_do_repopulate_msg(),
|
||||
Bundle.Timeline_confirmation_dialogs_title(),
|
||||
JOptionPane.YES_NO_OPTION,
|
||||
JOptionPane.QUESTION_MESSAGE) == JOptionPane.YES_OPTION;
|
||||
}
|
||||
@ -718,12 +782,12 @@ public class TimeLineController {
|
||||
*
|
||||
* @return true if they agree to rebuild
|
||||
*/
|
||||
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
|
||||
@NbBundle.Messages({"Timeline.propChg.confDlg.timelineOOD.msg=The event data is out of date. Would you like to regenerate it?",})
|
||||
synchronized boolean confirmOutOfDateRebuild() throws MissingResourceException, HeadlessException {
|
||||
return JOptionPane.showConfirmDialog(mainFrame,
|
||||
NbBundle.getMessage(TimeLineController.class,
|
||||
"Timeline.propChg.confDlg.timelineOOD.msg"),
|
||||
NbBundle.getMessage(TimeLineController.class,
|
||||
"Timeline.propChg.confDlg.timelineOOD.details"),
|
||||
Bundle.Timeline_propChg_confDlg_timelineOOD_msg(),
|
||||
Bundle.Timeline_confirmation_dialogs_title(),
|
||||
JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION;
|
||||
}
|
||||
|
||||
@ -733,12 +797,13 @@ public class TimeLineController {
|
||||
*
|
||||
* @return true if they want to continue anyways
|
||||
*/
|
||||
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
|
||||
@NbBundle.Messages({"Timeline.initTimeline.confDlg.genBeforeIngest.msg=You are trying to generate a timeline before ingest has been completed. "
|
||||
+ "The timeline may be incomplete. Do you want to continue?"})
|
||||
synchronized boolean confirmRebuildDuringIngest() throws MissingResourceException, HeadlessException {
|
||||
return JOptionPane.showConfirmDialog(mainFrame,
|
||||
NbBundle.getMessage(TimeLineController.class,
|
||||
"Timeline.initTimeline.confDlg.genBeforeIngest.msg"),
|
||||
NbBundle.getMessage(TimeLineController.class,
|
||||
"Timeline.initTimeline.confDlg.genBeforeIngest.details"),
|
||||
Bundle.Timeline_initTimeline_confDlg_genBeforeIngest_msg(),
|
||||
Bundle.Timeline_confirmation_dialogs_title(),
|
||||
JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION;
|
||||
}
|
||||
|
||||
@ -748,16 +813,9 @@ public class TimeLineController {
|
||||
public void propertyChange(PropertyChangeEvent evt) {
|
||||
switch (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName())) {
|
||||
case CONTENT_CHANGED:
|
||||
// ((ModuleContentEvent)evt.getOldValue())????
|
||||
//ModuleContentEvent doesn't seem to provide any usefull information...
|
||||
break;
|
||||
case DATA_ADDED:
|
||||
// Collection<BlackboardArtifact> artifacts = ((ModuleDataEvent) evt.getOldValue()).getArtifacts();
|
||||
//new artifacts, insert them into db
|
||||
break;
|
||||
case FILE_DONE:
|
||||
// Long fileID = (Long) evt.getOldValue();
|
||||
//update file (known status) for file with id
|
||||
Platform.runLater(() -> {
|
||||
newEventsFlag.set(true);
|
||||
});
|
||||
@ -774,14 +832,7 @@ public class TimeLineController {
|
||||
switch (IngestManager.IngestJobEvent.valueOf(evt.getPropertyName())) {
|
||||
case CANCELLED:
|
||||
case COMPLETED:
|
||||
//if we are doing incremental updates, drop this
|
||||
SwingUtilities.invokeLater(() -> {
|
||||
if (isWindowOpen()) {
|
||||
if (confirmOutOfDateRebuild()) {
|
||||
rebuildRepo();
|
||||
}
|
||||
}
|
||||
});
|
||||
SwingUtilities.invokeLater(TimeLineController.this::confirmOutOfDateRebuildIfWindowOpen);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -793,27 +844,27 @@ public class TimeLineController {
|
||||
public void propertyChange(PropertyChangeEvent evt) {
|
||||
switch (Case.Events.valueOf(evt.getPropertyName())) {
|
||||
case BLACKBOARD_ARTIFACT_TAG_ADDED:
|
||||
filteredEvents.handleTagAdded((BlackBoardArtifactTagAddedEvent) evt);
|
||||
executor.submit(() -> {
|
||||
filteredEvents.handleArtifactTagAdded((BlackBoardArtifactTagAddedEvent) evt);
|
||||
});
|
||||
break;
|
||||
case BLACKBOARD_ARTIFACT_TAG_DELETED:
|
||||
filteredEvents.handleTagDeleted((BlackBoardArtifactTagDeletedEvent) evt);
|
||||
executor.submit(() -> {
|
||||
filteredEvents.handleArtifactTagDeleted((BlackBoardArtifactTagDeletedEvent) evt);
|
||||
});
|
||||
break;
|
||||
case CONTENT_TAG_ADDED:
|
||||
filteredEvents.handleTagAdded((ContentTagAddedEvent) evt);
|
||||
executor.submit(() -> {
|
||||
filteredEvents.handleContentTagAdded((ContentTagAddedEvent) evt);
|
||||
});
|
||||
break;
|
||||
case CONTENT_TAG_DELETED:
|
||||
filteredEvents.handleTagDeleted((ContentTagDeletedEvent) evt);
|
||||
executor.submit(() -> {
|
||||
filteredEvents.handleContentTagDeleted((ContentTagDeletedEvent) evt);
|
||||
});
|
||||
break;
|
||||
case DATA_SOURCE_ADDED:
|
||||
// Content content = (Content) evt.getNewValue();
|
||||
//if we are doing incremental updates, drop this
|
||||
SwingUtilities.invokeLater(() -> {
|
||||
if (isWindowOpen()) {
|
||||
if (confirmOutOfDateRebuild()) {
|
||||
rebuildRepo();
|
||||
}
|
||||
}
|
||||
});
|
||||
SwingUtilities.invokeLater(TimeLineController.this::confirmOutOfDateRebuildIfWindowOpen);
|
||||
break;
|
||||
case CURRENT_CASE:
|
||||
OpenTimelineAction.invalidateController();
|
||||
|
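
The listener changes above stop doing tag bookkeeping directly in the Swing event handler: each tag-related case event is now wrapped in a lambda and handed to an executor, and out-of-date prompts are funneled through the new confirmOutOfDateRebuildIfWindowOpen() helper. A minimal sketch of the dispatch pattern, assuming a single-threaded ExecutorService (the names below are illustrative stand-ins, not the actual TimeLineController fields):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    // Sketch: callbacks arrive on an event thread; the (possibly slow) database
    // update is pushed onto one worker so the event thread returns immediately,
    // and updates stay serialized because the executor has a single thread.
    class TagEventDispatchSketch {

        private final ExecutorService executor = Executors.newSingleThreadExecutor();

        void onTagEvent(Runnable updateEventsDb) {
            executor.submit(updateEventsDb);
        }

        public static void main(String[] args) {
            TagEventDispatchSketch sketch = new TagEventDispatchSketch();
            sketch.onTagEvent(() -> System.out.println("mark events tagged in db"));
            sketch.executor.shutdown();
        }
    }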

@ -24,7 +24,7 @@ package org.sleuthkit.autopsy.timeline;
* Most implementations should install the relevant listeners in their
* {@link #setController} and {@link #setModel} methods
*/
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;

public interface TimeLineView extends TimeLineUI {

@ -23,7 +23,7 @@ import javafx.event.ActionEvent;
import org.controlsfx.control.action.Action;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;

/**
* Action that resets the filters to their initial/default state.
@ -37,12 +37,12 @@ public class ResetFilters extends Action {
eventsModel = controller.getEventsModel();
disabledProperty().bind(new BooleanBinding() {
{
bind(eventsModel.getRequestedZoomParamters());
bind(eventsModel.zoomParametersProperty());
}

@Override
protected boolean computeValue() {
return eventsModel.getRequestedZoomParamters().getValue().getFilter().equals(eventsModel.getDefaultFilter());
return eventsModel.zoomParametersProperty().getValue().getFilter().equals(eventsModel.getDefaultFilter());
}
});
setEventHandler((ActionEvent t) -> {

@ -82,9 +82,9 @@ public class SaveSnapshot extends Action {

reportMetaData.add(new Pair<>("Case", Case.getCurrentCase().getName())); // NON-NLS

ZoomParams get = controller.getEventsModel().getRequestedZoomParamters().get();
ZoomParams get = controller.getEventsModel().zoomParametersProperty().get();
reportMetaData.add(new Pair<>("Time Range", get.getTimeRange().toString())); // NON-NLS
reportMetaData.add(new Pair<>("Description Level of Detail", get.getDescrLOD().getDisplayName())); // NON-NLS
reportMetaData.add(new Pair<>("Description Level of Detail", get.getDescriptionLOD().getDisplayName())); // NON-NLS
reportMetaData.add(new Pair<>("Event Type Zoom Level", get.getTypeZoomLevel().getDisplayName())); // NON-NLS
reportMetaData.add(new Pair<>("Filters", get.getFilter().getHTMLReportString())); // NON-NLS

@ -23,7 +23,7 @@ import javafx.event.ActionEvent;
import org.controlsfx.control.action.Action;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;

/**
*
@ -40,12 +40,12 @@ public class ZoomOut extends Action {
eventsModel = controller.getEventsModel();
disabledProperty().bind(new BooleanBinding() {
{
bind(eventsModel.getRequestedZoomParamters());
bind(eventsModel.zoomParametersProperty());
}

@Override
protected boolean computeValue() {
return eventsModel.getRequestedZoomParamters().getValue().getTimeRange().contains(eventsModel.getSpanningInterval());
return eventsModel.zoomParametersProperty().getValue().getTimeRange().contains(eventsModel.getSpanningInterval());
}
});
setEventHandler((ActionEvent t) -> {

@ -16,14 +16,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.events;
package org.sleuthkit.autopsy.timeline.datamodel;

import com.google.common.collect.Sets;
import java.util.Collections;
import java.util.Set;
import javax.annotation.concurrent.Immutable;
import org.joda.time.Interval;
import org.sleuthkit.autopsy.timeline.events.type.EventType;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
import org.sleuthkit.autopsy.timeline.utils.IntervalUtils;
import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD;

@ -138,44 +138,4 @@ public class AggregateEvent {

return new AggregateEvent(IntervalUtils.span(aggEvent1.span, ag2.span), aggEvent1.getType(), idsUnion, hashHitsUnion, taggedUnion, aggEvent1.getDescription(), aggEvent1.lod);
}

/**
* get an AggregateEvent the same as this one but with the given eventIDs
* removed from the list of tagged events
*
* @param unTaggedIDs
*
* @return a new Aggregate event that is the same as this one but with the
* given event Ids removed from the list of tagged ids, or, this
* AggregateEvent if no event ids would be removed
*/
public AggregateEvent withTagsRemoved(Set<Long> unTaggedIDs) {
Sets.SetView<Long> stillTagged = Sets.difference(tagged, unTaggedIDs);
if (stillTagged.size() < tagged.size()) {
return new AggregateEvent(span, type, eventIDs, hashHits, stillTagged.immutableCopy(), description, lod);
}
return this; //no change
}

/**
* get an AggregateEvent the same as this one but with the given eventIDs
* added to the list of tagged events if they are part of this Aggregate
*
* @param taggedIDs
*
* @return a new Aggregate event that is the same as this one but with the
* given event Ids added to the list of tagged ids, or, this
* AggregateEvent if no event ids would be added
*/
public AggregateEvent withTagsAdded(Set<Long> taggedIDs) {
Sets.SetView<Long> taggedIdsInAgg = Sets.intersection(eventIDs, taggedIDs);//events that are in this aggregate and (newly) marked as tagged
if (taggedIdsInAgg.size() > 0) {
Sets.SetView<Long> notYetIncludedTagged = Sets.difference(taggedIdsInAgg, tagged); // events that are tagged, but not already marked as tagged in this Agg
if (notYetIncludedTagged.size() > 0) {
return new AggregateEvent(span, type, eventIDs, hashHits, Sets.union(tagged, taggedIdsInAgg).immutableCopy(), description, lod);
}
}

return this; //no change
}
}

@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.events;
package org.sleuthkit.autopsy.timeline.datamodel;

import com.google.common.eventbus.EventBus;
import java.util.Collection;
@ -27,6 +27,7 @@ import java.util.logging.Level;
import javafx.beans.Observable;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.property.ReadOnlyObjectWrapper;
import javafx.collections.ListChangeListener;
import javafx.collections.MapChangeListener;
import javax.annotation.concurrent.GuardedBy;
import org.joda.time.DateTimeZone;
@ -38,9 +39,11 @@ import org.sleuthkit.autopsy.events.BlackBoardArtifactTagDeletedEvent;
import org.sleuthkit.autopsy.events.ContentTagAddedEvent;
import org.sleuthkit.autopsy.events.ContentTagDeletedEvent;
import org.sleuthkit.autopsy.timeline.TimeLineView;
import org.sleuthkit.autopsy.timeline.events.db.EventsRepository;
import org.sleuthkit.autopsy.timeline.events.type.EventType;
import org.sleuthkit.autopsy.timeline.events.type.RootEventType;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.RootEventType;
import org.sleuthkit.autopsy.timeline.db.EventsRepository;
import org.sleuthkit.autopsy.timeline.events.RefreshRequestedEvent;
import org.sleuthkit.autopsy.timeline.events.TagsUpdatedEvent;
import org.sleuthkit.autopsy.timeline.filters.DataSourceFilter;
import org.sleuthkit.autopsy.timeline.filters.DataSourcesFilter;
import org.sleuthkit.autopsy.timeline.filters.Filter;
@ -48,13 +51,18 @@ import org.sleuthkit.autopsy.timeline.filters.HashHitsFilter;
import org.sleuthkit.autopsy.timeline.filters.HashSetFilter;
import org.sleuthkit.autopsy.timeline.filters.HideKnownFilter;
import org.sleuthkit.autopsy.timeline.filters.RootFilter;
import org.sleuthkit.autopsy.timeline.filters.TagNameFilter;
import org.sleuthkit.autopsy.timeline.filters.TagsFilter;
import org.sleuthkit.autopsy.timeline.filters.TextFilter;
import org.sleuthkit.autopsy.timeline.filters.TypeFilter;
import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD;
import org.sleuthkit.autopsy.timeline.zooming.EventTypeZoomLevel;
import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskCoreException;

/**
@ -83,7 +91,6 @@ public final class FilteredEventsModel {

private static final Logger LOGGER = Logger.getLogger(FilteredEventsModel.class.getName());


/**
* time range that spans the filtered events
*/
@ -113,40 +120,25 @@ public final class FilteredEventsModel {
private final EventsRepository repo;
private final Case autoCase;

/**
* @return the default filter used at startup
*/
public RootFilter getDefaultFilter() {
DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();

repo.getDatasourcesMap().entrySet().stream().forEach((Map.Entry<Long, String> t) -> {
DataSourceFilter dataSourceFilter = new DataSourceFilter(t.getValue(), t.getKey());
dataSourceFilter.setSelected(Boolean.TRUE);
dataSourcesFilter.addDataSourceFilter(dataSourceFilter);
});

HashHitsFilter hashHitsFilter = new HashHitsFilter();
repo.getHashSetMap().entrySet().stream().forEach((Map.Entry<Long, String> t) -> {
HashSetFilter hashSourceFilter = new HashSetFilter(t.getValue(), t.getKey());
hashSourceFilter.setSelected(Boolean.TRUE);
hashHitsFilter.addHashSetFilter(hashSourceFilter);
});
return new RootFilter(new HideKnownFilter(), hashHitsFilter, new TextFilter(), new TypeFilter(RootEventType.getInstance()), dataSourcesFilter);
}

public FilteredEventsModel(EventsRepository repo, ReadOnlyObjectProperty<ZoomParams> currentStateProperty) {
this.repo = repo;
this.autoCase = repo.getAutoCase();
repo.getDatasourcesMap().addListener((MapChangeListener.Change<? extends Long, ? extends String> change) -> {
DataSourceFilter dataSourceFilter = new DataSourceFilter(change.getValueAdded(), change.getKey());
RootFilter rootFilter = filter().get();
rootFilter.getDataSourcesFilter().addDataSourceFilter(dataSourceFilter);
RootFilter rootFilter = filterProperty().get();
rootFilter.getDataSourcesFilter().addSubFilter(dataSourceFilter);
requestedFilter.set(rootFilter.copyOf());
});
repo.getHashSetMap().addListener((MapChangeListener.Change<? extends Long, ? extends String> change) -> {
HashSetFilter hashSetFilter = new HashSetFilter(change.getValueAdded(), change.getKey());
RootFilter rootFilter = filter().get();
rootFilter.getHashHitsFilter().addHashSetFilter(hashSetFilter);
RootFilter rootFilter = filterProperty().get();
rootFilter.getHashHitsFilter().addSubFilter(hashSetFilter);
requestedFilter.set(rootFilter.copyOf());
});
repo.getTagNames().addListener((ListChangeListener.Change<? extends TagName> c) -> {
RootFilter rootFilter = filterProperty().get();
TagsFilter tagsFilter = rootFilter.getTagsFilter();
repo.syncTagsFilter(tagsFilter);
requestedFilter.set(rootFilter.copyOf());
});
requestedFilter.set(getDefaultFilter());
@ -156,14 +148,14 @@ public final class FilteredEventsModel {

if (zoomParams != null) {
if (zoomParams.getTypeZoomLevel().equals(requestedTypeZoom.get()) == false
|| zoomParams.getDescrLOD().equals(requestedLOD.get()) == false
|| zoomParams.getDescriptionLOD().equals(requestedLOD.get()) == false
|| zoomParams.getFilter().equals(requestedFilter.get()) == false
|| zoomParams.getTimeRange().equals(requestedTimeRange.get()) == false) {

requestedTypeZoom.set(zoomParams.getTypeZoomLevel());
requestedFilter.set(zoomParams.getFilter().copyOf());
requestedTimeRange.set(zoomParams.getTimeRange());
requestedLOD.set(zoomParams.getDescrLOD());
requestedLOD.set(zoomParams.getDescriptionLOD());
}
}
});
@ -171,12 +163,75 @@ public final class FilteredEventsModel {
requestedZoomParamters.bind(currentStateProperty);
}

public Interval getBoundingEventsInterval() {
return repo.getBoundingEventsInterval(getRequestedZoomParamters().get().getTimeRange(), getRequestedZoomParamters().get().getFilter());
synchronized public ReadOnlyObjectProperty<ZoomParams> zoomParametersProperty() {
return requestedZoomParamters.getReadOnlyProperty();
}

synchronized public ReadOnlyObjectProperty<ZoomParams> getRequestedZoomParamters() {
return requestedZoomParamters.getReadOnlyProperty();
/**
* @return a read only view of the time range requested via
* {@link #requestTimeRange(org.joda.time.Interval)}
*/
synchronized public ReadOnlyObjectProperty<Interval> timeRangeProperty() {
if (requestedTimeRange.get() == null) {
requestedTimeRange.set(getSpanningInterval());
}
return requestedTimeRange.getReadOnlyProperty();
}

synchronized public ReadOnlyObjectProperty<DescriptionLOD> descriptionLODProperty() {
return requestedLOD.getReadOnlyProperty();
}

synchronized public ReadOnlyObjectProperty<RootFilter> filterProperty() {
return requestedFilter.getReadOnlyProperty();
}

synchronized public ReadOnlyObjectProperty<EventTypeZoomLevel> eventTypeZoomProperty() {
return requestedTypeZoom.getReadOnlyProperty();
}

synchronized public DescriptionLOD getDescriptionLOD() {
return requestedLOD.get();
}

synchronized public RootFilter getFilter() {
return requestedFilter.get();
}

synchronized public EventTypeZoomLevel getEventTypeZoom() {
return requestedTypeZoom.get();
}

/**
* @return the default filter used at startup
*/
public RootFilter getDefaultFilter() {
DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();

repo.getDatasourcesMap().entrySet().stream().forEach((Map.Entry<Long, String> t) -> {
DataSourceFilter dataSourceFilter = new DataSourceFilter(t.getValue(), t.getKey());
dataSourceFilter.setSelected(Boolean.TRUE);
dataSourcesFilter.addSubFilter(dataSourceFilter);
});

HashHitsFilter hashHitsFilter = new HashHitsFilter();
repo.getHashSetMap().entrySet().stream().forEach((Map.Entry<Long, String> t) -> {
HashSetFilter hashSetFilter = new HashSetFilter(t.getValue(), t.getKey());
hashSetFilter.setSelected(Boolean.TRUE);
hashHitsFilter.addSubFilter(hashSetFilter);
});

TagsFilter tagsFilter = new TagsFilter();
repo.getTagNames().stream().forEach(t -> {
TagNameFilter tagNameFilter = new TagNameFilter(t, autoCase);
tagNameFilter.setSelected(Boolean.TRUE);
tagsFilter.addSubFilter(tagNameFilter);
});
return new RootFilter(new HideKnownFilter(), tagsFilter, hashHitsFilter, new TextFilter(), new TypeFilter(RootEventType.getInstance()), dataSourcesFilter);
}

public Interval getBoundingEventsInterval() {
return repo.getBoundingEventsInterval(zoomParametersProperty().get().getTimeRange(), zoomParametersProperty().get().getFilter());
}

public TimeLineEvent getEventById(Long eventID) {
@ -187,6 +242,18 @@ public final class FilteredEventsModel {
return repo.getEventsById(eventIDs);
}

/**
* get a count of tagnames applied to the given event ids as a map from
* tagname displayname to count of tag applications
*
* @param eventIDsWithTags the event ids to get the tag counts map for
*
* @return a map from tagname displayname to count of applications
*/
public Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) {
return repo.getTagCountsByTagName(eventIDsWithTags);
}

public Set<Long> getEventIDs(Interval timeRange, Filter filter) {
final Interval overlap;
final RootFilter intersect;
@ -219,25 +286,6 @@ public final class FilteredEventsModel {
return repo.countEvents(new ZoomParams(timeRange, typeZoom, filter, null));
}

/**
* @return a read only view of the time range requested via
* {@link #requestTimeRange(org.joda.time.Interval)}
*/
synchronized public ReadOnlyObjectProperty<Interval> timeRange() {
if (requestedTimeRange.get() == null) {
requestedTimeRange.set(getSpanningInterval());
}
return requestedTimeRange.getReadOnlyProperty();
}

synchronized public ReadOnlyObjectProperty<DescriptionLOD> descriptionLOD() {
return requestedLOD.getReadOnlyProperty();
}

synchronized public ReadOnlyObjectProperty<RootFilter> filter() {
return requestedFilter.getReadOnlyProperty();
}

/**
* @return the smallest interval spanning all the events from the
* repository, ignoring any filters or requested ranges
@ -303,58 +351,52 @@ public final class FilteredEventsModel {
return repo.getAggregatedEvents(params);
}

synchronized public ReadOnlyObjectProperty<EventTypeZoomLevel> eventTypeZoom() {
return requestedTypeZoom.getReadOnlyProperty();
synchronized public boolean handleContentTagAdded(ContentTagAddedEvent evt) {
ContentTag contentTag = evt.getTag();
Content content = contentTag.getContent();
Set<Long> updatedEventIDs = repo.addTag(content.getId(), null, contentTag);
return postTagsUpdated(updatedEventIDs);
}

synchronized public EventTypeZoomLevel getEventTypeZoom() {
return requestedTypeZoom.get();
synchronized public boolean handleArtifactTagAdded(BlackBoardArtifactTagAddedEvent evt) {
BlackboardArtifactTag artifactTag = evt.getTag();
BlackboardArtifact artifact = artifactTag.getArtifact();
Set<Long> updatedEventIDs = repo.addTag(artifact.getObjectID(), artifact.getArtifactID(), artifactTag);;
return postTagsUpdated(updatedEventIDs);
}

synchronized public DescriptionLOD getDescriptionLOD() {
return requestedLOD.get();
}

synchronized public void handleTagAdded(BlackBoardArtifactTagAddedEvent e) {
BlackboardArtifact artifact = e.getTag().getArtifact();
Set<Long> updatedEventIDs = repo.markEventsTagged(artifact.getObjectID(), artifact.getArtifactID(), true);
if (!updatedEventIDs.isEmpty()) {
eventbus.post(new EventsTaggedEvent(updatedEventIDs));
}
}

synchronized public void handleTagDeleted(BlackBoardArtifactTagDeletedEvent e) {
BlackboardArtifact artifact = e.getTag().getArtifact();
try {
boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false;
Set<Long> updatedEventIDs = repo.markEventsTagged(artifact.getObjectID(), artifact.getArtifactID(), tagged);
if (!updatedEventIDs.isEmpty()) {
eventbus.post(new EventsUnTaggedEvent(updatedEventIDs));
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "unable to determine tagged status of attribute.", ex);
}
}

synchronized public void handleTagAdded(ContentTagAddedEvent e) {
Content content = e.getTag().getContent();
Set<Long> updatedEventIDs = repo.markEventsTagged(content.getId(), null, true);
if (!updatedEventIDs.isEmpty()) {
eventbus.post(new EventsTaggedEvent(updatedEventIDs));
}
}

synchronized public void handleTagDeleted(ContentTagDeletedEvent e) {
Content content = e.getTag().getContent();
synchronized public boolean handleContentTagDeleted(ContentTagDeletedEvent evt) {
ContentTag contentTag = evt.getTag();
Content content = contentTag.getContent();
try {
boolean tagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false;
Set<Long> updatedEventIDs = repo.markEventsTagged(content.getId(), null, tagged);
if (!updatedEventIDs.isEmpty()) {
eventbus.post(new EventsUnTaggedEvent(updatedEventIDs));
}
Set<Long> updatedEventIDs = repo.deleteTag(content.getId(), null, contentTag, tagged);
return postTagsUpdated(updatedEventIDs);
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "unable to determine tagged status of content.", ex);
}
return false;
}

synchronized public boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt) {
BlackboardArtifactTag artifactTag = evt.getTag();
BlackboardArtifact artifact = artifactTag.getArtifact();
try {
boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false;
Set<Long> updatedEventIDs = repo.deleteTag(artifact.getObjectID(), artifact.getArtifactID(), artifactTag, tagged);
return postTagsUpdated(updatedEventIDs);
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "unable to determine tagged status of artifact.", ex);
}
return false;
}

private boolean postTagsUpdated(Set<Long> updatedEventIDs) {
boolean tagsUpdated = !updatedEventIDs.isEmpty();
if (tagsUpdated) {
eventbus.post(new TagsUpdatedEvent(updatedEventIDs));
}
return tagsUpdated;
}

synchronized public void registerForEvents(Object o) {
@ -364,4 +406,9 @@ public final class FilteredEventsModel {
synchronized public void unRegisterForEvents(Object o) {
eventbus.unregister(0);
}

public void refresh() {
eventbus.post(new RefreshRequestedEvent());
}

}
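
FilteredEventsModel now exposes its requested state through JavaFX-style accessors (zoomParametersProperty(), timeRangeProperty(), filterProperty(), eventTypeZoomProperty()) plus plain getters, replacing the older getRequestedZoomParamters()/timeRange()/filter() names. A minimal sketch of how a client observes such a read-only property (the String payload here is an illustrative stand-in for ZoomParams):

    import javafx.beans.property.ReadOnlyObjectProperty;
    import javafx.beans.property.ReadOnlyObjectWrapper;

    // Sketch: the model keeps a writable wrapper private and hands out only the
    // read-only view, so observers can listen or bind but never set the value.
    class PropertyNamingSketch {

        private final ReadOnlyObjectWrapper<String> zoomParams = new ReadOnlyObjectWrapper<>();

        ReadOnlyObjectProperty<String> zoomParametersProperty() {
            return zoomParams.getReadOnlyProperty();
        }

        public static void main(String[] args) {
            PropertyNamingSketch model = new PropertyNamingSketch();
            model.zoomParametersProperty().addListener(
                    (obs, oldVal, newVal) -> System.out.println("zoom changed to " + newVal));
            model.zoomParams.set("new zoom state"); // fires the listener
        }
    }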

@ -16,47 +16,49 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.events;
package org.sleuthkit.autopsy.timeline.datamodel;

import com.google.common.collect.ImmutableMap;
import javax.annotation.Nullable;
import org.sleuthkit.autopsy.timeline.events.type.EventType;
import javax.annotation.concurrent.Immutable;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD;
import org.sleuthkit.datamodel.TskData;

/**
*
* A single event.
*/
@Immutable
public class TimeLineEvent {

private final Long eventID;

private final Long fileID;

private final Long time;

private final long eventID;
private final long fileID;
private final Long artifactID;
private final long dataSourceID;

private final long time;
private final EventType subType;

private final String fullDescription, medDescription, shortDescription;
private final ImmutableMap<DescriptionLOD, String> descriptions;

private final TskData.FileKnown known;

private final boolean hashHit;
private final boolean tagged;

public TimeLineEvent(Long eventID, Long objID, @Nullable Long artifactID, Long time, EventType type, String fullDescription, String medDescription, String shortDescription, TskData.FileKnown known, boolean hashHit, boolean tagged) {
public TimeLineEvent(long eventID, long dataSourceID, long objID, @Nullable Long artifactID, long time, EventType type, String fullDescription, String medDescription, String shortDescription, TskData.FileKnown known, boolean hashHit, boolean tagged) {
this.eventID = eventID;
this.fileID = objID;
this.artifactID = artifactID;
this.artifactID = artifactID == 0 ? null : artifactID;
this.time = time;
this.subType = type;
descriptions = ImmutableMap.<DescriptionLOD, String>of(
DescriptionLOD.FULL, fullDescription,
DescriptionLOD.MEDIUM, medDescription,
DescriptionLOD.SHORT, shortDescription);

this.fullDescription = fullDescription;
this.medDescription = medDescription;
this.shortDescription = shortDescription;
this.known = known;
this.hashHit = hashHit;
this.tagged = tagged;
this.dataSourceID = dataSourceID;
}

public boolean isTagged() {
@ -72,18 +74,18 @@ public class TimeLineEvent {
return artifactID;
}

public Long getEventID() {
public long getEventID() {
return eventID;
}

public Long getFileID() {
public long getFileID() {
return fileID;
}

/**
* @return the time in seconds from unix epoch
*/
public Long getTime() {
public long getTime() {
return time;
}

@ -92,18 +94,26 @@ public class TimeLineEvent {
}

public String getFullDescription() {
return fullDescription;
return getDescription(DescriptionLOD.FULL);
}

public String getMedDescription() {
return medDescription;
return getDescription(DescriptionLOD.MEDIUM);
}

public String getShortDescription() {
return shortDescription;
return getDescription(DescriptionLOD.SHORT);
}

public TskData.FileKnown getKnown() {
return known;
}

public String getDescription(DescriptionLOD lod) {
return descriptions.get(lod);
}

public long getDataSourceID() {
return dataSourceID;
}
}
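
The rewritten TimeLineEvent collapses the three parallel description fields into one ImmutableMap keyed by DescriptionLOD, so the old getters become thin wrappers over getDescription(lod). A minimal sketch of that pattern in isolation (the enum and sample paths are illustrative, not the real DescriptionLOD type):

    import com.google.common.collect.ImmutableMap;

    // Sketch: one immutable map per event replaces fullDescription, medDescription
    // and shortDescription; every lookup funnels through a single accessor.
    class DescriptionLodSketch {

        enum LOD { FULL, MEDIUM, SHORT }

        private final ImmutableMap<LOD, String> descriptions = ImmutableMap.of(
                LOD.FULL, "C:/Users/alice/Documents/report.docx",
                LOD.MEDIUM, "C:/Users/alice/Documents",
                LOD.SHORT, "C:/Users/alice");

        String getDescription(LOD lod) {
            return descriptions.get(lod);
        }

        public static void main(String[] args) {
            System.out.println(new DescriptionLodSketch().getDescription(LOD.MEDIUM));
        }
    }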

@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.events.type;
package org.sleuthkit.autopsy.timeline.datamodel.eventtype;

import java.util.HashMap;
import java.util.List;

@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.events.type;
package org.sleuthkit.autopsy.timeline.datamodel.eventtype;

import java.util.Arrays;
import java.util.List;
@ -70,8 +70,9 @@ public enum BaseTypes implements EventType {

private final String iconBase;

private Image image;
private final Image image;

@Override
public Image getFXImage() {
return image;
}

@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.events.type;
package org.sleuthkit.autopsy.timeline.datamodel.eventtype;

import java.util.ArrayList;
import java.util.Comparator;

@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.events.type;
package org.sleuthkit.autopsy.timeline.datamodel.eventtype;

import java.util.Collections;
import java.util.List;

@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.events.type;
package org.sleuthkit.autopsy.timeline.datamodel.eventtype;

import java.util.Arrays;
import java.util.Collections;

@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.events.type;
package org.sleuthkit.autopsy.timeline.datamodel.eventtype;

import java.util.Arrays;
import java.util.Collections;

@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.events.type;
package org.sleuthkit.autopsy.timeline.datamodel.eventtype;

import java.util.Collections;
import java.util.List;

@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.events.db;
package org.sleuthkit.autopsy.timeline.db;

import com.google.common.collect.HashMultimap;
import com.google.common.collect.SetMultimap;
@ -32,6 +32,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
@ -44,37 +45,30 @@ import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.logging.Level;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.apache.commons.lang3.StringUtils;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
import org.joda.time.Period;
import org.openide.util.Exceptions;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
import org.sleuthkit.autopsy.timeline.events.TimeLineEvent;
import org.sleuthkit.autopsy.timeline.events.type.BaseTypes;
import org.sleuthkit.autopsy.timeline.events.type.EventType;
import org.sleuthkit.autopsy.timeline.events.type.RootEventType;
import org.sleuthkit.autopsy.timeline.datamodel.AggregateEvent;
import org.sleuthkit.autopsy.timeline.datamodel.TimeLineEvent;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.BaseTypes;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.RootEventType;
import static org.sleuthkit.autopsy.timeline.db.SQLHelper.useHashHitTablesHelper;
import static org.sleuthkit.autopsy.timeline.db.SQLHelper.useTagTablesHelper;
import org.sleuthkit.autopsy.timeline.filters.RootFilter;
import org.sleuthkit.autopsy.timeline.filters.TagsFilter;
import org.sleuthkit.autopsy.timeline.utils.RangeDivisionInfo;
import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD;
import static org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD.FULL;
import static org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD.MEDIUM;
import static org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD.SHORT;
import org.sleuthkit.autopsy.timeline.zooming.EventTypeZoomLevel;
import org.sleuthkit.autopsy.timeline.zooming.TimeUnits;
import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.DAYS;
import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.HOURS;
import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.MINUTES;
import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.MONTHS;
import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.SECONDS;
import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.YEARS;
import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.Tag;
import org.sleuthkit.datamodel.TskData;
import org.sqlite.SQLiteJDBCLoader;

@ -88,7 +82,7 @@ import org.sqlite.SQLiteJDBCLoader;
public class EventDB {

/**

*
* enum to represent keys stored in db_info table
*/
private enum DBInfoKey {
@ -150,17 +144,21 @@ public class EventDB {
private PreparedStatement getMaxTimeStmt;
private PreparedStatement getMinTimeStmt;
private PreparedStatement getDataSourceIDsStmt;
private PreparedStatement getHashSetNamesStmt;
private PreparedStatement insertRowStmt;
private PreparedStatement recordDBInfoStmt;
private PreparedStatement insertHashSetStmt;
private PreparedStatement insertHashHitStmt;
private PreparedStatement insertTagStmt;
private PreparedStatement deleteTagStmt;
private PreparedStatement selectHashSetStmt;
private PreparedStatement countAllEventsStmt;
private PreparedStatement dropEventsTableStmt;
private PreparedStatement dropHashSetHitsTableStmt;
private PreparedStatement dropHashSetsTableStmt;
private PreparedStatement dropTagsTableStmt;
private PreparedStatement dropDBInfoTableStmt;
private PreparedStatement selectEventsFromOBjectAndArtifactStmt;
private PreparedStatement selectEventIDsFromOBjectAndArtifactStmt;

private final Set<PreparedStatement> preparedStatements = new HashSet<>();

@ -196,9 +194,9 @@ public class EventDB {
public Interval getSpanningInterval(Collection<Long> eventIDs) {
DBLock.lock();
try (Statement stmt = con.createStatement();
ResultSet rs = stmt.executeQuery("select Min(time), Max(time) from events where event_id in (" + StringUtils.join(eventIDs, ", ") + ")");) { // NON-NLS
ResultSet rs = stmt.executeQuery("SELECT Min(time), Max(time) FROM events WHERE event_id IN (" + StringUtils.join(eventIDs, ", ") + ")");) { // NON-NLS
while (rs.next()) {
return new Interval(rs.getLong("Min(time)"), rs.getLong("Max(time)") + 1, DateTimeZone.UTC); // NON-NLS
return new Interval(rs.getLong("Min(time)") * 1000, (rs.getLong("Max(time)") + 1) * 1000, DateTimeZone.UTC); // NON-NLS
}
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "Error executing get spanning interval query.", ex); // NON-NLS
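
Besides upper-casing the SQL keywords, the getSpanningInterval() change scales both endpoints by 1000: the events table stores seconds since the epoch, while org.joda.time.Interval expects milliseconds, and the +1 keeps the end exclusive while still covering the last second. A minimal sketch of the conversion (sample values are illustrative):

    import org.joda.time.DateTimeZone;
    import org.joda.time.Interval;

    // Sketch: Min(time)/Max(time) come back in epoch seconds; Joda-Time intervals
    // are built from epoch milliseconds, hence the * 1000 on both endpoints.
    class EpochSecondsToIntervalSketch {
        public static void main(String[] args) {
            long minSeconds = 1_420_070_400L; // e.g. Min(time) from the query
            long maxSeconds = 1_451_606_399L; // e.g. Max(time) from the query
            Interval span = new Interval(minSeconds * 1000, (maxSeconds + 1) * 1000, DateTimeZone.UTC);
            System.out.println(span);
        }
    }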

@ -212,11 +210,11 @@ public class EventDB {
return new EventTransaction();
}

void commitTransaction(EventTransaction tr, Boolean notify) {
void commitTransaction(EventTransaction tr) {
if (tr.isClosed()) {
throw new IllegalArgumentException("can't close already closed transaction"); // NON-NLS
}
tr.commit(notify);
tr.commit();
}

/**
@ -248,7 +246,7 @@ public class EventDB {
*/
Map<EventType, Long> countEventsByType(ZoomParams params) {
if (params.getTimeRange() != null) {
return countEvents(params.getTimeRange().getStartMillis() / 1000,
return countEventsByType(params.getTimeRange().getStartMillis() / 1000,
params.getTimeRange().getEndMillis() / 1000,
params.getFilter(), params.getTypeZoomLevel());
} else {
@ -256,6 +254,33 @@ public class EventDB {
}
}

/**
* get a count of tagnames applied to the given event ids as a map from
* tagname displayname to count of tag applications
*
* @param eventIDsWithTags the event ids to get the tag counts map for
*
* @return a map from tagname displayname to count of applications
*/
Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) {
HashMap<String, Long> counts = new HashMap<>();
DBLock.lock();
try (Statement createStatement = con.createStatement();
ResultSet rs = createStatement.executeQuery("SELECT tag_name_display_name, COUNT(DISTINCT tag_id) AS count FROM tags"
+ " WHERE event_id IN (" + StringUtils.join(eventIDsWithTags, ", ") + ")"
+ " GROUP BY tag_name_id"
+ " ORDER BY tag_name_display_name");) {
while (rs.next()) {
counts.put(rs.getString("tag_name_display_name"), rs.getLong("count"));
}
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "Failed to get tag counts by tag name.", ex);
} finally {
DBLock.unlock();
}
return counts;
}
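
The new getTagCountsByTagName() query groups by tag_name_id but reports the human-readable tag_name_display_name, and COUNT(DISTINCT tag_id) counts each tag application once even if joins duplicate rows. A standalone sketch of the same query shape against an in-memory SQLite database (schema and rows are illustrative, not case data):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    // Sketch: two "Follow Up" applications and one "Notable" application across
    // two events; the GROUP BY collapses them to one count per tag name.
    class TagCountQuerySketch {
        public static void main(String[] args) throws Exception {
            try (Connection con = DriverManager.getConnection("jdbc:sqlite::memory:");
                    Statement stmt = con.createStatement()) {
                stmt.execute("CREATE TABLE tags (tag_id INTEGER, tag_name_id INTEGER,"
                        + " tag_name_display_name TEXT, event_id INTEGER)");
                stmt.execute("INSERT INTO tags VALUES (1, 10, 'Follow Up', 100),"
                        + " (2, 10, 'Follow Up', 101), (3, 20, 'Notable', 100)");
                try (ResultSet rs = stmt.executeQuery(
                        "SELECT tag_name_display_name, COUNT(DISTINCT tag_id) AS count FROM tags"
                        + " WHERE event_id IN (100, 101)"
                        + " GROUP BY tag_name_id ORDER BY tag_name_display_name")) {
                    while (rs.next()) {
                        System.out.println(rs.getString("tag_name_display_name") + " = " + rs.getLong("count"));
                    }
                }
            }
        }
    }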

/**
* drop the tables from this database and recreate them in order to start
* over.
@ -266,10 +291,27 @@ public class EventDB {
dropEventsTableStmt.executeUpdate();
dropHashSetHitsTableStmt.executeUpdate();
dropHashSetsTableStmt.executeUpdate();
dropTagsTableStmt.executeUpdate();
dropDBInfoTableStmt.executeUpdate();
initializeDB();;
initializeDB();
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "could not drop old tables table", ex); // NON-NLS
LOGGER.log(Level.SEVERE, "could not drop old tables", ex); // NON-NLS
} finally {
DBLock.unlock();
}
}

/**
* drop only the tags table and rebuild it in case the tags have changed
* while TL was not listening,
*/
void reInitializeTags() {
DBLock.lock();
try {
dropTagsTableStmt.executeUpdate();
initializeTagsTable();
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "could not drop old tags table", ex); // NON-NLS
} finally {
DBLock.unlock();
}
@ -281,8 +323,8 @@ public class EventDB {
final String sqlWhere = SQLHelper.getSQLWhere(filter);
DBLock.lock();
try (Statement stmt = con.createStatement(); //can't use prepared statement because of complex where clause
ResultSet rs = stmt.executeQuery(" select (select Max(time) from events" + useHashHitTablesHelper(filter) + " where time <=" + start + " and " + sqlWhere + ") as start,"
+ "(select Min(time) from from events" + useHashHitTablesHelper(filter) + " where time >= " + end + " and " + sqlWhere + ") as end")) { // NON-NLS
ResultSet rs = stmt.executeQuery(" SELECT (SELECT Max(time) FROM events " + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) + " WHERE time <=" + start + " AND " + sqlWhere + ") AS start,"
+ "(SELECT Min(time) FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) + " WHERE time >= " + end + " AND " + sqlWhere + ") AS end")) { // NON-NLS
while (rs.next()) {

long start2 = rs.getLong("start"); // NON-NLS
@ -332,10 +374,9 @@ public class EventDB {
Set<Long> resultIDs = new HashSet<>();

DBLock.lock();
final String query = "select event_id from from events" + useHashHitTablesHelper(filter) + " where time >= " + startTime + " and time <" + endTime + " and " + SQLHelper.getSQLWhere(filter); // NON-NLS
final String query = "SELECT events.event_id AS event_id FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) + " WHERE time >= " + startTime + " AND time <" + endTime + " AND " + SQLHelper.getSQLWhere(filter); // NON-NLS
try (Statement stmt = con.createStatement();
ResultSet rs = stmt.executeQuery(query)) {

while (rs.next()) {
resultIDs.add(rs.getLong("event_id"));
}
@ -357,11 +398,11 @@ public class EventDB {
return getDBInfo(DBInfoKey.LAST_OBJECT_ID, -1);
}

boolean hasNewColumns() {
/*
/**
* this relies on the fact that no tskObj has ID 0 but 0 is the default
* value for the datasource_id column in the events table.
*/
boolean hasNewColumns() {
return hasHashHitColumn() && hasDataSourceIDColumn() && hasTaggedColumn()
&& (getDataSourceIDs().isEmpty() == false);
}
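
The relocated comment on hasNewColumns() explains the sentinel it relies on: SQLite fills the added datasource_id column with 0 for pre-upgrade rows, and no Sleuth Kit object ever has id 0, so a table whose only datasource id is the default still needs repopulating. A minimal sketch of that sentinel test (the method and set contents below are illustrative, not the actual EventDB code):

    import java.util.HashSet;
    import java.util.Set;

    // Sketch: 0 is the column default and never a real object id, so once the
    // sentinel is dropped an empty set means the column was never backfilled.
    class DataSourceIdSentinelSketch {

        static boolean hasRealDataSourceIDs(Set<Long> distinctColumnValues) {
            distinctColumnValues.remove(0L); // discard the "not populated" default
            return !distinctColumnValues.isEmpty();
        }

        public static void main(String[] args) {
            Set<Long> onlyDefaults = new HashSet<>();
            onlyDefaults.add(0L);
            System.out.println(hasRealDataSourceIDs(onlyDefaults)); // false -> rebuild
        }
    }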
@ -388,7 +429,7 @@ public class EventDB {
|
||||
Map<Long, String> getHashSetNames() {
|
||||
Map<Long, String> hashSets = new HashMap<>();
|
||||
DBLock.lock();
|
||||
try (ResultSet rs = con.createStatement().executeQuery("select * from hash_sets")) {
|
||||
try (ResultSet rs = getHashSetNamesStmt.executeQuery();) {
|
||||
while (rs.next()) {
|
||||
long hashSetID = rs.getLong("hash_set_id");
|
||||
String hashSetName = rs.getString("hash_set_name");
|
||||
@ -443,7 +484,6 @@ public class EventDB {
|
||||
/**
|
||||
* create the table and indices if they don't already exist
|
||||
*
|
||||
*
|
||||
* @return the number of rows in the table , count > 0 indicating an
|
||||
* existing table
|
||||
*/
|
||||
@ -488,8 +528,9 @@ public class EventDB {
|
||||
+ " full_description TEXT, " // NON-NLS
|
||||
+ " med_description TEXT, " // NON-NLS
|
||||
+ " short_description TEXT, " // NON-NLS
|
||||
+ " known_state INTEGER,"
|
||||
+ " hash_hit INTEGER)"; //boolean // NON-NLS
|
||||
+ " known_state INTEGER," //boolean // NON-NLS
|
||||
+ " hash_hit INTEGER," //boolean // NON-NLS
|
||||
+ " tagged INTEGER)"; //boolean // NON-NLS
|
||||
stmt.execute(sql);
|
||||
} catch (SQLException ex) {
|
||||
LOGGER.log(Level.SEVERE, "problem creating database table", ex); // NON-NLS
|
||||
@ -500,7 +541,6 @@ public class EventDB {
|
||||
String sql = "ALTER TABLE events ADD COLUMN datasource_id INTEGER"; // NON-NLS
|
||||
stmt.execute(sql);
|
||||
} catch (SQLException ex) {
|
||||
|
||||
LOGGER.log(Level.SEVERE, "problem upgrading events table", ex); // NON-NLS
|
||||
}
|
||||
}
|
||||
@ -509,7 +549,6 @@ public class EventDB {
|
||||
String sql = "ALTER TABLE events ADD COLUMN tagged INTEGER"; // NON-NLS
|
||||
stmt.execute(sql);
|
||||
} catch (SQLException ex) {
|
||||
|
||||
LOGGER.log(Level.SEVERE, "problem upgrading events table", ex); // NON-NLS
|
||||
}
|
||||
}
|
||||
@ -542,8 +581,11 @@ public class EventDB {
|
||||
LOGGER.log(Level.SEVERE, "problem creating hash_set_hits table", ex);
|
||||
}
|
||||
|
||||
initializeTagsTable();
|
||||
|
||||
createIndex("events", Arrays.asList("file_id"));
|
||||
createIndex("events", Arrays.asList("artifact_id"));
|
||||
createIndex("events", Arrays.asList("time"));
|
||||
createIndex("events", Arrays.asList("sub_type", "time"));
|
||||
createIndex("events", Arrays.asList("base_type", "time"));
|
||||
createIndex("events", Arrays.asList("known_state"));
|
||||
@ -552,7 +594,7 @@ public class EventDB {
|
||||
insertRowStmt = prepareStatement(
|
||||
"INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hash_hit, tagged) " // NON-NLS
|
||||
+ "VALUES (?,?,?,?,?,?,?,?,?,?,?,?)"); // NON-NLS
|
||||
|
||||
getHashSetNamesStmt = prepareStatement("SELECT hash_set_id, hash_set_name FROM hash_sets"); // NON-NLS
|
||||
getDataSourceIDsStmt = prepareStatement("SELECT DISTINCT datasource_id FROM events"); // NON-NLS
|
||||
getMaxTimeStmt = prepareStatement("SELECT Max(time) AS max FROM events"); // NON-NLS
|
||||
getMinTimeStmt = prepareStatement("SELECT Min(time) AS min FROM events"); // NON-NLS
|
||||
@ -562,21 +604,41 @@ public class EventDB {
|
||||
insertHashSetStmt = prepareStatement("INSERT OR IGNORE INTO hash_sets (hash_set_name) values (?)");
|
||||
selectHashSetStmt = prepareStatement("SELECT hash_set_id FROM hash_sets WHERE hash_set_name = ?");
|
||||
insertHashHitStmt = prepareStatement("INSERT OR IGNORE INTO hash_set_hits (hash_set_id, event_id) values (?,?)");
|
||||
insertTagStmt = prepareStatement("INSERT OR IGNORE INTO tags (tag_id, tag_name_id,tag_name_display_name, event_id) values (?,?,?,?)");
|
||||
deleteTagStmt = prepareStatement("DELETE FROM tags WHERE tag_id = ?");
|
||||
countAllEventsStmt = prepareStatement("SELECT count(*) AS count FROM events");
|
||||
dropEventsTableStmt = prepareStatement("DROP TABLE IF EXISTS events");
|
||||
dropHashSetHitsTableStmt = prepareStatement("DROP TABLE IF EXISTS hash_set_hits");
|
||||
dropHashSetsTableStmt = prepareStatement("DROP TABLE IF EXISTS hash_sets");
|
||||
dropTagsTableStmt = prepareStatement("DROP TABLE IF EXISTS tags");
|
||||
dropDBInfoTableStmt = prepareStatement("DROP TABLE IF EXISTS db_ino");
|
||||
selectEventsFromOBjectAndArtifactStmt = prepareStatement("SELECT event_id FROM events WHERE file_id == ? AND artifact_id IS ?");
|
||||
selectEventIDsFromOBjectAndArtifactStmt = prepareStatement("SELECT event_id FROM events WHERE file_id == ? AND artifact_id IS ?");
|
||||
} catch (SQLException sQLException) {
|
||||
LOGGER.log(Level.SEVERE, "failed to prepareStatment", sQLException); // NON-NLS
|
||||
}
|
||||
|
||||
} finally {
|
||||
DBLock.unlock();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* create the tags table if it doesn't already exist. This is broken out as
|
||||
* a separate method so it can be used by {@link #reInitializeTags() }
|
||||
*/
|
||||
private void initializeTagsTable() {
|
||||
try (Statement stmt = con.createStatement()) {
|
||||
String sql = "CREATE TABLE IF NOT EXISTS tags "
|
||||
+ "(tag_id INTEGER NOT NULL,"
|
||||
+ " tag_name_id INTEGER NOT NULL, "
|
||||
+ " tag_name_display_name TEXT NOT NULL, "
|
||||
+ " event_id INTEGER REFERENCES events(event_id) NOT NULL, "
|
||||
+ " PRIMARY KEY (event_id, tag_name_id))";
|
||||
stmt.execute(sql);
|
||||
} catch (SQLException ex) {
|
||||
LOGGER.log(Level.SEVERE, "problem creating tags table", ex);
|
||||
}
|
||||
}
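Worth a note on that schema: the composite primary key (event_id, tag_name_id) combined with the INSERT OR IGNORE used by insertTagStmt means re-applying a tag with the same tag name to the same event is a silent no-op. A minimal, self-contained sketch of that behavior (hypothetical values, plain sqlite-jdbc; not part of this commit):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class TagsTableDemo {
        public static void main(String[] args) throws Exception {
            try (Connection con = DriverManager.getConnection("jdbc:sqlite::memory:");
                    Statement stmt = con.createStatement()) {
                stmt.execute("CREATE TABLE tags (tag_id INTEGER NOT NULL, tag_name_id INTEGER NOT NULL,"
                        + " tag_name_display_name TEXT NOT NULL, event_id INTEGER NOT NULL,"
                        + " PRIMARY KEY (event_id, tag_name_id))");
                stmt.execute("INSERT OR IGNORE INTO tags VALUES (1, 10, 'Bookmark', 42)");
                stmt.execute("INSERT OR IGNORE INTO tags VALUES (2, 10, 'Bookmark', 42)"); // ignored: same (event_id, tag_name_id)
                try (ResultSet rs = stmt.executeQuery("SELECT count(*) AS c FROM tags")) {
                    rs.next();
                    System.out.println(rs.getLong("c")); // prints 1
                }
            }
        }
    }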

/**
*
* @param tableName the value of tableName

@ -628,11 +690,11 @@ public class EventDB {

void insertEvent(long time, EventType type, long datasourceID, long objID,
Long artifactID, String fullDescription, String medDescription,
String shortDescription, TskData.FileKnown known, Set<String> hashSets, boolean tagged) {
String shortDescription, TskData.FileKnown known, Set<String> hashSets, List<? extends Tag> tags) {

EventTransaction transaction = beginTransaction();
insertEvent(time, type, datasourceID, objID, artifactID, fullDescription, medDescription, shortDescription, known, hashSets, tagged, transaction);
commitTransaction(transaction, true);
insertEvent(time, type, datasourceID, objID, artifactID, fullDescription, medDescription, shortDescription, known, hashSets, tags, transaction);
commitTransaction(transaction);
}

/**

@ -644,17 +706,13 @@ public class EventDB {
void insertEvent(long time, EventType type, long datasourceID, long objID,
Long artifactID, String fullDescription, String medDescription,
String shortDescription, TskData.FileKnown known, Set<String> hashSetNames,
boolean tagged,
EventTransaction transaction) {
List<? extends Tag> tags, EventTransaction transaction) {

if (transaction.isClosed()) {
throw new IllegalArgumentException("can't update database with closed transaction"); // NON-NLS
}
int typeNum;
int superTypeNum;

typeNum = RootEventType.allTypes.indexOf(type);
superTypeNum = type.getSuperType().ordinal();
int typeNum = RootEventType.allTypes.indexOf(type);
int superTypeNum = type.getSuperType().ordinal();

DBLock.lock();
try {

@ -684,7 +742,7 @@ public class EventDB {
insertRowStmt.setByte(10, known == null ? TskData.FileKnown.UNKNOWN.getFileKnownValue() : known.getFileKnownValue());

insertRowStmt.setInt(11, hashSetNames.isEmpty() ? 0 : 1);
insertRowStmt.setInt(12, tagged ? 1 : 0);
insertRowStmt.setInt(12, tags.isEmpty() ? 0 : 1);

insertRowStmt.executeUpdate();

@ -697,7 +755,7 @@ public class EventDB {
insertHashSetStmt.setString(1, name);
insertHashSetStmt.executeUpdate();

//TODO: use nested select to get hash_set_id rather than seperate statement/query
//TODO: use nested select to get hash_set_id rather than seperate statement/query ?
//"select hash_set_id from hash_sets where hash_set_name = ?"
selectHashSetStmt.setString(1, name);
try (ResultSet rs = selectHashSetStmt.executeQuery()) {

@ -711,9 +769,13 @@ public class EventDB {
}
}
}
for (Tag tag : tags) {
//could this be one insert? is there a performance win?
insertTag(tag, eventID);
}
break;
}
};
}

} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "failed to insert event", ex); // NON-NLS

@ -722,33 +784,133 @@ public class EventDB {
}
}

Set<Long> markEventsTagged(long objectID, Long artifactID, boolean tagged) {
HashSet<Long> eventIDs = new HashSet<>();

/**
* mark any events with the given object and artifact ids as tagged, and
* record the tag itself.
*
* @param objectID the obj_id that this tag applies to, the id of the
* content that the artifact is derived from for artifact
* tags
* @param artifactID the artifact_id that this tag applies to, or null if
* this is a content tag
* @param tag the tag that should be inserted
*
* @return the event ids that match the object/artifact pair
*/
Set<Long> addTag(long objectID, @Nullable Long artifactID, Tag tag) {
DBLock.lock();

try {
selectEventsFromOBjectAndArtifactStmt.clearParameters();
selectEventsFromOBjectAndArtifactStmt.setLong(1, objectID);
if (Objects.isNull(artifactID)) {
selectEventsFromOBjectAndArtifactStmt.setNull(2, Types.NULL);
} else {
selectEventsFromOBjectAndArtifactStmt.setLong(2, artifactID);
Set<Long> eventIDs = markEventsTagged(objectID, artifactID, true);
for (Long eventID : eventIDs) {
insertTag(tag, eventID);
}
try (ResultSet executeQuery = selectEventsFromOBjectAndArtifactStmt.executeQuery();) {
return eventIDs;
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "failed to add tag to event", ex); // NON-NLS
} finally {
DBLock.unlock();
}
return Collections.emptySet();
}

/**
* insert this tag into the db
* <p>
* NOTE: does not lock the db, must be called from inside a
* DBLock.lock/unlock pair
*
* @param tag the tag to insert
* @param eventID the event id that this tag is applied to.
*
* @throws SQLException if there was a problem executing insert
*/
private void insertTag(Tag tag, long eventID) throws SQLException {

//"INSERT OR IGNORE INTO tags (tag_id, tag_name_id,tag_name_display_name, event_id) values (?,?,?,?)"
insertTagStmt.clearParameters();
insertTagStmt.setLong(1, tag.getId());
insertTagStmt.setLong(2, tag.getName().getId());
insertTagStmt.setString(3, tag.getName().getDisplayName());
insertTagStmt.setLong(4, eventID);
insertTagStmt.executeUpdate();
}

/**
* mark any events with the given object and artifact ids as tagged, and
* record the tag itself.
*
* @param objectID the obj_id that this tag applies to, the id of the
* content that the artifact is derived from for artifact
* tags
* @param artifactID the artifact_id that this tag applies to, or null if
* this is a content tag
* @param tag the tag that should be deleted
* @param stillTagged true if there are other tags still applied to this
* event in autopsy
*
* @return the event ids that match the object/artifact pair
*/
Set<Long> deleteTag(long objectID, @Nullable Long artifactID, Tag tag, boolean stillTagged) {
DBLock.lock();
try {
//"DELETE FROM tags WHERE tag_id = ?
deleteTagStmt.clearParameters();
deleteTagStmt.setLong(1, tag.getId());
deleteTagStmt.executeUpdate();

Set<Long> eventIDs = markEventsTagged(objectID, artifactID, stillTagged);
return eventIDs;
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "failed to add tag to event", ex); // NON-NLS
} finally {
DBLock.unlock();
}
return Collections.emptySet();
}

/**
* mark any events with the given object and artifact ids as tagged, and
* record the tag itself.
* <p>
* NOTE: does not lock the db, must be called from inside a
* DBLock.lock/unlock pair
*
* @param objectID the obj_id that this tag applies to, the id of the
* content that the artifact is derived from for artifact
* tags
* @param artifactID the artifact_id that this tag applies to, or null if
* this is a content tag
* @param tagged true to mark the matching events tagged, false to mark
* them as untagged
*
* @return the event ids that match the object/artifact pair
*
* @throws SQLException if there is an error marking the events as
* (un)tagged
*/
private Set<Long> markEventsTagged(long objectID, @Nullable Long artifactID, boolean tagged) throws SQLException {
//first select the matching event ids
selectEventIDsFromOBjectAndArtifactStmt.clearParameters();
selectEventIDsFromOBjectAndArtifactStmt.setLong(1, objectID);
if (Objects.isNull(artifactID)) {
selectEventIDsFromOBjectAndArtifactStmt.setNull(2, Types.NULL);
} else {
selectEventIDsFromOBjectAndArtifactStmt.setLong(2, artifactID);
}

HashSet<Long> eventIDs = new HashSet<>();
try (ResultSet executeQuery = selectEventIDsFromOBjectAndArtifactStmt.executeQuery();) {
while (executeQuery.next()) {
eventIDs.add(executeQuery.getLong("event_id"));
}
}

//then update tagged state for all event with selected ids
try (Statement updateStatement = con.createStatement();) {
updateStatement.executeUpdate("UPDATE events SET tagged = " + (tagged ? 1 : 0)
+ " WHERE event_id IN (" + StringUtils.join(eventIDs, ",") + ")");
}
}
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "failed to mark events as " + (tagged ? "" : "(un)") + tagged, ex); // NON-NLS
} finally {
DBLock.unlock();
}

return eventIDs;
}
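As a usage note, addTag and deleteTag both funnel through markEventsTagged; the call pattern, taken from the tag-sync worker later in this commit, looks like this (content tags pass a null artifact id):

    // Content tag: no artifact, so artifactID is null.
    eventDB.addTag(contentTag.getContent().getId(), null, contentTag);
    // Artifact tag: pass both the source object id and the artifact id.
    eventDB.addTag(artifactTag.getContent().getId(),
            artifactTag.getArtifact().getArtifactID(), artifactTag);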

@ -768,19 +930,6 @@ public class EventDB {
trans.rollback();
}

boolean tableExists() {
//TODO: use prepared statement - jm
try (Statement createStatement = con.createStatement();
ResultSet executeQuery = createStatement.executeQuery("SELECT name FROM sqlite_master WHERE type='table' AND name='events'")) { // NON-NLS
if (executeQuery.getString("name").equals("events") == false) { // NON-NLS
return false;
}
} catch (SQLException ex) {
Exceptions.printStackTrace(ex);
}
return true;
}

private void closeStatements() throws SQLException {
for (PreparedStatement pStmt : preparedStatements) {
pStmt.close();

@ -789,7 +938,7 @@ public class EventDB {

private void configureDB() throws SQLException {
DBLock.lock();
//this should match Sleuthkit db setupt
//this should match Sleuthkit db setup
try (Statement statement = con.createStatement()) {
//reduce i/o operations, we have no OS crash recovery anyway
statement.execute("PRAGMA synchronous = OFF;"); // NON-NLS

@ -813,11 +962,13 @@ public class EventDB {
SQLiteJDBCLoader.getVersion(), SQLiteJDBCLoader.isNativeMode()
? "native" : "pure-java")); // NON-NLS
} catch (Exception exception) {
LOGGER.log(Level.SEVERE, "Failed to determine if sqlite-jdbc is loaded in native or pure-java mode.", exception);
}
}

private TimeLineEvent constructTimeLineEvent(ResultSet rs) throws SQLException {
return new TimeLineEvent(rs.getLong("event_id"),
rs.getLong("datasource_id"),
rs.getLong("file_id"),
rs.getLong("artifact_id"),
rs.getLong("time"), RootEventType.allTypes.get(rs.getInt("sub_type")),

@ -833,7 +984,6 @@ public class EventDB {
* count all the events with the given options and return a map organizing
* the counts in a hierarchy from date > eventtype > count
*
*
* @param startTime events before this time will be excluded (seconds from
* unix epoch)
* @param endTime events at or after this time will be excluded (seconds

@ -846,7 +996,7 @@ public class EventDB {
* @return a map organizing the counts in a hierarchy from date > eventtype >
* count
*/
private Map<EventType, Long> countEvents(Long startTime, Long endTime, RootFilter filter, EventTypeZoomLevel zoomLevel) {
private Map<EventType, Long> countEventsByType(Long startTime, Long endTime, RootFilter filter, EventTypeZoomLevel zoomLevel) {
if (Objects.equals(startTime, endTime)) {
endTime++;
}

@ -857,9 +1007,9 @@ public class EventDB {
final boolean useSubTypes = (zoomLevel == EventTypeZoomLevel.SUB_TYPE);

//get some info about the range of dates requested
final String queryString = "select count(*), " + useSubTypeHelper(useSubTypes)
+ " from events" + useHashHitTablesHelper(filter) + " where time >= " + startTime + " and time < " + endTime + " and " + SQLHelper.getSQLWhere(filter) // NON-NLS
+ " GROUP BY " + useSubTypeHelper(useSubTypes); // NON-NLS
final String queryString = "SELECT count(DISTINCT events.event_id) AS count, " + typeColumnHelper(useSubTypes)
+ " FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) + " WHERE time >= " + startTime + " AND time < " + endTime + " AND " + SQLHelper.getSQLWhere(filter) // NON-NLS
+ " GROUP BY " + typeColumnHelper(useSubTypes); // NON-NLS

DBLock.lock();
try (Statement stmt = con.createStatement();

@ -869,7 +1019,7 @@ public class EventDB {
? RootEventType.allTypes.get(rs.getInt("sub_type"))
: BaseTypes.values()[rs.getInt("base_type")];

typeMap.put(type, rs.getLong("count(*)")); // NON-NLS
typeMap.put(type, rs.getLong("count")); // NON-NLS
}

} catch (Exception ex) {

@ -880,119 +1030,140 @@ public class EventDB {
return typeMap;
}

List<AggregateEvent> getAggregatedEvents(ZoomParams params) {
return getAggregatedEvents(params.getTimeRange(), params.getFilter(), params.getTypeZoomLevel(), params.getDescrLOD());
}

/**
* //TODO: update javadoc //TODO: split this into helper methods
*
* get a list of {@link AggregateEvent}s.
*
* General algorithm is as follows:
*
* 1) get all aggregate events, via one db query. 2) sort them into a map
* from (type, description) -> aggevent 3) for each key in map, merge the
* events and accumulate them in a list to return
*
*
* @param timeRange the Interval within which all returned aggregate
* events will be.
* @param filter only events that pass the filter will be included in
* aggregates events returned
* @param zoomLevel only events of this level will be included
* @param lod description level of detail to use when grouping events
* get a list of {@link AggregateEvent}s, clustered according to the given
* zoom parameters.
*
* @param params the zoom params that determine the zooming, filtering and
* clustering.
*
* @return a list of aggregate events within the given timerange, that pass
* the supplied filter, aggregated according to the given event type
* and description zoom levels
*/
private List<AggregateEvent> getAggregatedEvents(Interval timeRange, RootFilter filter, EventTypeZoomLevel zoomLevel, DescriptionLOD lod) {
String descriptionColumn = getDescriptionColumn(lod);
final boolean useSubTypes = (zoomLevel.equals(EventTypeZoomLevel.SUB_TYPE));
List<AggregateEvent> getAggregatedEvents(ZoomParams params) {
//unpack params
Interval timeRange = params.getTimeRange();
RootFilter filter = params.getFilter();
DescriptionLOD descriptionLOD = params.getDescriptionLOD();
EventTypeZoomLevel typeZoomLevel = params.getTypeZoomLevel();

//get some info about the time range requested
RangeDivisionInfo rangeInfo = RangeDivisionInfo.getRangeDivisionInfo(timeRange);
//use 'rounded out' range
long start = timeRange.getStartMillis() / 1000;//.getLowerBound();
long end = timeRange.getEndMillis() / 1000;//Millis();//rangeInfo.getUpperBound();
if (Objects.equals(start, end)) {
//ensure length of queried interval is not 0
long start = timeRange.getStartMillis() / 1000;
long end = timeRange.getEndMillis() / 1000;
if (start == end) {
end++;
}
//get some info about the time range requested
RangeDivisionInfo rangeInfo = RangeDivisionInfo.getRangeDivisionInfo(timeRange);

//get a sqlite strftime format string
String strfTimeFormat = getStrfTimeFormat(rangeInfo.getPeriodSize());
//build dynamic parts of query
String strfTimeFormat = SQLHelper.getStrfTimeFormat(rangeInfo);
String descriptionColumn = SQLHelper.getDescriptionColumn(descriptionLOD);
final boolean useSubTypes = typeZoomLevel.equals(EventTypeZoomLevel.SUB_TYPE);
String timeZone = TimeLineController.getTimeZone().get().equals(TimeZone.getDefault()) ? ", 'localtime'" : ""; // NON-NLS
String typeColumn = typeColumnHelper(useSubTypes);

//effectively map from type to (map from description to events)
Map<EventType, SetMultimap< String, AggregateEvent>> typeMap = new HashMap<>();
//compose query string
String query = "SELECT strftime('" + strfTimeFormat + "',time , 'unixepoch'" + timeZone + ") AS interval," // NON-NLS
+ " group_concat(events.event_id) as event_ids, min(time), max(time), " + typeColumn + ", " + descriptionColumn // NON-NLS
+ "\n FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) // NON-NLS
+ "\n WHERE time >= " + start + " AND time < " + end + " AND " + SQLHelper.getSQLWhere(filter) // NON-NLS
+ "\n GROUP BY interval, " + typeColumn + " , " + descriptionColumn // NON-NLS
+ "\n ORDER BY min(time)"; // NON-NLS
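To make the dynamic pieces concrete, here is a hypothetical rendering of that composed query at day-sized intervals, base event types, full descriptions, and no active filters (so the filter clause collapses to the neutral 1); the epoch-second bounds are made-up 2015 values:

    // Hypothetical example of the SQL assembled above (day intervals, base
    // types, full descriptions, no active filters):
    String example = "SELECT strftime('%Y-%m-%dT00:00:00',time , 'unixepoch') AS interval,"
            + " group_concat(events.event_id) as event_ids, min(time), max(time), base_type, full_description"
            + "\n FROM events"
            + "\n WHERE time >= 1420070400 AND time < 1451606400 AND 1"
            + "\n GROUP BY interval, base_type , full_description"
            + "\n ORDER BY min(time)";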

//get all aggregate events in this time unit
// perform query and map results to AggregateEvent objects
List<AggregateEvent> events = new ArrayList<>();
DBLock.lock();
String query = "select strftime('" + strfTimeFormat + "',time , 'unixepoch'" + (TimeLineController.getTimeZone().get().equals(TimeZone.getDefault()) ? ", 'localtime'" : "") + ") as interval,"
+ " group_concat(events.event_id) as event_ids, Min(time), Max(time), " + descriptionColumn + ", " + useSubTypeHelper(useSubTypes)
+ " from events" + useHashHitTablesHelper(filter) + " where " + "time >= " + start + " and time < " + end + " and " + SQLHelper.getSQLWhere(filter) // NON-NLS
+ " group by interval, " + useSubTypeHelper(useSubTypes) + " , " + descriptionColumn // NON-NLS
+ " order by Min(time)"; // NON-NLS
// scoop up requested events in groups organized by interval, type, and description
try (ResultSet rs = con.createStatement().executeQuery(query);) {

try (Statement createStatement = con.createStatement();
ResultSet rs = createStatement.executeQuery(query)) {
while (rs.next()) {
Interval interval = new Interval(rs.getLong("Min(time)") * 1000, rs.getLong("Max(time)") * 1000, TimeLineController.getJodaTimeZone());
String eventIDS = rs.getString("event_ids");
EventType type = useSubTypes ? RootEventType.allTypes.get(rs.getInt("sub_type")) : BaseTypes.values()[rs.getInt("base_type")];

HashSet<Long> hashHits = new HashSet<>();
HashSet<Long> tagged = new HashSet<>();
try (Statement st2 = con.createStatement();
ResultSet hashQueryResults = st2.executeQuery("select event_id , tagged, hash_hit from events where event_id in (" + eventIDS + ")");) {
while (hashQueryResults.next()) {
long eventID = hashQueryResults.getLong("event_id");
if (hashQueryResults.getInt("tagged") != 0) {
tagged.add(eventID);
events.add(aggregateEventHelper(rs, useSubTypes, descriptionLOD, filter.getTagsFilter()));
}
if (hashQueryResults.getInt("hash_hit") != 0) {
hashHits.add(eventID);
}
}
}

AggregateEvent aggregateEvent = new AggregateEvent(
interval, // NON-NLS
type,
Stream.of(eventIDS.split(",")).map(Long::valueOf).collect(Collectors.toSet()), // NON-NLS
hashHits,
tagged,
rs.getString(descriptionColumn),
lod);

//put events in map from type/description -> event
SetMultimap<String, AggregateEvent> descrMap = typeMap.get(type);
if (descrMap == null) {
descrMap = HashMultimap.<String, AggregateEvent>create();
typeMap.put(type, descrMap);
}
descrMap.put(aggregateEvent.getDescription(), aggregateEvent);
}

} catch (SQLException ex) {
Exceptions.printStackTrace(ex);
LOGGER.log(Level.SEVERE, "Failed to get aggregate events with query: " + query, ex); // NON-NLS
} finally {
DBLock.unlock();
}

return mergeAggregateEvents(rangeInfo.getPeriodSize().getPeriod(), events);
}

/**
* map a single row in a ResultSet to an AggregateEvent
*
* @param rs the result set whose current row should be mapped
* @param useSubTypes use the sub_type column if true, else use the
* base_type column
* @param descriptionLOD the description level of detail for this event
*
* @return an AggregateEvent corresponding to the current row in the given
* result set
*
* @throws SQLException
*/
private AggregateEvent aggregateEventHelper(ResultSet rs, boolean useSubTypes, DescriptionLOD descriptionLOD, TagsFilter filter) throws SQLException {
Interval interval = new Interval(rs.getLong("min(time)") * 1000, rs.getLong("max(time)") * 1000, TimeLineController.getJodaTimeZone());// NON-NLS
String eventIDsString = rs.getString("event_ids");// NON-NLS
Set<Long> eventIDs = SQLHelper.unGroupConcat(eventIDsString, Long::valueOf);
String description = rs.getString(SQLHelper.getDescriptionColumn(descriptionLOD));
EventType type = useSubTypes ? RootEventType.allTypes.get(rs.getInt("sub_type")) : BaseTypes.values()[rs.getInt("base_type")];// NON-NLS

Set<Long> hashHits = new HashSet<>();
String hashHitQuery = "SELECT group_concat(event_id) FROM events WHERE event_id IN (" + eventIDsString + ") AND hash_hit = 1";// NON-NLS
try (Statement stmt = con.createStatement();
ResultSet hashHitsRS = stmt.executeQuery(hashHitQuery)) {
while (hashHitsRS.next()) {
hashHits = SQLHelper.unGroupConcat(hashHitsRS.getString("group_concat(event_id)"), Long::valueOf);// NON-NLS
}
}

Set<Long> tagged = new HashSet<>();
String taggedQuery = "SELECT group_concat(event_id) FROM events WHERE event_id IN (" + eventIDsString + ") AND tagged = 1";// NON-NLS
try (Statement stmt = con.createStatement();
ResultSet taggedRS = stmt.executeQuery(taggedQuery)) {
while (taggedRS.next()) {
tagged = SQLHelper.unGroupConcat(taggedRS.getString("group_concat(event_id)"), Long::valueOf);// NON-NLS
}
}

return new AggregateEvent(interval, type, eventIDs, hashHits, tagged,
description, descriptionLOD);
}

/**
* merge the events in the given list if they are within the same period.
* General algorithm is as follows:
*
* 1) sort them into a map from (type, description) -> List<aggevent>
* 2) for each key in map, merge the events and accumulate them in a list to
* return
*
* @param timeUnitLength
* @param preMergedEvents
*
* @return
*/
static private List<AggregateEvent> mergeAggregateEvents(Period timeUnitLength, List<AggregateEvent> preMergedEvents) {

//effectively map from type to (map from description to events)
Map<EventType, SetMultimap< String, AggregateEvent>> typeMap = new HashMap<>();

for (AggregateEvent aggregateEvent : preMergedEvents) {
typeMap.computeIfAbsent(aggregateEvent.getType(), eventType -> HashMultimap.create())
.put(aggregateEvent.getDescription(), aggregateEvent);
}
//result list to return
ArrayList<AggregateEvent> aggEvents = new ArrayList<>();

//save this for use when comparing gap size
Period timeUnitLength = rangeInfo.getPeriodSize().getPeriod();

//For each (type, description) key, merge agg events
for (SetMultimap<String, AggregateEvent> descrMap : typeMap.values()) {
//for each description ...
for (String descr : descrMap.keySet()) {
//run through the sorted events, merging together adjacent events
Iterator<AggregateEvent> iterator = descrMap.get(descr).stream()
.sorted((AggregateEvent o1, AggregateEvent o2)
-> Long.compare(o1.getSpan().getStartMillis(), o2.getSpan().getStartMillis()))
.sorted(Comparator.comparing(event -> event.getSpan().getStartMillis()))
.iterator();
AggregateEvent current = iterator.next();
while (iterator.hasNext()) {

@ -1013,17 +1184,10 @@ public class EventDB {
aggEvents.add(current);
}
}

//at this point we should have a list of aggregate events.
//one per type/description spanning consecutive time units as determined in rangeInfo
return aggEvents;
}

private String useHashHitTablesHelper(RootFilter filter) {
return SQLHelper.hasActiveHashFilter(filter) ? ", hash_set_hits" : "";
}

private static String useSubTypeHelper(final boolean useSubTypes) {
private static String typeColumnHelper(final boolean useSubTypes) {
return useSubTypes ? "sub_type" : "base_type";
}

@ -1050,37 +1214,6 @@ public class EventDB {
return defaultValue;
}

private String getDescriptionColumn(DescriptionLOD lod) {
switch (lod) {
case FULL:
return "full_description";
case MEDIUM:
return "med_description";
case SHORT:
default:
return "short_description";
}
}

private String getStrfTimeFormat(TimeUnits info) {
switch (info) {
case DAYS:
return "%Y-%m-%dT00:00:00"; // NON-NLS
case HOURS:
return "%Y-%m-%dT%H:00:00"; // NON-NLS
case MINUTES:
return "%Y-%m-%dT%H:%M:00"; // NON-NLS
case MONTHS:
return "%Y-%m-01T00:00:00"; // NON-NLS
case SECONDS:
return "%Y-%m-%dT%H:%M:%S"; // NON-NLS
case YEARS:
return "%Y-01-01T00:00:00"; // NON-NLS
default:
return "%Y-%m-%dT%H:%M:%S"; // NON-NLS
}
}
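These format strings matter because SQLite's strftime truncates each timestamp to the start of its period, so GROUP BY on the formatted value is what clusters events into intervals. A small self-contained illustration (made-up epoch values, both falling on 2015-01-01; assumes the usual java.sql imports):

    // Two times on the same day map to the same DAYS-format bucket.
    try (Connection con = DriverManager.getConnection("jdbc:sqlite::memory:");
            Statement stmt = con.createStatement();
            ResultSet rs = stmt.executeQuery(
                    "SELECT strftime('%Y-%m-%dT00:00:00', 1420113600, 'unixepoch') AS a,"
                    + " strftime('%Y-%m-%dT00:00:00', 1420117200, 'unixepoch') AS b")) {
        rs.next();
        System.out.println(rs.getString("a")); // 2015-01-01T00:00:00
        System.out.println(rs.getString("b")); // 2015-01-01T00:00:00 -- same bucket
    }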

private PreparedStatement prepareStatement(String queryString) throws SQLException {
PreparedStatement prepareStatement = con.prepareStatement(queryString);
preparedStatements.add(prepareStatement);

@ -1143,16 +1276,13 @@ public class EventDB {
}
}

private void commit(Boolean notify) {
private void commit() {
if (!closed) {
try {
con.commit();
// make sure we close before we update, bc they'll need locks
close();

if (notify) {
// fireNewEvents(newEvents);
}
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "Error commiting events.db.", ex); // NON-NLS
rollback();

Core/src/org/sleuthkit/autopsy/timeline/db/EventsRepository.java

@ -16,13 +16,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.events.db;
package org.sleuthkit.autopsy.timeline.db;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;

@ -34,6 +33,7 @@ import java.util.logging.Level;
import java.util.stream.Collectors;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.collections.ObservableMap;
import javax.annotation.concurrent.GuardedBy;
import javax.swing.JOptionPane;

@ -45,27 +45,34 @@ import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.TagsManager;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.ProgressWindow;
import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.events.TimeLineEvent;
import org.sleuthkit.autopsy.timeline.events.type.ArtifactEventType;
import org.sleuthkit.autopsy.timeline.events.type.EventType;
import org.sleuthkit.autopsy.timeline.events.type.FileSystemTypes;
import org.sleuthkit.autopsy.timeline.events.type.RootEventType;
import org.sleuthkit.autopsy.timeline.datamodel.AggregateEvent;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.TimeLineEvent;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.ArtifactEventType;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.FileSystemTypes;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.RootEventType;
import org.sleuthkit.autopsy.timeline.filters.RootFilter;
import org.sleuthkit.autopsy.timeline.filters.TagNameFilter;
import org.sleuthkit.autopsy.timeline.filters.TagsFilter;
import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.Tag;
import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;

/**
* Provides public API (over EventsDB) to access events. In theory this
* insulates the rest of the timeline module from the details of the db
* Provides higher-level public API (over EventsDB) to access events. In theory
* this insulates the rest of the timeline module from the details of the db
* implementation. Since there are no other implementations of the database or
* clients of this class, and no Java Interface defined yet, in practice this
* just delegates everything to the eventDB
* just delegates everything to the eventDB. Some results are also cached by
* this layer.
*
* Concurrency Policy:
*

@ -95,12 +102,17 @@ public class EventsRepository {

private final ObservableMap<Long, String> datasourcesMap = FXCollections.observableHashMap();
private final ObservableMap<Long, String> hashSetMap = FXCollections.observableHashMap();
private final ObservableList<TagName> tagNames = FXCollections.observableArrayList();
private final Case autoCase;

public Case getAutoCase() {
return autoCase;
}

public ObservableList<TagName> getTagNames() {
return tagNames;
}

synchronized public ObservableMap<Long, String> getDatasourcesMap() {
return datasourcesMap;
}

@ -125,7 +137,7 @@ public class EventsRepository {
this.autoCase = autoCase;
//TODO: we should check that case is open, or get passed a case object/directory -jm
this.eventDB = EventDB.getEventDB(autoCase);
populateFilterMaps(autoCase.getSleuthkitCase());
populateFilterData(autoCase.getSleuthkitCase());
idToEventCache = CacheBuilder.newBuilder()
.maximumSize(5000L)
.expireAfterAccess(10, TimeUnit.MINUTES)

@ -218,6 +230,94 @@ public class EventsRepository {
return eventDB.getSpanningInterval(eventIDs);
}

public boolean hasNewColumns() {
return eventDB.hasNewColumns();
}

/**
* get a count of tagnames applied to the given event ids as a map from
* tagname displayname to count of tag applications
*
* @param eventIDsWithTags the event ids to get the tag counts map for
*
* @return a map from tagname displayname to count of applications
*/
public Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) {
return eventDB.getTagCountsByTagName(eventIDsWithTags);
}
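A hypothetical caller, summarizing the tags on a cluster of events for display (the getter name on AggregateEvent is an assumption here, not shown in this diff):

    // Hypothetical usage: render per-tag-name counts for a set of tagged event ids.
    Set<Long> taggedIDs = aggregateEvent.getEventIDsWithTags(); // assumed getter
    Map<String, Long> counts = eventsRepository.getTagCountsByTagName(taggedIDs);
    counts.forEach((displayName, n) -> System.out.println(displayName + ": " + n));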

/**
* use the given SleuthkitCase to update the data used to determine the
* available filters.
*
* @param skCase
*/
synchronized private void populateFilterData(SleuthkitCase skCase) {

for (Map.Entry<Long, String> hashSet : eventDB.getHashSetNames().entrySet()) {
hashSetMap.putIfAbsent(hashSet.getKey(), hashSet.getValue());
}
//because there is no way to remove a datasource we only add to this map.
for (Long id : eventDB.getDataSourceIDs()) {
try {
datasourcesMap.putIfAbsent(id, skCase.getContentById(id).getDataSource().getName());
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Failed to get datasource by ID.", ex);
}
}

try {
//should this only be tags applied to files or event bearing artifacts?
tagNames.setAll(skCase.getTagNamesInUse());
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Failed to get tag names in use.", ex);
}
}

synchronized public Set<Long> addTag(long objID, Long artifactID, Tag tag) {
Set<Long> updatedEventIDs = eventDB.addTag(objID, artifactID, tag);
if (!updatedEventIDs.isEmpty()) {
invalidateCaches(updatedEventIDs);
}
return updatedEventIDs;
}

synchronized public Set<Long> deleteTag(long objID, Long artifactID, Tag tag, boolean tagged) {
Set<Long> updatedEventIDs = eventDB.deleteTag(objID, artifactID, tag, tagged);
if (!updatedEventIDs.isEmpty()) {
invalidateCaches(updatedEventIDs);
}
return updatedEventIDs;
}

synchronized private void invalidateCaches(Set<Long> updatedEventIDs) {
eventCountsCache.invalidateAll();
aggregateEventsCache.invalidateAll();
idToEventCache.invalidateAll(updatedEventIDs);
try {
tagNames.setAll(autoCase.getSleuthkitCase().getTagNamesInUse());
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Failed to get tag names in use.", ex);
}
}

/**
* "sync" the given tags filter with the tagnames in use: Disable filters
* for tags that are not in use in the case, and add new filters for tags
* that don't have them. New filters are selected by default.
*
* @param tagsFilter the tags filter to modify so it is consistent with the
* tags in use in the case
*/
public void syncTagsFilter(TagsFilter tagsFilter) {
for (TagName t : tagNames) {
tagsFilter.addSubFilter(new TagNameFilter(t, autoCase));
}
for (TagNameFilter t : tagsFilter.getSubFilters()) {
t.setDisabled(tagNames.contains(t.getTagName()) == false);
}
}

synchronized public void rebuildRepository(Runnable r) {
if (dbPopulationWorker != null) {
dbPopulationWorker.cancel(true);

@ -227,8 +327,110 @@ public class EventsRepository {
dbPopulationWorker.execute();
}

public boolean hasDataSourceInfo() {
return eventDB.hasNewColumns();
synchronized public void rebuildTags(Runnable r) {
if (dbPopulationWorker != null) {
dbPopulationWorker.cancel(true);

}
dbPopulationWorker = new RebuildTagsWorker(r);
dbPopulationWorker.execute();
}
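A sketch of how a caller might drive this (the controller refresh is a hypothetical stand-in for whatever post-population hook the UI needs):

    // Rebuild the tags tables off the EDT; the Runnable runs after done().
    eventsRepository.rebuildTags(() -> {
        timeLineController.refreshTimeLine(); // hypothetical UI refresh hook
    });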

private class RebuildTagsWorker extends SwingWorker<Void, ProgressWindow.ProgressUpdate> {

private final ProgressWindow progressDialog;

//TODO: can we avoid this with a state listener? does it amount to the same thing?
//post population operation to execute
private final Runnable postPopulationOperation;
private final SleuthkitCase skCase;
private final TagsManager tagsManager;

public RebuildTagsWorker(Runnable postPopulationOperation) {
progressDialog = new ProgressWindow(null, true, this);
progressDialog.setVisible(true);

skCase = autoCase.getSleuthkitCase();
tagsManager = autoCase.getServices().getTagsManager();

this.postPopulationOperation = postPopulationOperation;
}

@Override
protected Void doInBackground() throws Exception {

EventDB.EventTransaction trans = eventDB.beginTransaction();
LOGGER.log(Level.INFO, "dropping old tags"); // NON-NLS
eventDB.reInitializeTags();

LOGGER.log(Level.INFO, "updating content tags"); // NON-NLS
List<ContentTag> contentTags = tagsManager.getAllContentTags();
int size = contentTags.size();
for (int i = 0; i < size; i++) {
if (isCancelled()) {
break;
}
publish(new ProgressWindow.ProgressUpdate(i, size, "refreshing file tags", ""));
ContentTag contentTag = contentTags.get(i);
eventDB.addTag(contentTag.getContent().getId(), null, contentTag);
}
LOGGER.log(Level.INFO, "updating artifact tags"); // NON-NLS
List<BlackboardArtifactTag> artifactTags = tagsManager.getAllBlackboardArtifactTags();
size = artifactTags.size();
for (int i = 0; i < size; i++) {
if (isCancelled()) {
break;
}
publish(new ProgressWindow.ProgressUpdate(i, size, "refreshing result tags", ""));
BlackboardArtifactTag artifactTag = artifactTags.get(i);
eventDB.addTag(artifactTag.getContent().getId(), artifactTag.getArtifact().getArtifactID(), artifactTag);
}

LOGGER.log(Level.INFO, "committing tags"); // NON-NLS
publish(new ProgressWindow.ProgressUpdate(0, -1, "committing tag changes", ""));
if (isCancelled()) {
eventDB.rollBackTransaction(trans);
} else {
eventDB.commitTransaction(trans);
}

populateFilterData(skCase);
invalidateCaches();

return null;
}

/**
* handle intermediate 'results': just update progress dialog
*
* @param chunks
*/
@Override
protected void process(List<ProgressWindow.ProgressUpdate> chunks) {
super.process(chunks);
ProgressWindow.ProgressUpdate chunk = chunks.get(chunks.size() - 1);
progressDialog.update(chunk);
}

@Override
@NbBundle.Messages("msgdlg.tagsproblem.text=There was a problem refreshing the tagged events."
+ " Some events may have inacurate tags. See the log for details.")
protected void done() {
super.done();
try {
progressDialog.close();
get();
} catch (CancellationException ex) {
LOGGER.log(Level.WARNING, "Database population was cancelled by the user. Not all events may be present or accurate. See the log for details.", ex); // NON-NLS
} catch (InterruptedException | ExecutionException ex) {
LOGGER.log(Level.WARNING, "Exception while populating database.", ex); // NON-NLS
JOptionPane.showMessageDialog(null, Bundle.msgdlg_tagsproblem_text());
} catch (Exception ex) {
LOGGER.log(Level.WARNING, "Unexpected exception while populating database.", ex); // NON-NLS
JOptionPane.showMessageDialog(null, Bundle.msgdlg_tagsproblem_text());
}
postPopulationOperation.run(); //execute post db population operation
}
}
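Both workers in this file lean on the standard SwingWorker publish/process idiom: publish() is safe to call from the background thread, and Swing delivers the (possibly coalesced) chunks to process() on the event dispatch thread. A minimal generic sketch, independent of this commit:

    SwingWorker<Void, Integer> worker = new SwingWorker<Void, Integer>() {
        @Override
        protected Void doInBackground() {
            for (int i = 0; i <= 100; i++) {
                publish(i); // safe off the EDT
            }
            return null;
        }

        @Override
        protected void process(List<Integer> chunks) {
            // chunks may be coalesced; show only the latest value
            System.out.println("progress " + chunks.get(chunks.size() - 1));
        }
    };
    worker.execute();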

private class DBPopulationWorker extends SwingWorker<Void, ProgressWindow.ProgressUpdate> {

@ -256,7 +458,7 @@ public class EventsRepository {
"progressWindow.msg.reinit_db=(re)initializing events database",
"progressWindow.msg.commitingDb=committing events db"})
protected Void doInBackground() throws Exception {
process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, Bundle.progressWindow_msg_reinit_db(), "")));
publish(new ProgressWindow.ProgressUpdate(0, -1, Bundle.progressWindow_msg_reinit_db(), ""));
//reset database
//TODO: can we do more incremental updates? -jm
eventDB.reInitializeDB();

@ -265,7 +467,7 @@ public class EventsRepository {
List<Long> files = skCase.findAllFileIdsWhere("name != '.' AND name != '..'");

final int numFiles = files.size();
process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, numFiles, Bundle.progressWindow_msg_populateMacEventsFiles(), "")));
publish(new ProgressWindow.ProgressUpdate(0, numFiles, Bundle.progressWindow_msg_populateMacEventsFiles(), ""));

//insert file events into db
int i = 1;

@ -290,25 +492,25 @@ public class EventsRepository {
String shortDesc = datasourceName + "/" + StringUtils.defaultIfBlank(rootFolder, "");
String medD = datasourceName + parentPath;
final TskData.FileKnown known = f.getKnown();
Set<String> hashSets = f.getHashSetNames() ;
boolean tagged = !tagsManager.getContentTagsByContent(f).isEmpty();
Set<String> hashSets = f.getHashSetNames();
List<ContentTag> tags = tagsManager.getContentTagsByContent(f);

//insert it into the db if time is > 0 => time is legitimate (drops logical files)
if (f.getAtime() > 0) {
eventDB.insertEvent(f.getAtime(), FileSystemTypes.FILE_ACCESSED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans);
eventDB.insertEvent(f.getAtime(), FileSystemTypes.FILE_ACCESSED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tags, trans);
}
if (f.getMtime() > 0) {
eventDB.insertEvent(f.getMtime(), FileSystemTypes.FILE_MODIFIED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans);
eventDB.insertEvent(f.getMtime(), FileSystemTypes.FILE_MODIFIED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tags, trans);
}
if (f.getCtime() > 0) {
eventDB.insertEvent(f.getCtime(), FileSystemTypes.FILE_CHANGED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans);
eventDB.insertEvent(f.getCtime(), FileSystemTypes.FILE_CHANGED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tags, trans);
}
if (f.getCrtime() > 0) {
eventDB.insertEvent(f.getCrtime(), FileSystemTypes.FILE_CREATED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans);
eventDB.insertEvent(f.getCrtime(), FileSystemTypes.FILE_CREATED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tags, trans);
}

process(Arrays.asList(new ProgressWindow.ProgressUpdate(i, numFiles,
Bundle.progressWindow_msg_populateMacEventsFiles(), f.getName())));
publish(new ProgressWindow.ProgressUpdate(i, numFiles,
Bundle.progressWindow_msg_populateMacEventsFiles(), f.getName()));
}
} catch (TskCoreException tskCoreException) {
LOGGER.log(Level.WARNING, "failed to insert mac event for file : " + fID, tskCoreException); // NON-NLS

@ -329,14 +531,15 @@ public class EventsRepository {
}
}

process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, Bundle.progressWindow_msg_commitingDb(), "")));
publish(new ProgressWindow.ProgressUpdate(0, -1, Bundle.progressWindow_msg_commitingDb(), ""));

if (isCancelled()) {
eventDB.rollBackTransaction(trans);
} else {
eventDB.commitTransaction(trans, true);
eventDB.commitTransaction(trans);
}

populateFilterMaps(skCase);
populateFilterData(skCase);
invalidateCaches();

return null;

@ -363,7 +566,7 @@ public class EventsRepository {
progressDialog.close();
get();
} catch (CancellationException ex) {
LOGGER.log(Level.INFO, "Database population was cancelled by the user. Not all events may be present or accurate. See the log for details.", ex); // NON-NLS
LOGGER.log(Level.WARNING, "Database population was cancelled by the user. Not all events may be present or accurate. See the log for details.", ex); // NON-NLS
} catch (InterruptedException | ExecutionException ex) {
LOGGER.log(Level.WARNING, "Exception while populating database.", ex); // NON-NLS
JOptionPane.showMessageDialog(null, Bundle.msgdlg_problem_text());

@ -388,64 +591,32 @@ public class EventsRepository {
final ArrayList<BlackboardArtifact> blackboardArtifacts = skCase.getBlackboardArtifacts(type.getArtifactType());
final int numArtifacts = blackboardArtifacts.size();

process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, numArtifacts,
Bundle.progressWindow_populatingXevents(type.toString()), "")));
for (int i = 0; i < numArtifacts; i++) {
publish(new ProgressWindow.ProgressUpdate(i, numArtifacts,
Bundle.progressWindow_populatingXevents(type.getDisplayName()), ""));

int i = 0;
for (final BlackboardArtifact bbart : blackboardArtifacts) {
//for each artifact, extract the relevant information for the descriptions
BlackboardArtifact bbart = blackboardArtifacts.get(i);
ArtifactEventType.AttributeEventDescription eventDescription = ArtifactEventType.AttributeEventDescription.buildEventDescription(type, bbart);

if (eventDescription != null && eventDescription.getTime() > 0L) { //insert it into the db if time is > 0 => time is legitimate
long datasourceID = skCase.getContentById(bbart.getObjectID()).getDataSource().getId();

AbstractFile f = skCase.getAbstractFileById(bbart.getObjectID());
//insert it into the db if time is > 0 => time is legitimate
if (eventDescription != null && eventDescription.getTime() > 0L) {
long objectID = bbart.getObjectID();
AbstractFile f = skCase.getAbstractFileById(objectID);
long datasourceID = f.getDataSource().getId();
long artifactID = bbart.getArtifactID();
Set<String> hashSets = f.getHashSetNames();
boolean tagged = tagsManager.getBlackboardArtifactTagsByArtifact(bbart).isEmpty() == false;
List<BlackboardArtifactTag> tags = tagsManager.getBlackboardArtifactTagsByArtifact(bbart);
String fullDescription = eventDescription.getFullDescription();
String medDescription = eventDescription.getMedDescription();
String shortDescription = eventDescription.getShortDescription();

eventDB.insertEvent(eventDescription.getTime(), type, datasourceID, bbart.getObjectID(), bbart.getArtifactID(), eventDescription.getFullDescription(), eventDescription.getMedDescription(), eventDescription.getShortDescription(), null, hashSets, tagged, trans);
eventDB.insertEvent(eventDescription.getTime(), type, datasourceID, objectID, artifactID, fullDescription, medDescription, shortDescription, null, hashSets, tags, trans);
}

i++;
process(Arrays.asList(new ProgressWindow.ProgressUpdate(i, numArtifacts,
Bundle.progressWindow_populatingXevents(type), "")));
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "There was a problem getting events with sub type = " + type.toString() + ".", ex); // NON-NLS
}
}
}

/**
* use the given SleuthkitCase to look up the names for the datasources in
* the events table.
*
* TODO: we could keep a table of id -> name in the eventdb but I am wary of
* having too much redundant info.
*
* @param skCase
*/
synchronized private void populateFilterMaps(SleuthkitCase skCase) {

for (Map.Entry<Long, String> hashSet : eventDB.getHashSetNames().entrySet()) {
hashSetMap.putIfAbsent(hashSet.getKey(), hashSet.getValue());
}
//because there is no way to remove a datasource we only add to this map.
for (Long id : eventDB.getDataSourceIDs()) {
try {
datasourcesMap.putIfAbsent(id, skCase.getContentById(id).getDataSource().getName());
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Failed to get datasource by ID.", ex);
}
}
}

synchronized public Set<Long> markEventsTagged(long objID, Long artifactID, boolean tagged) {
Set<Long> updatedEventIDs = eventDB.markEventsTagged(objID, artifactID, tagged);
if (!updatedEventIDs.isEmpty()) {
aggregateEventsCache.invalidateAll();
idToEventCache.invalidateAll(updatedEventIDs);
}
return updatedEventIDs;
}
}

@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.events.db;
package org.sleuthkit.autopsy.timeline.db;

/**
*

Core/src/org/sleuthkit/autopsy/timeline/db/SQLHelper.java (new file, 283 lines)

@ -0,0 +1,283 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013-15 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.db;

import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
import org.apache.commons.lang3.StringUtils;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.RootEventType;
import org.sleuthkit.autopsy.timeline.filters.AbstractFilter;
import org.sleuthkit.autopsy.timeline.filters.DataSourceFilter;
import org.sleuthkit.autopsy.timeline.filters.DataSourcesFilter;
import org.sleuthkit.autopsy.timeline.filters.Filter;
import org.sleuthkit.autopsy.timeline.filters.HashHitsFilter;
import org.sleuthkit.autopsy.timeline.filters.HashSetFilter;
import org.sleuthkit.autopsy.timeline.filters.HideKnownFilter;
import org.sleuthkit.autopsy.timeline.filters.IntersectionFilter;
import org.sleuthkit.autopsy.timeline.filters.RootFilter;
import org.sleuthkit.autopsy.timeline.filters.TagNameFilter;
import org.sleuthkit.autopsy.timeline.filters.TagsFilter;
import org.sleuthkit.autopsy.timeline.filters.TextFilter;
import org.sleuthkit.autopsy.timeline.filters.TypeFilter;
import org.sleuthkit.autopsy.timeline.filters.UnionFilter;
import org.sleuthkit.autopsy.timeline.utils.RangeDivisionInfo;
import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD;
import static org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD.FULL;
import static org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD.MEDIUM;
import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.DAYS;
import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.HOURS;
import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.MINUTES;
import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.MONTHS;
import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.SECONDS;
import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.YEARS;
import org.sleuthkit.datamodel.TskData;

/**
* Static helper methods for converting between java data model objects and
* sqlite queries.
*/
public class SQLHelper {

static String useHashHitTablesHelper(RootFilter filter) {
HashHitsFilter hashHitFilter = filter.getHashHitsFilter();
return hashHitFilter.isSelected() && false == hashHitFilter.isDisabled() ? " LEFT JOIN hash_set_hits " : " ";
}

static String useTagTablesHelper(RootFilter filter) {
TagsFilter tagsFilter = filter.getTagsFilter();
return tagsFilter.isSelected() && false == tagsFilter.isDisabled() ? " LEFT JOIN tags " : " ";
}
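Note that these helpers emit a LEFT JOIN without an ON clause; the join condition (e.g. events.event_id == tags.event_id) is supplied by the matching getSQLWhere branch further down. A hedged illustration of the composed FROM fragment with only the tags filter active:

    // With the hash-set filter inactive and the tags filter active:
    String from = "FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter);
    // -> "FROM events  LEFT JOIN tags " (join condition comes from getSQLWhere(TagsFilter))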

    /**
     * Take the result of a group_concat SQLite operation and split it into a
     * set of X, using the mapper to convert from string to X.
     *
     * @param <X>         the type of elements to return
     * @param groupConcat a string containing the group_concat result (a comma
     *                    separated list)
     * @param mapper      a function from String to X
     *
     * @return a Set of X, each element mapped from one element of the original
     *         comma delimited string
     */
    static <X> Set<X> unGroupConcat(String groupConcat, Function<String, X> mapper) {
        return StringUtils.isBlank(groupConcat) ? Collections.emptySet()
                : Stream.of(groupConcat.split(","))
                .map(mapper::apply)
                .collect(Collectors.toSet());
    }
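
    /*
     * Usage sketch (illustrative, not from the original source): reversing a
     * group_concat of event IDs back into a Set<Long>:
     *
     *   Set<Long> eventIDs = unGroupConcat("1,2,3", Long::valueOf);
     *   // eventIDs now contains 1L, 2L, and 3L
     */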

    private static String getSQLWhere(IntersectionFilter<?> filter) {
        return filter.getSubFilters().stream()
                .filter(Filter::isSelected)
                .map(SQLHelper::getSQLWhere)
                .collect(Collectors.joining(" and ", "( ", ")"));
    }

    private static String getSQLWhere(UnionFilter<?> filter) {
        return filter.getSubFilters().stream()
                .filter(Filter::isSelected).map(SQLHelper::getSQLWhere)
                .collect(Collectors.joining(" or ", "( ", ")"));
    }

    static String getSQLWhere(RootFilter filter) {
        return getSQLWhere((IntersectionFilter) filter);
    }

    private static String getSQLWhere(Filter filter) {
        String result = "";
        if (filter == null) {
            return "1";
        } else if (filter instanceof TagsFilter) {
            result = getSQLWhere((TagsFilter) filter);
        } else if (filter instanceof HashHitsFilter) {
            result = getSQLWhere((HashHitsFilter) filter);
        } else if (filter instanceof DataSourceFilter) {
            result = getSQLWhere((DataSourceFilter) filter);
        } else if (filter instanceof DataSourcesFilter) {
            result = getSQLWhere((DataSourcesFilter) filter);
        } else if (filter instanceof HideKnownFilter) {
            result = getSQLWhere((HideKnownFilter) filter);
        } else if (filter instanceof TextFilter) {
            result = getSQLWhere((TextFilter) filter);
        } else if (filter instanceof TypeFilter) {
            result = getSQLWhere((TypeFilter) filter);
        } else if (filter instanceof IntersectionFilter) {
            result = getSQLWhere((IntersectionFilter) filter);
        } else if (filter instanceof UnionFilter) {
            result = getSQLWhere((UnionFilter) filter);
        } else {
            return "1";
        }
        result = StringUtils.deleteWhitespace(result).equals("(1and1and1)") ? "1" : result;
        result = StringUtils.deleteWhitespace(result).equals("()") ? "1" : result;
        return result;
    }

    private static String getSQLWhere(HideKnownFilter filter) {
        if (filter.isSelected()) {
            return "(known_state IS NOT '" + TskData.FileKnown.KNOWN.getFileKnownValue() + "')"; // NON-NLS
        } else {
            return "1";
        }
    }

    private static String getSQLWhere(TagsFilter filter) {
        if (filter.isSelected()
                && (false == filter.isDisabled())
                && (filter.getSubFilters().isEmpty() == false)) {
            String tagNameIDs = filter.getSubFilters().stream()
                    .filter((TagNameFilter t) -> t.isSelected() && !t.isDisabled())
                    .map((TagNameFilter t) -> String.valueOf(t.getTagName().getId()))
                    .collect(Collectors.joining(", ", "(", ")"));
            return "(events.event_id == tags.event_id AND "
                    + "tags.tag_name_id IN " + tagNameIDs + ") ";
        } else {
            return "1";
        }

    }
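
    /*
     * Illustrative output (assuming tag name IDs 2 and 5 are selected and
     * enabled): the method above would return
     *
     *   "(events.event_id == tags.event_id AND tags.tag_name_id IN (2, 5)) "
     *
     * which only matches rows once useTagTablesHelper() has contributed the
     * "LEFT JOIN tags" fragment to the query.
     */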

    private static String getSQLWhere(HashHitsFilter filter) {
        if (filter.isSelected()
                && (false == filter.isDisabled())
                && (filter.getSubFilters().isEmpty() == false)) {
            String hashSetIDs = filter.getSubFilters().stream()
                    .filter((HashSetFilter t) -> t.isSelected() && !t.isDisabled())
                    .map((HashSetFilter t) -> String.valueOf(t.getHashSetID()))
                    .collect(Collectors.joining(", ", "(", ")"));
            return "(hash_set_hits.hash_set_id IN " + hashSetIDs + " AND hash_set_hits.event_id == events.event_id)";
        } else {
            return "1";
        }
    }

    private static String getSQLWhere(DataSourceFilter filter) {
        if (filter.isSelected()) {
            return "(datasource_id = '" + filter.getDataSourceID() + "')";
        } else {
            return "1";
        }
    }

    private static String getSQLWhere(DataSourcesFilter filter) {
        return (filter.isSelected()) ? "(datasource_id in ("
                + filter.getSubFilters().stream()
                .filter(AbstractFilter::isSelected)
                .map((dataSourceFilter) -> String.valueOf(dataSourceFilter.getDataSourceID()))
                .collect(Collectors.joining(", ")) + "))" : "1";
    }

    private static String getSQLWhere(TextFilter filter) {
        if (filter.isSelected()) {
            if (StringUtils.isBlank(filter.getText())) {
                return "1";
            }
            String strippedFilterText = StringUtils.strip(filter.getText());
            return "((med_description like '%" + strippedFilterText + "%')"
                    + " or (full_description like '%" + strippedFilterText + "%')"
                    + " or (short_description like '%" + strippedFilterText + "%'))";
        } else {
            return "1";
        }
    }

    /**
     * Generate a SQL where clause for the given type filter, while trying to
     * be as simple as possible to improve performance.
     *
     * @param typeFilter the TypeFilter to generate a clause for
     *
     * @return a where clause (without the leading "where") restricting results
     *         to the active event sub-types
     */
    private static String getSQLWhere(TypeFilter typeFilter) {
        if (typeFilter.isSelected() == false) {
            return "0";
        } else if (typeFilter.getEventType() instanceof RootEventType) {
            if (typeFilter.getSubFilters().stream()
                    .allMatch(subFilter -> subFilter.isSelected() && subFilter.getSubFilters().stream().allMatch(Filter::isSelected))) {
                return "1"; //then collapse clause to true
            }
        }
        return "(sub_type IN (" + StringUtils.join(getActiveSubTypes(typeFilter), ",") + "))";
    }
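
    /*
     * Illustrative behavior (not from the original source): with every type
     * selected the clause collapses to "1" (always true), so SQLite can skip
     * the IN test entirely; with only some sub-types selected it emits, e.g.,
     *
     *   "(sub_type IN (2,3,7))"
     */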

    private static List<Integer> getActiveSubTypes(TypeFilter filter) {
        if (filter.isSelected()) {
            if (filter.getSubFilters().isEmpty()) {
                return Collections.singletonList(RootEventType.allTypes.indexOf(filter.getEventType()));
            } else {
                return filter.getSubFilters().stream().flatMap((Filter t) -> getActiveSubTypes((TypeFilter) t).stream()).collect(Collectors.toList());
            }
        } else {
            return Collections.emptyList();
        }
    }

    /**
     * Get a SQLite strftime format string that will allow us to group by the
     * requested period size. That is, with all info more granular than that
     * requested dropped (replaced with zeros).
     *
     * @param info the {@link RangeDivisionInfo} with the requested period size
     *
     * @return a String formatted according to the SQLite strftime spec
     *
     * @see https://www.sqlite.org/lang_datefunc.html
     */
    static String getStrfTimeFormat(@Nonnull RangeDivisionInfo info) {
        switch (info.getPeriodSize()) {
            case YEARS:
                return "%Y-01-01T00:00:00"; // NON-NLS
            case MONTHS:
                return "%Y-%m-01T00:00:00"; // NON-NLS
            case DAYS:
                return "%Y-%m-%dT00:00:00"; // NON-NLS
            case HOURS:
                return "%Y-%m-%dT%H:00:00"; // NON-NLS
            case MINUTES:
                return "%Y-%m-%dT%H:%M:00"; // NON-NLS
            case SECONDS:
            default: //seconds - should never happen
                return "%Y-%m-%dT%H:%M:%S"; // NON-NLS
        }
    }
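
    /*
     * Example (illustrative): for a period size of DAYS, the format string
     * "%Y-%m-%dT00:00:00" truncates every timestamp in the same day to one
     * value, e.g. strftime('%Y-%m-%dT00:00:00', 1435708800, 'unixepoch')
     * yields "2015-07-01T00:00:00", so a GROUP BY on that expression buckets
     * events per day.
     */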

    static String getDescriptionColumn(DescriptionLOD lod) {
        switch (lod) {
            case FULL:
                return "full_description";
            case MEDIUM:
                return "med_description";
            case SHORT:
            default:
                return "short_description";
        }
    }

    private SQLHelper() {
    }
}
@ -18,22 +18,13 @@
 */
package org.sleuthkit.autopsy.timeline.events;

import java.util.Collections;
import java.util.Set;

/**
 * Posted to eventbus when a tag has been added to a file artifact that
 * corresponds to an event
 * A "local" event published by filteredEventsModel to indicate that the user
 * requested that the current visualization be refreshed without changing any
 * of the parameters (to include more up-to-date tag data, for example).
 * <p>
 * This event is not intended for use outside of the timeline module.
 */
public class EventsTaggedEvent {
public class RefreshRequestedEvent {

    private final Set<Long> eventIDs;

    public EventsTaggedEvent(Set<Long> eventIDs) {
        this.eventIDs = eventIDs;
    }

    public Set<Long> getEventIDs() {
        return Collections.unmodifiableSet(eventIDs);
    }
}
@ -18,23 +18,26 @@
 */
package org.sleuthkit.autopsy.timeline.events;

import java.util.Collections;
import com.google.common.collect.ImmutableSet;
import java.util.Set;

/**
 * Posted to eventbus when a tag has been removed from a file artifact that
 * corresponds to an event
 * A "local" event published by filteredEventsModel to indicate that events have
 * been (un)tagged. This event is not intended for use outside of the timeline
 * module.
 */
public class EventsUnTaggedEvent {
public class TagsUpdatedEvent {

    private final Set<Long> eventIDs;
    private final Set<Long> updatedEventIDs;

    public Set<Long> getEventIDs() {
        return Collections.unmodifiableSet(eventIDs);

    public ImmutableSet<Long> getUpdatedEventIDs() {
        return ImmutableSet.copyOf(updatedEventIDs);
    }

    public EventsUnTaggedEvent(Set<Long> eventIDs) {
        this.eventIDs = eventIDs;
    }

    public TagsUpdatedEvent(Set<Long> updatedEventIDs) {
        this.updatedEventIDs = updatedEventIDs;

    }
}
@ -1,157 +0,0 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package org.sleuthkit.autopsy.timeline.events.db;

import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.sleuthkit.autopsy.timeline.events.type.RootEventType;
import org.sleuthkit.autopsy.timeline.filters.AbstractFilter;
import org.sleuthkit.autopsy.timeline.filters.DataSourceFilter;
import org.sleuthkit.autopsy.timeline.filters.DataSourcesFilter;
import org.sleuthkit.autopsy.timeline.filters.Filter;
import org.sleuthkit.autopsy.timeline.filters.HashHitsFilter;
import org.sleuthkit.autopsy.timeline.filters.HashSetFilter;
import org.sleuthkit.autopsy.timeline.filters.HideKnownFilter;
import org.sleuthkit.autopsy.timeline.filters.IntersectionFilter;
import org.sleuthkit.autopsy.timeline.filters.RootFilter;
import org.sleuthkit.autopsy.timeline.filters.TextFilter;
import org.sleuthkit.autopsy.timeline.filters.TypeFilter;
import org.sleuthkit.autopsy.timeline.filters.UnionFilter;
import org.sleuthkit.datamodel.TskData;

/**
 *
 */
public class SQLHelper {

    private static List<Integer> getActiveSubTypes(TypeFilter filter) {
        if (filter.isSelected()) {
            if (filter.getSubFilters().isEmpty()) {
                return Collections.singletonList(RootEventType.allTypes.indexOf(filter.getEventType()));
            } else {
                return filter.getSubFilters().stream().flatMap((Filter t) -> getActiveSubTypes((TypeFilter) t).stream()).collect(Collectors.toList());
            }
        } else {
            return Collections.emptyList();
        }
    }

    static boolean hasActiveHashFilter(RootFilter filter) {
        HashHitsFilter hashHitFilter = filter.getHashHitsFilter();
        return hashHitFilter.isSelected() && false == hashHitFilter.isDisabled();
    }

    private SQLHelper() {
    }

    static String getSQLWhere(IntersectionFilter<?> filter) {
        return filter.getSubFilters().stream().filter(Filter::isSelected).map(SQLHelper::getSQLWhere).collect(Collectors.joining(" and ", "( ", ")"));
    }

    static String getSQLWhere(UnionFilter<?> filter) {
        return filter.getSubFilters().stream().filter(Filter::isSelected).map(SQLHelper::getSQLWhere).collect(Collectors.joining(" or ", "( ", ")"));
    }

    static String getSQLWhere(Filter filter) {
        String result = "";
        if (filter == null) {
            return "1";
        } else if (filter instanceof HashHitsFilter) {
            result = getSQLWhere((HashHitsFilter) filter);
        } else if (filter instanceof DataSourceFilter) {
            result = getSQLWhere((DataSourceFilter) filter);
        } else if (filter instanceof DataSourcesFilter) {
            result = getSQLWhere((DataSourcesFilter) filter);
        } else if (filter instanceof HideKnownFilter) {
            result = getSQLWhere((HideKnownFilter) filter);
        } else if (filter instanceof HashHitsFilter) {
            result = getSQLWhere((HashHitsFilter) filter);
        } else if (filter instanceof TextFilter) {
            result = getSQLWhere((TextFilter) filter);
        } else if (filter instanceof TypeFilter) {
            result = getSQLWhere((TypeFilter) filter);
        } else if (filter instanceof IntersectionFilter) {
            result = getSQLWhere((IntersectionFilter) filter);
        } else if (filter instanceof UnionFilter) {
            result = getSQLWhere((UnionFilter) filter);
        } else {
            return "1";
        }
        result = StringUtils.deleteWhitespace(result).equals("(1and1and1)") ? "1" : result;
        result = StringUtils.deleteWhitespace(result).equals("()") ? "1" : result;
        return result;
    }

    static String getSQLWhere(HideKnownFilter filter) {
        if (filter.isSelected()) {
            return "(known_state IS NOT '" + TskData.FileKnown.KNOWN.getFileKnownValue() + "')"; // NON-NLS
        } else {
            return "1";
        }
    }

    static String getSQLWhere(HashHitsFilter filter) {
        if (filter.isSelected()
                && (false == filter.isDisabled())
                && (filter.getSubFilters().isEmpty() == false)) {
            return "(hash_set_hits.hash_set_id in " + filter.getSubFilters().stream()
                    .filter((HashSetFilter t) -> t.isSelected() && !t.isDisabled())
                    .map((HashSetFilter t) -> String.valueOf(t.getHashSetID()))
                    .collect(Collectors.joining(", ", "(", ")")) + " and hash_set_hits.event_id == events.event_id)";
        } else {
            return "1";
        }
    }

    static String getSQLWhere(DataSourceFilter filter) {
        return (filter.isSelected()) ? "(datasource_id = '" + filter.getDataSourceID() + "')" : "1";
    }

    static String getSQLWhere(DataSourcesFilter filter) {
        return (filter.isSelected()) ? "(datasource_id in ("
                + filter.getSubFilters().stream()
                .filter(AbstractFilter::isSelected)
                .map((dataSourceFilter) -> String.valueOf(dataSourceFilter.getDataSourceID()))
                .collect(Collectors.joining(", ")) + "))" : "1";
    }

    static String getSQLWhere(TextFilter filter) {
        if (filter.isSelected()) {
            if (StringUtils.isBlank(filter.getText())) {
                return "1";
            }
            String strippedFilterText = StringUtils.strip(filter.getText());
            return "((med_description like '%" + strippedFilterText + "%')"
                    + " or (full_description like '%" + strippedFilterText + "%')"
                    + " or (short_description like '%" + strippedFilterText + "%'))";
        } else {
            return "1";
        }
    }

    /**
     * generate a sql where clause for the given type filter, while trying to be
     * as simple as possible to improve performance.
     *
     * @param typeFilter
     *
     * @return
     */
    static String getSQLWhere(TypeFilter typeFilter) {
        if (typeFilter.isSelected() == false) {
            return "0";
        } else if (typeFilter.getEventType() instanceof RootEventType) {
            if (typeFilter.getSubFilters().stream()
                    .allMatch(subFilter -> subFilter.isSelected() && subFilter.getSubFilters().stream().allMatch(Filter::isSelected))) {
                return "1"; //then collapse clause to true
            }
        }
        return "(sub_type IN (" + StringUtils.join(getActiveSubTypes(typeFilter), ",") + "))";
    }

}
@ -36,7 +36,7 @@ import org.sleuthkit.autopsy.datamodel.DisplayableItemNode;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNodeVisitor;
import org.sleuthkit.autopsy.datamodel.NodeProperty;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.events.TimeLineEvent;
import org.sleuthkit.autopsy.timeline.datamodel.TimeLineEvent;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content;
@ -27,9 +27,9 @@ import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.util.NbBundle;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.events.TimeLineEvent;
import org.sleuthkit.autopsy.timeline.events.type.BaseTypes;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.TimeLineEvent;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.BaseTypes;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNode;
@ -78,7 +78,7 @@ public abstract class CompoundFilter<SubFilterType extends Filter> extends Abstr
        }
    }

    static <SubFilterType extends Filter> boolean hashEqualSubFilters(final CompoundFilter<SubFilterType> oneFilter, final CompoundFilter<SubFilterType> otherFilter) {
    static <SubFilterType extends Filter> boolean areSubFiltersEqual(final CompoundFilter<SubFilterType> oneFilter, final CompoundFilter<SubFilterType> otherFilter) {
        if (oneFilter.getSubFilters().size() != otherFilter.getSubFilters().size()) {
            return false;
        }
@ -85,4 +85,5 @@ public class DataSourceFilter extends AbstractFilter {
        return isSelected() == other.isSelected();
    }


}
@ -18,6 +18,7 @@
 */
package org.sleuthkit.autopsy.timeline.filters;

import java.util.Comparator;
import java.util.stream.Collectors;
import javafx.beans.binding.Bindings;
import org.openide.util.NbBundle;
@ -38,7 +39,7 @@ public class DataSourcesFilter extends UnionFilter<DataSourceFilter> {
        filterCopy.setSelected(isSelected());
        //add a copy of each subfilter
        this.getSubFilters().forEach((DataSourceFilter t) -> {
            filterCopy.addDataSourceFilter(t.copyOf());
            filterCopy.addSubFilter(t.copyOf());
        });

        return filterCopy;
@ -63,13 +64,14 @@ public class DataSourcesFilter extends UnionFilter<DataSourceFilter> {
        return string;
    }

    public void addDataSourceFilter(DataSourceFilter dataSourceFilter) {
    public void addSubFilter(DataSourceFilter dataSourceFilter) {
        if (getSubFilters().stream().map(DataSourceFilter.class::cast)
                .map(DataSourceFilter::getDataSourceID)
                .filter(t -> t == dataSourceFilter.getDataSourceID())
                .findAny().isPresent() == false) {
            dataSourceFilter.getDisabledProperty().bind(getDisabledProperty());
            getSubFilters().add(dataSourceFilter);
            getSubFilters().sort(Comparator.comparing(DataSourceFilter::getDisplayName));
        }
        if (getSubFilters().size() > 1) {
            setSelected(Boolean.TRUE);
@ -90,7 +92,7 @@ public class DataSourcesFilter extends UnionFilter<DataSourceFilter> {
            return false;
        }

        return hashEqualSubFilters(this, other);
        return areSubFiltersEqual(this, other);

    }

@ -98,5 +100,4 @@ public class DataSourcesFilter extends UnionFilter<DataSourceFilter> {
    public int hashCode() {
        return 9;
    }

}
@ -5,6 +5,7 @@
 */
package org.sleuthkit.autopsy.timeline.filters;

import java.util.Comparator;
import java.util.stream.Collectors;
import javafx.beans.binding.Bindings;
import org.openide.util.NbBundle;
@ -31,7 +32,7 @@ public class HashHitsFilter extends UnionFilter<HashSetFilter> {
        filterCopy.setSelected(isSelected());
        //add a copy of each subfilter
        this.getSubFilters().forEach((HashSetFilter t) -> {
            filterCopy.addHashSetFilter(t.copyOf());
            filterCopy.addSubFilter(t.copyOf());
        });
        return filterCopy;
    }
@ -68,15 +69,16 @@
            return false;
        }

        return hashEqualSubFilters(this, other);
        return areSubFiltersEqual(this, other);
    }

    public void addHashSetFilter(HashSetFilter hashSetFilter) {
        if (getSubFilters().stream().map(HashSetFilter.class::cast)
    public void addSubFilter(HashSetFilter hashSetFilter) {
        if (getSubFilters().stream()
                .map(HashSetFilter::getHashSetID)
                .filter(t -> t == hashSetFilter.getHashSetID())
                .findAny().isPresent() == false) {
            getSubFilters().add(hashSetFilter);
            getSubFilters().sort(Comparator.comparing(HashSetFilter::getDisplayName));
        }
    }
}
@ -21,7 +21,7 @@ package org.sleuthkit.autopsy.timeline.filters;
import java.util.Objects;

/**
 * Filter for an individual datasource
 * Filter for an individual hash set
 */
public class HashSetFilter extends AbstractFilter {

@ -24,7 +24,7 @@ import javafx.collections.FXCollections;
import org.openide.util.NbBundle;

/**
 * Intersection(And) filter
 * Intersection (And) filter
 */
public class IntersectionFilter<S extends Filter> extends CompoundFilter<S> {

@ -60,7 +60,10 @@ public class IntersectionFilter<S extends Filter> extends CompoundFilter<S> {

    @Override
    public String getHTMLReportString() {
        return getSubFilters().stream().filter(Filter::isSelected).map(Filter::getHTMLReportString).collect(Collectors.joining("</li><li>", "<ul><li>", "</li></ul>")); // NON-NLS
        return getSubFilters().stream()
                .filter(Filter::isSelected)
                .map(Filter::getHTMLReportString)
                .collect(Collectors.joining("</li><li>", "<ul><li>", "</li></ul>")); // NON-NLS
    }

    @Override
@ -26,7 +26,8 @@ import javafx.collections.FXCollections;
 */
public class RootFilter extends IntersectionFilter<Filter> {

    private final HideKnownFilter knwonFilter;
    private final HideKnownFilter knownFilter;
    private final TagsFilter tagsFilter;
    private final HashHitsFilter hashFilter;
    private final TextFilter textFilter;
    private final TypeFilter typeFilter;
@ -36,13 +37,20 @@ public class RootFilter extends IntersectionFilter<Filter> {
        return dataSourcesFilter;
    }

    public TagsFilter getTagsFilter() {
        return tagsFilter;
    }

    public HashHitsFilter getHashHitsFilter() {
        return hashFilter;
    }

    public RootFilter(HideKnownFilter knownFilter, HashHitsFilter hashFilter, TextFilter textFilter, TypeFilter typeFilter, DataSourcesFilter dataSourceFilter) {
        super(FXCollections.observableArrayList(knownFilter, hashFilter, textFilter, dataSourceFilter, typeFilter));
        this.knwonFilter = knownFilter;
    public RootFilter(HideKnownFilter knownFilter, TagsFilter tagsFilter, HashHitsFilter hashFilter, TextFilter textFilter, TypeFilter typeFilter, DataSourcesFilter dataSourceFilter) {
        super(FXCollections.observableArrayList(knownFilter, tagsFilter, hashFilter, textFilter, dataSourceFilter, typeFilter));
        setSelected(Boolean.TRUE);
        setDisabled(false);
        this.knownFilter = knownFilter;
        this.tagsFilter = tagsFilter;
        this.hashFilter = hashFilter;
        this.textFilter = textFilter;
        this.typeFilter = typeFilter;
@ -51,7 +59,7 @@ public class RootFilter extends IntersectionFilter<Filter> {

    @Override
    public RootFilter copyOf() {
        RootFilter filter = new RootFilter(knwonFilter.copyOf(), hashFilter.copyOf(), textFilter.copyOf(), typeFilter.copyOf(), dataSourcesFilter.copyOf());
        RootFilter filter = new RootFilter(knownFilter.copyOf(), tagsFilter.copyOf(), hashFilter.copyOf(), textFilter.copyOf(), typeFilter.copyOf(), dataSourcesFilter.copyOf());
        filter.setSelected(isSelected());
        filter.setDisabled(isDisabled());
        return filter;
@ -71,6 +79,6 @@ public class RootFilter extends IntersectionFilter<Filter> {
        if (getClass() != obj.getClass()) {
            return false;
        }
        return hashEqualSubFilters(this, (CompoundFilter<Filter>) obj);
        return areSubFiltersEqual(this, (CompoundFilter<Filter>) obj);
    }
}
@ -0,0 +1,89 @@
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2015 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.timeline.filters;

import java.util.Objects;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.TagsManager;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TagName;

/**
 * Filter for an individual TagName
 */
public class TagNameFilter extends AbstractFilter {

    private final TagName tagName;
    private final Case autoCase;
    private final TagsManager tagsManager;
    private final SleuthkitCase sleuthkitCase;

    public TagNameFilter(TagName tagName, Case autoCase) {
        this.autoCase = autoCase;
        sleuthkitCase = autoCase.getSleuthkitCase();
        tagsManager = autoCase.getServices().getTagsManager();
        this.tagName = tagName;
        setSelected(Boolean.TRUE);
    }

    public TagName getTagName() {
        return tagName;
    }

    @Override
    synchronized public TagNameFilter copyOf() {
        TagNameFilter filterCopy = new TagNameFilter(getTagName(), autoCase);
        filterCopy.setSelected(isSelected());
        filterCopy.setDisabled(isDisabled());
        return filterCopy;
    }

    @Override
    public String getDisplayName() {
        return tagName.getDisplayName();
    }

    @Override
    public String getHTMLReportString() {
        return getDisplayName() + getStringCheckBox();
    }

    @Override
    public int hashCode() {
        int hash = 3;
        hash = 53 * hash + Objects.hashCode(this.tagName);
        return hash;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        final TagNameFilter other = (TagNameFilter) obj;
        if (!Objects.equals(this.tagName, other.tagName)) {
            return false;
        }

        return isSelected() == other.isSelected();
    }
}
@ -0,0 +1,93 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package org.sleuthkit.autopsy.timeline.filters;

import java.util.Comparator;
import java.util.stream.Collectors;
import javafx.beans.binding.Bindings;
import org.openide.util.NbBundle;
import org.sleuthkit.datamodel.TagName;

/**
 * Filter to show only events tagged with the tagNames of the selected
 * subfilters.
 */
public class TagsFilter extends UnionFilter<TagNameFilter> {

    @Override
    @NbBundle.Messages("tagsFilter.displayName.text=Only Events Tagged")
    public String getDisplayName() {
        return Bundle.tagsFilter_displayName_text();
    }

    public TagsFilter() {
        getDisabledProperty().bind(Bindings.size(getSubFilters()).lessThan(1));
        setSelected(false);
    }

    @Override
    public TagsFilter copyOf() {
        TagsFilter filterCopy = new TagsFilter();
        filterCopy.setSelected(isSelected());
        //add a copy of each subfilter
        this.getSubFilters().forEach((TagNameFilter t) -> {
            filterCopy.addSubFilter(t.copyOf());
        });
        return filterCopy;
    }

    @Override
    public String getHTMLReportString() {
        //move this logic into SaveSnapshot
        String string = getDisplayName() + getStringCheckBox();
        if (getSubFilters().isEmpty() == false) {
            string = string + " : " + getSubFilters().stream()
                    .filter(Filter::isSelected)
                    .map(Filter::getHTMLReportString)
                    .collect(Collectors.joining("</li><li>", "<ul><li>", "</li></ul>")); // NON-NLS
        }
        return string;
    }

    @Override
    public int hashCode() {
        return 7;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        final TagsFilter other = (TagsFilter) obj;

        if (isSelected() != other.isSelected()) {
            return false;
        }

        return areSubFiltersEqual(this, other);
    }

    public void addSubFilter(TagNameFilter tagFilter) {
        TagName newFilterTagName = tagFilter.getTagName();
        if (getSubFilters().stream()
                .map(TagNameFilter::getTagName)
                .filter(newFilterTagName::equals)
                .findAny().isPresent() == false) {
            getSubFilters().add(tagFilter);
        }
        getSubFilters().sort(Comparator.comparing(TagNameFilter::getDisplayName));
    }
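
    /*
     * Usage sketch (illustrative, not from the original source; the names
     * bookmarkTagName and currentCase are hypothetical): addSubFilter is
     * idempotent per TagName, so repeated tag events cannot create duplicate
     * rows in the filter tree:
     *
     *   tagsFilter.addSubFilter(new TagNameFilter(bookmarkTagName, currentCase));
     *   tagsFilter.addSubFilter(new TagNameFilter(bookmarkTagName, currentCase)); // no-op
     */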

    public void removeFilterForTag(TagName tagName) {
        getSubFilters().removeIf(subfilter -> subfilter.getTagName().equals(tagName));
        getSubFilters().sort(Comparator.comparing(TagNameFilter::getDisplayName));
    }

}
@ -91,5 +91,4 @@ public class TextFilter extends AbstractFilter {
        hash = 29 * hash + Objects.hashCode(this.text.get());
        return hash;
    }

}
@ -24,8 +24,8 @@ import javafx.collections.FXCollections;
import javafx.scene.image.Image;
import javafx.scene.paint.Color;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.events.type.EventType;
import org.sleuthkit.autopsy.timeline.events.type.RootEventType;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.RootEventType;

/**
 * Event Type Filter. An instance of TypeFilter is usually a tree that parallels
@ -133,7 +133,7 @@ public class TypeFilter extends UnionFilter<TypeFilter> {
        if (this.eventType != other.eventType) {
            return false;
        }
        return hashEqualSubFilters(this, other);
        return areSubFiltersEqual(this, other);
    }

    @Override
@ -142,5 +142,4 @@ public class TypeFilter extends UnionFilter<TypeFilter> {
        hash = 67 * hash + Objects.hashCode(this.eventType);
        return hash;
    }

}
@ -33,5 +33,4 @@ abstract public class UnionFilter<SubFilterType extends Filter> extends Compound
    public UnionFilter() {
        super(FXCollections.<SubFilterType>observableArrayList());
    }

}
@ -18,9 +18,11 @@
 */
package org.sleuthkit.autopsy.timeline.ui;

import com.google.common.eventbus.Subscribe;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import javafx.beans.InvalidationListener;
import javafx.beans.Observable;
import javafx.beans.property.ReadOnlyListProperty;
@ -48,12 +50,14 @@ import javafx.scene.text.Font;
import javafx.scene.text.FontWeight;
import javafx.scene.text.Text;
import javafx.scene.text.TextAlignment;
import javax.annotation.Nonnull;
import javax.annotation.concurrent.Immutable;
import org.apache.commons.lang3.StringUtils;
import org.openide.util.Exceptions;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.TimeLineView;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.events.RefreshRequestedEvent;

/**
 * Abstract base class for {@link Chart} based {@link TimeLineView}s used in the
@ -169,8 +173,8 @@ public abstract class AbstractVisualization<X, Y, N extends Node, C extends XYCh
    protected abstract Axis<Y> getYAxis();

    /**
     * * update this visualization based on current state of zoom /
     * filters.Primarily this invokes the background {@link Task} returned by
     * update this visualization based on current state of zoom /
     * filters. Primarily this invokes the background {@link Task} returned by
     * {@link #getUpdateTask()} which derived classes must implement.
     */
    synchronized public void update() {
@ -191,7 +195,7 @@ public abstract class AbstractVisualization<X, Y, N extends Node, C extends XYCh
            try {
                this.hasEvents.set(updateTask.get());
            } catch (InterruptedException | ExecutionException ex) {
                Exceptions.printStackTrace(ex);
                Logger.getLogger(AbstractVisualization.class.getName()).log(Level.SEVERE, "Unexpected exception updating visualization", ex);
            }
            break;
        }
@ -203,7 +207,7 @@ public abstract class AbstractVisualization<X, Y, N extends Node, C extends XYCh
        if (updateTask != null) {
            updateTask.cancel(true);
        }
        this.filteredEvents.getRequestedZoomParamters().removeListener(invalidationListener);
        this.filteredEvents.zoomParametersProperty().removeListener(invalidationListener);
        invalidationListener = null;
    }

@ -236,10 +240,23 @@ public abstract class AbstractVisualization<X, Y, N extends Node, C extends XYCh
    }

    @Override
    synchronized public void setModel(FilteredEventsModel filteredEvents) {
    synchronized public void setModel(@Nonnull FilteredEventsModel filteredEvents) {

        if (this.filteredEvents != null && this.filteredEvents != filteredEvents) {
            this.filteredEvents.unRegisterForEvents(this);
            this.filteredEvents.zoomParametersProperty().removeListener(invalidationListener);
        }
        if (this.filteredEvents != filteredEvents) {
            filteredEvents.registerForEvents(this);
            filteredEvents.zoomParametersProperty().addListener(invalidationListener);
        }
        this.filteredEvents = filteredEvents;

        this.filteredEvents.getRequestedZoomParamters().addListener(invalidationListener);
        update();
    }

    @Subscribe
    public void handleRefreshRequested(RefreshRequestedEvent event) {
        update();
    }
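
    /*
     * Illustrative flow (assumed from the other classes in this commit): when
     * FilteredEventsModel.refresh() posts a RefreshRequestedEvent on its event
     * bus, the @Subscribe method above re-runs the background update task with
     * the current zoom and filter parameters unchanged.
     */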
@ -6,15 +6,28 @@
<?import javafx.scene.layout.*?>

<fx:root maxHeight="-Infinity" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-Infinity" type="ToolBar" xmlns="http://javafx.com/javafx/8" xmlns:fx="http://javafx.com/fxml/1">
    <items><Label fx:id="refreshLabel">
        <graphic><ImageView fitHeight="16.0" fitWidth="16.0" pickOnBounds="true" preserveRatio="true">
            <image>
                <Image url="@../images/info-icon-16.png" />
            </image></ImageView>
        </graphic></Label><Separator orientation="VERTICAL" /><Region fx:id="spacer" /><Separator orientation="VERTICAL" /><Label fx:id="taskLabel" contentDisplay="RIGHT">
        <graphic><StackPane>
            <children><ProgressBar fx:id="progressBar" progress="0.0" /><Label fx:id="messageLabel" />
            </children></StackPane>
        </graphic></Label>
    </items>
    <items>
        <Label fx:id="refreshLabel">
            <graphic>
                <ImageView fitHeight="16.0" fitWidth="16.0" pickOnBounds="true" preserveRatio="true">
                    <image>
                        <Image url="@../images/info-icon-16.png" />
                    </image>
                </ImageView>
            </graphic>
        </Label>
        <Separator orientation="VERTICAL" />
        <Region fx:id="spacer" maxWidth="1.7976931348623157E308" />
        <Separator orientation="VERTICAL" />
        <Label fx:id="taskLabel" contentDisplay="RIGHT">
            <graphic>
                <StackPane>
                    <children>
                        <ProgressBar fx:id="progressBar" progress="0.0" />
                        <Label fx:id="messageLabel" />
                    </children>
                </StackPane>
            </graphic>
        </Label>
    </items>
</fx:root>
@ -27,7 +27,7 @@ import org.openide.nodes.Node;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.TimeLineView;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.explorernodes.EventRootNode;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataContent;
import org.sleuthkit.autopsy.corecomponents.DataResultPanel;
@ -1,7 +1,7 @@
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2013 Basis Technology Corp.
 * Copyright 2013-15 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
@ -18,6 +18,7 @@
 */
package org.sleuthkit.autopsy.timeline.ui;

import com.google.common.eventbus.Subscribe;
import java.net.URL;
import java.time.Instant;
import java.time.LocalDateTime;
@ -45,6 +46,8 @@ import javafx.scene.control.ToggleButton;
import javafx.scene.control.ToolBar;
import javafx.scene.control.Tooltip;
import javafx.scene.effect.Lighting;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.image.WritableImage;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.Background;
@ -60,6 +63,7 @@ import javafx.scene.layout.StackPane;
import javafx.scene.paint.Color;
import javax.annotation.concurrent.GuardedBy;
import jfxtras.scene.control.LocalDateTimeTextField;
import org.controlsfx.control.NotificationPane;
import org.controlsfx.control.RangeSlider;
import org.controlsfx.control.action.Action;
import org.joda.time.DateTime;
@ -75,7 +79,11 @@ import org.sleuthkit.autopsy.timeline.VisualizationMode;
import org.sleuthkit.autopsy.timeline.actions.ResetFilters;
import org.sleuthkit.autopsy.timeline.actions.SaveSnapshot;
import org.sleuthkit.autopsy.timeline.actions.ZoomOut;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.events.TagsUpdatedEvent;
import org.sleuthkit.autopsy.timeline.filters.TagsFilter;
import static org.sleuthkit.autopsy.timeline.ui.Bundle.VisualizationPanel_refresh;
import static org.sleuthkit.autopsy.timeline.ui.Bundle.VisualizationPanel_tagsAddedOrDeleted;
import org.sleuthkit.autopsy.timeline.ui.countsview.CountsViewPane;
import org.sleuthkit.autopsy.timeline.ui.detailview.DetailViewPane;
import org.sleuthkit.autopsy.timeline.ui.detailview.tree.NavPanel;
@ -91,27 +99,24 @@ import org.sleuthkit.autopsy.timeline.utils.RangeDivisionInfo;
 */
public class VisualizationPanel extends BorderPane implements TimeLineView {

    @GuardedBy("this")
    private LoggedTask<Void> histogramTask;
    private static final Image INFORMATION = new Image("org/sleuthkit/autopsy/timeline/images/information.png", 16, 16, true, true); // NON-NLS
    private static final Image REFRESH = new Image("org/sleuthkit/autopsy/timeline/images/arrow-circle-double-135.png"); // NON-NLS

    private static final Logger LOGGER = Logger.getLogger(VisualizationPanel.class.getName());

    @GuardedBy("this")
    private LoggedTask<Void> histogramTask;

    private final NavPanel navPanel;

    private AbstractVisualization<?, ?, ?, ?> visualization;

    @FXML // ResourceBundle that was given to the FXMLLoader
    private ResourceBundle resources;

    @FXML // URL location of the FXML file that was given to the FXMLLoader
    private URL location;

    //// range slider and histogram components
    @FXML // fx:id="histogramBox"
    protected HBox histogramBox; // Value injected by FXMLLoader
    @FXML
    protected HBox histogramBox;

    @FXML // fx:id="rangeHistogramStack"
    protected StackPane rangeHistogramStack; // Value injected by FXMLLoader
    @FXML
    protected StackPane rangeHistogramStack;

    private final RangeSlider rangeSlider = new RangeSlider(0, 1.0, .25, .75);

@ -167,7 +172,7 @@ public class VisualizationPanel extends BorderPane implements TimeLineView {
    @FXML
    private Label endLabel;

    private double preDragPos;
    private final NotificationPane notificationPane = new NotificationPane();

    protected TimeLineController controller;

@ -186,14 +191,14 @@ public class VisualizationPanel extends BorderPane implements TimeLineView {

    private final InvalidationListener endListener = (Observable observable) -> {
        if (endPicker.getLocalDateTime() != null) {
            controller.pushTimeRange(VisualizationPanel.this.filteredEvents.timeRange().get().withEndMillis(
            controller.pushTimeRange(VisualizationPanel.this.filteredEvents.timeRangeProperty().get().withEndMillis(
                    ZonedDateTime.of(endPicker.getLocalDateTime(), TimeLineController.getTimeZoneID()).toInstant().toEpochMilli()));
        }
    };

    private final InvalidationListener startListener = (Observable observable) -> {
        if (startPicker.getLocalDateTime() != null) {
            controller.pushTimeRange(VisualizationPanel.this.filteredEvents.timeRange().get().withStartMillis(
            controller.pushTimeRange(VisualizationPanel.this.filteredEvents.timeRangeProperty().get().withStartMillis(
                    ZonedDateTime.of(startPicker.getLocalDateTime(), TimeLineController.getTimeZoneID()).toInstant().toEpochMilli()));
        }
    };
@ -208,6 +213,7 @@ public class VisualizationPanel extends BorderPane implements TimeLineView {
    }

    @FXML // This method is called by the FXMLLoader when initialization is complete
    @NbBundle.Messages("VisualizationPanel.refresh=refresh")
    protected void initialize() {
        assert endPicker != null : "fx:id=\"endPicker\" was not injected: check your FXML file 'ViewWrapper.fxml'."; // NON-NLS
        assert histogramBox != null : "fx:id=\"histogramBox\" was not injected: check your FXML file 'ViewWrapper.fxml'."; // NON-NLS
@ -216,10 +222,20 @@ public class VisualizationPanel extends BorderPane implements TimeLineView {
        assert countsToggle != null : "fx:id=\"countsToggle\" was not injected: check your FXML file 'VisToggle.fxml'."; // NON-NLS
        assert detailsToggle != null : "fx:id=\"eventsToggle\" was not injected: check your FXML file 'VisToggle.fxml'."; // NON-NLS

        visualizationModeLabel.setText(
                NbBundle.getMessage(this.getClass(), "VisualizationPanel.visualizationModeLabel.text"));
        startLabel.setText(NbBundle.getMessage(this.getClass(), "VisualizationPanel.startLabel.text"));
        endLabel.setText(NbBundle.getMessage(this.getClass(), "VisualizationPanel.endLabel.text"));
        notificationPane.getStyleClass().add(NotificationPane.STYLE_CLASS_DARK);
        notificationPane.getActions().setAll(new Action(VisualizationPanel_refresh()) {
            {
                setGraphic(new ImageView(REFRESH));
                setEventHandler((ActionEvent t) -> {
                    filteredEvents.refresh();
                    notificationPane.hide();
                });
            }
        });
        setCenter(notificationPane);
        visualizationModeLabel.setText(NbBundle.getMessage(VisualizationPanel.class, "VisualizationPanel.visualizationModeLabel.text")); // NON-NLS
        startLabel.setText(NbBundle.getMessage(VisualizationPanel.class, "VisualizationPanel.startLabel.text")); // NON-NLS
        endLabel.setText(NbBundle.getMessage(VisualizationPanel.class, "VisualizationPanel.endLabel.text")); // NON-NLS

        HBox.setHgrow(leftSeperator, Priority.ALWAYS);
        HBox.setHgrow(rightSeperator, Priority.ALWAYS);
@ -242,26 +258,12 @@ public class VisualizationPanel extends BorderPane implements TimeLineView {
                countsToggle.getToggleGroup().selectedToggleProperty().addListener(toggleListener);
            });
        }
        countsToggle.setText(NbBundle.getMessage(this.getClass(), "VisualizationPanel.countsToggle.text"));
        detailsToggle.setText(NbBundle.getMessage(this.getClass(), "VisualizationPanel.detailsToggle.text"));
        countsToggle.setText(NbBundle.getMessage(VisualizationPanel.class, "VisualizationPanel.countsToggle.text")); // NON-NLS
        detailsToggle.setText(NbBundle.getMessage(VisualizationPanel.class, "VisualizationPanel.detailsToggle.text")); // NON-NLS

        //setup rangeslider
        rangeSlider.setOpacity(.7);
        rangeSlider.setMin(0);

//        /** this is still needed to not get swamped by low/high value changes.
//         * https://bitbucket.org/controlsfx/controlsfx/issue/241/rangeslider-high-low-properties
//         * TODO: committ an appropriate version of this fix to the ControlsFX
//         * repo on bitbucket, remove this after next release -jm */
//        Skin<?> skin = rangeSlider.getSkin();
//        if (skin != null) {
//            attachDragListener((RangeSliderSkin) skin);
//        } else {
//            rangeSlider.skinProperty().addListener((Observable observable) -> {
//                RangeSliderSkin skin1 = (RangeSliderSkin) rangeSlider.getSkin();
//                attachDragListener(skin1);
//            });
//        }
        rangeSlider.setBlockIncrement(1);

        rangeHistogramStack.getChildren().add(rangeSlider);
@ -275,7 +277,6 @@ public class VisualizationPanel extends BorderPane implements TimeLineView {

        zoomMenuButton.getItems().clear();
        for (ZoomRanges b : ZoomRanges.values()) {

            MenuItem menuItem = new MenuItem(b.getDisplayName());
            menuItem.setOnAction((event) -> {
                if (b != ZoomRanges.ALL) {
@ -286,7 +287,7 @@ public class VisualizationPanel extends BorderPane implements TimeLineView {
            });
            zoomMenuButton.getItems().add(menuItem);
        }
        zoomMenuButton.setText(NbBundle.getMessage(this.getClass(), "VisualizationPanel.zoomMenuButton.text"));
        zoomMenuButton.setText(NbBundle.getMessage(VisualizationPanel.class, "VisualizationPanel.zoomMenuButton.text")); // NON-NLS

        zoomOutButton.setOnAction(e -> {
            controller.pushZoomOutTime();
@ -307,69 +308,14 @@ public class VisualizationPanel extends BorderPane implements TimeLineView {
            new SaveSnapshot(controller, snapshot).handle(event);
        });

        snapShotButton.setText(NbBundle.getMessage(this.getClass(), "VisualizationPanel.snapShotButton.text"));
        snapShotButton.setText(NbBundle.getMessage(VisualizationPanel.class, "VisualizationPanel.snapShotButton.text")); // NON-NLS
    }

//    /**
//     * TODO: committed an appropriate version of this fix to the ControlsFX repo
//     * on bitbucket, remove this after next release -jm
//     *
//     * @param skin
//     */
//    private void attachDragListener(RangeSliderSkin skin) {
//        if (skin != null) {
//            for (Node n : skin.getChildren()) {
//                if (n.getStyleClass().contains("track")) {
//                    n.setOpacity(.3);
//                }
//                if (n.getStyleClass().contains("range-bar")) {
//                    StackPane rangeBar = (StackPane) n;
//                    rangeBar.setOnMousePressed((MouseEvent e) -> {
//                        rangeBar.requestFocus();
//                        preDragPos = e.getX();
//                    });
//
//                    //don't mark as not changing until mouse is released
//                    rangeBar.setOnMouseReleased((MouseEvent event) -> {
//                        rangeSlider.setLowValueChanging(false);
//                        rangeSlider.setHighValueChanging(false);
//                    });
//                    rangeBar.setOnMouseDragged((MouseEvent event) -> {
//                        final double min = rangeSlider.getMin();
//                        final double max = rangeSlider.getMax();
//
//                        ///!!! compensate for range and width so that rangebar actualy stays with the slider
//                        double delta = (event.getX() - preDragPos) * (max - min) / rangeSlider.
//                                getWidth();
//                        ////////////////////////////////////////////////////
//
//                        final double lowValue = rangeSlider.getLowValue();
//                        final double newLowValue = Math.min(Math.max(min, lowValue + delta),
//                                max);
//                        final double highValue = rangeSlider.getHighValue();
//                        final double newHighValue = Math.min(Math.max(min, highValue + delta),
//                                max);
//
//                        if (newLowValue <= min || newHighValue >= max) {
//                            return;
//                        }
//
//                        rangeSlider.setLowValueChanging(true);
//                        rangeSlider.setHighValueChanging(true);
//                        rangeSlider.setLowValue(newLowValue);
//                        rangeSlider.setHighValue(newHighValue);
//                    });
//                }
//            }
//        }
//    }
    @Override
    public synchronized void setController(TimeLineController controller) {
        this.controller = controller;
        setModel(controller.getEventsModel());

        setViewMode(controller.getViewMode().get());

        controller.getNeedsHistogramRebuild().addListener((ObservableValue<? extends Boolean> observable, Boolean oldValue, Boolean newValue) -> {
            if (newValue) {
                refreshHistorgram();
@ -379,9 +325,29 @@ public class VisualizationPanel extends BorderPane implements TimeLineView {
        controller.getViewMode().addListener((ObservableValue<? extends VisualizationMode> ov, VisualizationMode t, VisualizationMode t1) -> {
            setViewMode(t1);
        });
        TimeLineController.getTimeZone().addListener(timeRangeInvalidationListener);
        refreshHistorgram();
    }

    @Override
    public void setModel(FilteredEventsModel filteredEvents) {
        if (this.filteredEvents != null && this.filteredEvents != filteredEvents) {
            this.filteredEvents.unRegisterForEvents(this);
            this.filteredEvents.timeRangeProperty().removeListener(timeRangeInvalidationListener);
            this.filteredEvents.zoomParametersProperty().removeListener(zoomListener);
        }
        if (this.filteredEvents != filteredEvents) {
            filteredEvents.registerForEvents(this);
            filteredEvents.timeRangeProperty().addListener(timeRangeInvalidationListener);
            filteredEvents.zoomParametersProperty().addListener(zoomListener);
        }

        this.filteredEvents = filteredEvents;

        refreshTimeUI(filteredEvents.timeRangeProperty().get());

    }

    private void setViewMode(VisualizationMode visualizationMode) {
        switch (visualizationMode) {
            case COUNTS:
@ -407,29 +373,40 @@ public class VisualizationPanel extends BorderPane implements TimeLineView {
                    toolBar.getItems().addAll(newViz.getSettingsNodes());

                    visualization.setController(controller);
                    setCenter(visualization);
                    notificationPane.setContent(visualization);
                    if (visualization instanceof DetailViewPane) {
                        navPanel.setChart((DetailViewPane) visualization);
                    }
                    visualization.hasEvents.addListener((ObservableValue<? extends Boolean> observable, Boolean oldValue, Boolean newValue) -> {
                        if (newValue == false) {

                            setCenter(new StackPane(visualization, new Region() {
                            notificationPane.setContent(new StackPane(visualization, new Region() {
                                {
                                    setBackground(new Background(new BackgroundFill(Color.GREY, CornerRadii.EMPTY, Insets.EMPTY)));
                                    setOpacity(.3);
                                }
                            }, new NoEventsDialog(() -> {
                                setCenter(visualization);
                                notificationPane.setContent(visualization);
                            })));
                        } else {
                            setCenter(visualization);
                            notificationPane.setContent(visualization);
                        }
                    });
                }
            });
    }

    @Subscribe
    @NbBundle.Messages("VisualizationPanel.tagsAddedOrDeleted=Tags have been created and/or deleted. The visualization may not be up to date.")
    public void handleTimeLineTagEvent(TagsUpdatedEvent event) {
        TagsFilter tagsFilter = filteredEvents.getFilter().getTagsFilter();
        if (tagsFilter.isSelected() && tagsFilter.isDisabled() == false) {
            Platform.runLater(() -> {
                notificationPane.show(VisualizationPanel_tagsAddedOrDeleted(), new ImageView(INFORMATION));
            });
        }
    }

    synchronized private void refreshHistorgram() {

        if (histogramTask != null) {
@ -437,12 +414,12 @@ public class VisualizationPanel extends BorderPane implements TimeLineView {
        }

        histogramTask = new LoggedTask<Void>(
                NbBundle.getMessage(this.getClass(), "VisualizationPanel.histogramTask.title"), true) {
                NbBundle.getMessage(VisualizationPanel.class, "VisualizationPanel.histogramTask.title"), true) { // NON-NLS

            @Override
            protected Void call() throws Exception {

                updateMessage(NbBundle.getMessage(this.getClass(), "VisualizationPanel.histogramTask.preparing"));
                updateMessage(NbBundle.getMessage(VisualizationPanel.class, "VisualizationPanel.histogramTask.preparing")); // NON-NLS

                long max = 0;
                final RangeDivisionInfo rangeInfo = RangeDivisionInfo.getRangeDivisionInfo(filteredEvents.getSpanningInterval());
@ -455,7 +432,7 @@ public class VisualizationPanel extends BorderPane implements TimeLineView {

                //clear old data, and reset ranges and series
                Platform.runLater(() -> {
                    updateMessage(NbBundle.getMessage(this.getClass(), "VisualizationPanel.histogramTask.resetUI"));
                    updateMessage(NbBundle.getMessage(VisualizationPanel.class, "VisualizationPanel.histogramTask.resetUI")); // NON-NLS

                });

@ -472,7 +449,7 @@ public class VisualizationPanel extends BorderPane implements TimeLineView {

                    start = end;

                    updateMessage(NbBundle.getMessage(this.getClass(), "VisualizationPanel.histogramTask.queryDb"));
                    updateMessage(NbBundle.getMessage(VisualizationPanel.class, "VisualizationPanel.histogramTask.queryDb")); // NON-NLS
                    //query for current range
                    long count = filteredEvents.getEventCounts(interval).values().stream().mapToLong(Long::valueOf).sum();
                    bins.add(count);
@ -482,7 +459,7 @@ public class VisualizationPanel extends BorderPane implements TimeLineView {
                final double fMax = Math.log(max);
                final ArrayList<Long> fbins = new ArrayList<>(bins);
                Platform.runLater(() -> {
                    updateMessage(NbBundle.getMessage(this.getClass(), "VisualizationPanel.histogramTask.updateUI2"));
                    updateMessage(NbBundle.getMessage(VisualizationPanel.class, "VisualizationPanel.histogramTask.updateUI2")); // NON-NLS

                    histogramBox.getChildren().clear();

@ -514,18 +491,13 @@ public class VisualizationPanel extends BorderPane implements TimeLineView {
        controller.monitorTask(histogramTask);
    }

    @Override
    public void setModel(FilteredEventsModel filteredEvents) {
        this.filteredEvents = filteredEvents;
    private InvalidationListener timeRangeInvalidationListener = (Observable observable) -> {
        refreshTimeUI(filteredEvents.timeRangeProperty().get());
    };

        refreshTimeUI(filteredEvents.timeRange().get());
        this.filteredEvents.timeRange().addListener((Observable observable) -> {
            refreshTimeUI(filteredEvents.timeRange().get());
        });
        TimeLineController.getTimeZone().addListener((Observable observable) -> {
            refreshTimeUI(filteredEvents.timeRange().get());
        });
    }
    private InvalidationListener zoomListener = (Observable observable) -> {
        notificationPane.hide();
    };

    private void refreshTimeUI(Interval interval) {
        RangeDivisionInfo rangeDivisionInfo = RangeDivisionInfo.getRangeDivisionInfo(filteredEvents.getSpanningInterval());
@ -589,9 +561,8 @@ public class VisualizationPanel extends BorderPane implements TimeLineView {
            assert dismissButton != null : "fx:id=\"dismissButton\" was not injected: check your FXML file 'NoEventsDialog.fxml'."; // NON-NLS
            assert zoomButton != null : "fx:id=\"zoomButton\" was not injected: check your FXML file 'NoEventsDialog.fxml'."; // NON-NLS

            noEventsDialogLabel.setText(
                    NbBundle.getMessage(this.getClass(), "VisualizationPanel.noEventsDialogLabel.text"));
            zoomButton.setText(NbBundle.getMessage(this.getClass(), "VisualizationPanel.zoomButton.text"));
            noEventsDialogLabel.setText(NbBundle.getMessage(NoEventsDialog.class, "VisualizationPanel.noEventsDialogLabel.text")); // NON-NLS
            zoomButton.setText(NbBundle.getMessage(NoEventsDialog.class, "VisualizationPanel.zoomButton.text")); // NON-NLS

            Action zoomOutAction = new ZoomOut(controller);
            zoomButton.setOnAction(zoomOutAction);
@ -603,8 +574,7 @@ public class VisualizationPanel extends BorderPane implements TimeLineView {
            Action defaultFiltersAction = new ResetFilters(controller);
            resetFiltersButton.setOnAction(defaultFiltersAction);
            resetFiltersButton.disableProperty().bind(defaultFiltersAction.disabledProperty());
            resetFiltersButton.setText(
                    NbBundle.getMessage(this.getClass(), "VisualizationPanel.resetFiltersButton.text"));
            resetFiltersButton.setText(NbBundle.getMessage(NoEventsDialog.class, "VisualizationPanel.resetFiltersButton.text")); // NON-NLS
        }
    }
}
@@ -38,7 +38,13 @@ import javafx.scene.chart.CategoryAxis;
import javafx.scene.chart.NumberAxis;
import javafx.scene.chart.StackedBarChart;
import javafx.scene.chart.XYChart;
import javafx.scene.control.*;
import javafx.scene.control.ContextMenu;
import javafx.scene.control.Label;
import javafx.scene.control.MenuItem;
import javafx.scene.control.RadioButton;
import javafx.scene.control.SeparatorMenuItem;
import javafx.scene.control.ToggleGroup;
import javafx.scene.control.Tooltip;
import javafx.scene.effect.DropShadow;
import javafx.scene.effect.Effect;
import javafx.scene.effect.Lighting;

@@ -64,9 +70,9 @@ import org.sleuthkit.autopsy.timeline.TimeLineView;
import org.sleuthkit.autopsy.timeline.VisualizationMode;
import org.sleuthkit.autopsy.timeline.actions.Back;
import org.sleuthkit.autopsy.timeline.actions.Forward;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.events.type.EventType;
import org.sleuthkit.autopsy.timeline.events.type.RootEventType;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.RootEventType;
import org.sleuthkit.autopsy.timeline.ui.AbstractVisualization;
import org.sleuthkit.autopsy.timeline.utils.RangeDivisionInfo;

@@ -141,7 +147,7 @@ public class CountsViewPane extends AbstractVisualization<String, Number, Node,
setCursor(Cursor.WAIT);
});

final RangeDivisionInfo rangeInfo = RangeDivisionInfo.getRangeDivisionInfo(filteredEvents.timeRange().get());
final RangeDivisionInfo rangeInfo = RangeDivisionInfo.getRangeDivisionInfo(filteredEvents.timeRangeProperty().get());
chart.setRangeInfo(rangeInfo);
//extend range to block bounderies (ie day, month, year)
final long lowerBound = rangeInfo.getLowerBound();

@@ -304,6 +310,9 @@ public class CountsViewPane extends AbstractVisualization<String, Number, Node,
});
}

@Override
protected NumberAxis getYAxis() {
return countAxis;

@@ -36,7 +36,7 @@ import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.actions.Back;
import org.sleuthkit.autopsy.timeline.actions.Forward;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.ui.TimeLineChart;
import org.sleuthkit.autopsy.timeline.utils.RangeDivisionInfo;

@@ -127,7 +127,7 @@ class EventCountsChart extends StackedBarChart<String, Number> implements TimeLi

@Override
public void setModel(FilteredEventsModel filteredEvents) {
filteredEvents.getRequestedZoomParamters().addListener(o -> {
filteredEvents.zoomParametersProperty().addListener(o -> {
clearIntervalSelector();
controller.selectEventIDs(Collections.emptyList());
});

@@ -18,7 +18,6 @@
*/
package org.sleuthkit.autopsy.timeline.ui.detailview;

import com.google.common.eventbus.Subscribe;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@@ -63,21 +62,16 @@ import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.ColorUtilities;
import org.sleuthkit.autopsy.coreutils.LoggedTask;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.ThreadConfined;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
import org.sleuthkit.autopsy.timeline.events.EventsTaggedEvent;
import org.sleuthkit.autopsy.timeline.events.EventsUnTaggedEvent;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.events.TimeLineEvent;
import org.sleuthkit.autopsy.timeline.datamodel.AggregateEvent;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.TimeLineEvent;
import org.sleuthkit.autopsy.timeline.filters.RootFilter;
import org.sleuthkit.autopsy.timeline.filters.TextFilter;
import org.sleuthkit.autopsy.timeline.filters.TypeFilter;
import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD;
import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;

@@ -302,27 +296,8 @@ public class AggregateEventNode extends StackPane {

Map<String, Long> tagCounts = new HashMap<>();
if (!aggEvent.getEventIDsWithTags().isEmpty()) {
try {
for (TimeLineEvent tle : eventsModel.getEventsById(aggEvent.getEventIDsWithTags())) {
tagCounts.putAll( eventsModel.getTagCountsByTagName(aggEvent.getEventIDsWithTags()));

AbstractFile abstractFileById = sleuthkitCase.getAbstractFileById(tle.getFileID());
List<ContentTag> contentTags = sleuthkitCase.getContentTagsByContent(abstractFileById);
for (ContentTag tag : contentTags) {
tagCounts.merge(tag.getName().getDisplayName(), 1l, Long::sum);
}

Long artifactID = tle.getArtifactID();
if (artifactID != 0) {
BlackboardArtifact blackboardArtifact = sleuthkitCase.getBlackboardArtifact(artifactID);
List<BlackboardArtifactTag> artifactTags = sleuthkitCase.getBlackboardArtifactTagsByArtifact(blackboardArtifact);
for (BlackboardArtifactTag tag : artifactTags) {
tagCounts.merge(tag.getName().getDisplayName(), 1l, Long::sum);
}
}
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Error getting tag info for event.", ex);
}
}

String hashSetCountsString = hashSetCounts.entrySet().stream()

@@ -375,6 +350,7 @@ public class AggregateEventNode extends StackPane {
/**
* @param descrVis the level of description that should be displayed
*/
@ThreadConfined(type = ThreadConfined.ThreadType.JFX)
synchronized final void setDescriptionVisibility(DescriptionVisibility descrVis) {
this.descrVis = descrVis;
final int size = aggEvent.getEventIDs().size();

@@ -469,7 +445,7 @@ public class AggregateEventNode extends StackPane {
chart.setRequiresLayout(true);
chart.requestChartLayout();
} else {
RootFilter combinedFilter = eventsModel.filter().get().copyOf();
RootFilter combinedFilter = eventsModel.filterProperty().get().copyOf();
//make a new filter intersecting the global filter with text(description) and type filters to restrict sub-clusters
combinedFilter.getSubFilters().addAll(new TextFilter(aggEvent.getDescription()),
new TypeFilter(aggEvent.getType()));

@@ -485,7 +461,7 @@ public class AggregateEventNode extends StackPane {
protected List<AggregateEventNode> call() throws Exception {
//query for the sub-clusters
List<AggregateEvent> aggregatedEvents = eventsModel.getAggregatedEvents(new ZoomParams(span,
eventsModel.eventTypeZoom().get(),
eventsModel.eventTypeZoomProperty().get(),
combinedFilter,
newDescriptionLOD));
//for each sub cluster make an AggregateEventNode to visually represent it, and set x-position

@@ -544,30 +520,4 @@ public class AggregateEventNode extends StackPane {
}
}
}

synchronized void handleEventsUnTagged(EventsUnTaggedEvent tagEvent) {
AggregateEvent withTagsRemoved = aggEvent.withTagsRemoved(tagEvent.getEventIDs());
if (withTagsRemoved != aggEvent) {
aggEvent = withTagsRemoved;
tooltip = null;
boolean hasTags = aggEvent.getEventIDsWithTags().isEmpty() == false;
Platform.runLater(() -> {
tagIV.setManaged(hasTags);
tagIV.setVisible(hasTags);
});
}
}

@Subscribe
synchronized void handleEventsTagged(EventsTaggedEvent tagEvent) {
AggregateEvent withTagsAdded = aggEvent.withTagsAdded(tagEvent.getEventIDs());
if (withTagsAdded != aggEvent) {
aggEvent = withTagsAdded;
tooltip = null;
Platform.runLater(() -> {
tagIV.setManaged(true);
tagIV.setVisible(true);
});
}
}
}

@@ -67,11 +67,12 @@ import org.joda.time.DateTime;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.LoggedTask;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.ThreadConfined;
import org.sleuthkit.autopsy.timeline.FXMLConstructor;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.events.type.EventType;
import org.sleuthkit.autopsy.timeline.datamodel.AggregateEvent;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
import org.sleuthkit.autopsy.timeline.ui.AbstractVisualization;
import org.sleuthkit.autopsy.timeline.ui.countsview.CountsViewPane;
import org.sleuthkit.autopsy.timeline.ui.detailview.tree.NavTreeNode;

@@ -220,6 +221,11 @@ public class DetailViewPane extends AbstractVisualization<DateTime, AggregateEve

}

@Override
public synchronized void setModel(FilteredEventsModel filteredEvents) {
super.setModel(filteredEvents);
}

private void incrementScrollValue(int factor) {
vertScrollBar.valueProperty().set(Math.max(0, Math.min(100, vertScrollBar.getValue() + factor * (chart.getHeight() / chart.getMaxVScroll().get()))));
}

@@ -272,6 +278,7 @@ public class DetailViewPane extends AbstractVisualization<DateTime, AggregateEve
* @return a Series object to contain all the events with the given
* EventType
*/
@ThreadConfined(type = ThreadConfined.ThreadType.JFX)
private XYChart.Series<DateTime, AggregateEvent> getSeries(final EventType et) {
XYChart.Series<DateTime, AggregateEvent> series = eventTypeToSeriesMap.get(et);
if (series == null) {

@@ -302,7 +309,7 @@ public class DetailViewPane extends AbstractVisualization<DateTime, AggregateEve
updateProgress(-1, 1);
updateMessage(NbBundle.getMessage(this.getClass(), "DetailViewPane.loggedTask.preparing"));

final RangeDivisionInfo rangeInfo = RangeDivisionInfo.getRangeDivisionInfo(filteredEvents.timeRange().get());
final RangeDivisionInfo rangeInfo = RangeDivisionInfo.getRangeDivisionInfo(filteredEvents.timeRangeProperty().get());
final long lowerBound = rangeInfo.getLowerBound();
final long upperBound = rangeInfo.getUpperBound();

@@ -315,9 +322,6 @@ public class DetailViewPane extends AbstractVisualization<DateTime, AggregateEve
}
dateAxis.setLowerBound(new DateTime(lowerBound, TimeLineController.getJodaTimeZone()));
dateAxis.setUpperBound(new DateTime(upperBound, TimeLineController.getJodaTimeZone()));
// if (chart == null) {
// initializeClusterChart();
// }
vertScrollBar.setValue(0);
eventTypeToSeriesMap.clear();
dataSets.clear();

@@ -478,4 +482,5 @@ public class DetailViewPane extends AbstractVisualization<DateTime, AggregateEve
}

}

}

@@ -22,7 +22,7 @@ import java.util.Collections;
import java.util.List;
import javafx.scene.chart.Axis;
import javafx.scene.chart.XYChart;
import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
import org.sleuthkit.autopsy.timeline.datamodel.AggregateEvent;

/**
* No-Op axis that doesn't do anything usefull but is necessary to pass

@@ -19,7 +19,6 @@
package org.sleuthkit.autopsy.timeline.ui.detailview;

import com.google.common.collect.Collections2;
import com.google.common.eventbus.Subscribe;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;

@@ -72,11 +71,9 @@ import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.actions.Back;
import org.sleuthkit.autopsy.timeline.actions.Forward;
import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
import org.sleuthkit.autopsy.timeline.events.EventsTaggedEvent;
import org.sleuthkit.autopsy.timeline.events.EventsUnTaggedEvent;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.events.type.EventType;
import org.sleuthkit.autopsy.timeline.datamodel.AggregateEvent;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
import org.sleuthkit.autopsy.timeline.ui.TimeLineChart;

/**

@@ -344,12 +341,9 @@ public final class EventDetailChart extends XYChart<DateTime, AggregateEvent> im

@Override
public void setModel(FilteredEventsModel filteredEvents) {
if (this.filteredEvents != null) {
this.filteredEvents.unRegisterForEvents(this);
}

if (this.filteredEvents != filteredEvents) {
filteredEvents.registerForEvents(this);
filteredEvents.getRequestedZoomParamters().addListener(o -> {
filteredEvents.zoomParametersProperty().addListener(o -> {
clearGuideLine();
clearIntervalSelector();

@@ -527,7 +521,7 @@ public final class EventDetailChart extends XYChart<DateTime, AggregateEvent> im
return nodes;
}

private Iterable<AggregateEventNode> getAllNodes() {
Iterable<AggregateEventNode> getAllNodes() {
return getNodes(x -> true);
}

@@ -736,18 +730,4 @@ public final class EventDetailChart extends XYChart<DateTime, AggregateEvent> im
protected void requestChartLayout() {
super.requestChartLayout();
}

@Subscribe
synchronized public void handleEventsUnTagged(EventsUnTaggedEvent tagEvent) {
for (AggregateEventNode t : getAllNodes()) {
t.handleEventsUnTagged(tagEvent);
}
}

@Subscribe
synchronized public void handleEventsTagged(EventsTaggedEvent tagEvent) {
for (AggregateEventNode t : getAllNodes()) {
t.handleEventsTagged(tagEvent);
}
}
}

@@ -20,7 +20,7 @@ package org.sleuthkit.autopsy.timeline.ui.detailview.tree;

import java.util.Comparator;
import javafx.scene.control.TreeItem;
import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
import org.sleuthkit.autopsy.timeline.datamodel.AggregateEvent;

/**
*

@@ -24,7 +24,7 @@ import java.util.concurrent.ConcurrentHashMap;
import javafx.application.Platform;
import javafx.collections.FXCollections;
import javafx.scene.control.TreeItem;
import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
import org.sleuthkit.autopsy.timeline.datamodel.AggregateEvent;

class EventTypeTreeItem extends NavTreeItem {

@@ -26,7 +26,13 @@ import javafx.application.Platform;
import javafx.beans.Observable;
import javafx.collections.ObservableList;
import javafx.fxml.FXML;
import javafx.scene.control.*;
import javafx.scene.control.ComboBox;
import javafx.scene.control.Label;
import javafx.scene.control.SelectionMode;
import javafx.scene.control.Tooltip;
import javafx.scene.control.TreeCell;
import javafx.scene.control.TreeItem;
import javafx.scene.control.TreeView;
import javafx.scene.image.ImageView;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.StackPane;

@@ -35,8 +41,8 @@ import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.FXMLConstructor;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.TimeLineView;
import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.AggregateEvent;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.ui.detailview.AggregateEventNode;
import org.sleuthkit.autopsy.timeline.ui.detailview.DetailViewPane;

@@ -20,7 +20,7 @@ package org.sleuthkit.autopsy.timeline.ui.detailview.tree;

import java.util.Comparator;
import javafx.scene.control.TreeItem;
import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
import org.sleuthkit.autopsy.timeline.datamodel.AggregateEvent;

/**
* A node in the nav tree. Manages inserts and resorts. Has parents and

@@ -19,7 +19,7 @@
package org.sleuthkit.autopsy.timeline.ui.detailview.tree;

import javax.annotation.concurrent.Immutable;
import org.sleuthkit.autopsy.timeline.events.type.EventType;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;

/**
* The data item for the nav tree. Represents a combination of type and

@@ -24,8 +24,8 @@ import java.util.Map;
import javafx.application.Platform;
import javafx.collections.FXCollections;
import javafx.scene.control.TreeItem;
import org.sleuthkit.autopsy.timeline.events.type.EventType;
import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
import org.sleuthkit.autopsy.timeline.datamodel.AggregateEvent;

/**
*

@@ -20,7 +20,7 @@ package org.sleuthkit.autopsy.timeline.ui.detailview.tree;

import java.util.Comparator;
import javafx.scene.control.TreeItem;
import org.sleuthkit.autopsy.timeline.events.type.EventType;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;

enum TreeComparator implements Comparator<TreeItem<NavTreeNode>> {

@@ -38,8 +38,8 @@ import org.sleuthkit.autopsy.timeline.FXMLConstructor;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.TimeLineView;
import org.sleuthkit.autopsy.timeline.actions.ResetFilters;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.events.type.RootEventType;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.RootEventType;
import org.sleuthkit.autopsy.timeline.filters.AbstractFilter;
import org.sleuthkit.autopsy.timeline.filters.Filter;
import org.sleuthkit.autopsy.timeline.filters.RootFilter;

@@ -174,14 +174,14 @@ public class FilterSetPanel extends BorderPane implements TimeLineView {
public void setModel(FilteredEventsModel filteredEvents) {
this.filteredEvents = filteredEvents;
refresh();
this.filteredEvents.filter().addListener((Observable o) -> {
this.filteredEvents.filterProperty().addListener((Observable o) -> {
refresh();
});
}

private void refresh() {
Platform.runLater(() -> {
filterTreeTable.setRoot(new FilterTreeItem(filteredEvents.filter().get().copyOf(), expansionMap));
filterTreeTable.setRoot(new FilterTreeItem(filteredEvents.filterProperty().get().copyOf(), expansionMap));
});
}
}

@@ -30,7 +30,7 @@ import javafx.scene.shape.Rectangle;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.TimeLineView;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.filters.AbstractFilter;
import org.sleuthkit.autopsy.timeline.filters.TextFilter;
import org.sleuthkit.autopsy.timeline.filters.TypeFilter;

@@ -74,7 +74,7 @@ class LegendCell extends TreeTableCell<AbstractFilter, AbstractFilter> implement
rect.setArcWidth(5);
rect.setStrokeWidth(3);
setLegendColor(filter, rect, this.filteredEvents.getEventTypeZoom());
this.filteredEvents.eventTypeZoom().addListener((obs, oldZoomLevel, newZoomLevel) -> {
this.filteredEvents.eventTypeZoomProperty().addListener((obs, oldZoomLevel, newZoomLevel) -> {
setLegendColor(filter, rect, newZoomLevel);
});

@@ -50,7 +50,7 @@ public class ZoomParams {
return filter;
}

public DescriptionLOD getDescrLOD() {
public DescriptionLOD getDescriptionLOD() {
return descrLOD;
}

@@ -34,7 +34,7 @@ import org.sleuthkit.autopsy.timeline.TimeLineView;
import org.sleuthkit.autopsy.timeline.VisualizationMode;
import org.sleuthkit.autopsy.timeline.actions.Back;
import org.sleuthkit.autopsy.timeline.actions.Forward;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.utils.IntervalUtils;
import org.sleuthkit.autopsy.timeline.utils.RangeDivisionInfo;

@@ -141,12 +141,12 @@ public class ZoomSettingsPane extends TitledPane implements TimeLineView {
if (requestedUnit == TimeUnits.FOREVER) {
controller.showFullRange();
} else {
controller.pushTimeRange(IntervalUtils.getIntervalAround(IntervalUtils.middleOf(ZoomSettingsPane.this.filteredEvents.timeRange().get()), requestedUnit.getPeriod()));
controller.pushTimeRange(IntervalUtils.getIntervalAround(IntervalUtils.middleOf(ZoomSettingsPane.this.filteredEvents.timeRangeProperty().get()), requestedUnit.getPeriod()));
}
},
this.filteredEvents.timeRange(),
this.filteredEvents.timeRangeProperty(),
() -> {
RangeDivisionInfo rangeInfo = RangeDivisionInfo.getRangeDivisionInfo(this.filteredEvents.timeRange().get());
RangeDivisionInfo rangeInfo = RangeDivisionInfo.getRangeDivisionInfo(this.filteredEvents.timeRangeProperty().get());
ChronoUnit chronoUnit = rangeInfo.getPeriodSize().getChronoUnit();

timeUnitSlider.setValue(TimeUnits.fromChronoUnit(chronoUnit).ordinal() - 1);

@@ -158,9 +158,9 @@ public class ZoomSettingsPane extends TitledPane implements TimeLineView {
if (controller.pushDescrLOD(newLOD) == false) {
descrLODSlider.setValue(new DescrLODConverter().fromString(filteredEvents.getDescriptionLOD().toString()));
}
}, this.filteredEvents.descriptionLOD(),
}, this.filteredEvents.descriptionLODProperty(),
() -> {
descrLODSlider.setValue(this.filteredEvents.descriptionLOD().get().ordinal());
descrLODSlider.setValue(this.filteredEvents.descriptionLODProperty().get().ordinal());
});

initializeSlider(typeZoomSlider,

@@ -168,9 +168,9 @@ public class ZoomSettingsPane extends TitledPane implements TimeLineView {
EventTypeZoomLevel newZoomLevel = EventTypeZoomLevel.values()[Math.round(typeZoomSlider.valueProperty().floatValue())];
controller.pushEventTypeZoom(newZoomLevel);
},
this.filteredEvents.eventTypeZoom(),
this.filteredEvents.eventTypeZoomProperty(),
() -> {
typeZoomSlider.setValue(this.filteredEvents.eventTypeZoom().get().ordinal());
typeZoomSlider.setValue(this.filteredEvents.eventTypeZoomProperty().get().ordinal());
});
}

@@ -77,5 +77,7 @@ file.reference.xmlbeans-2.3.0.jar=release/modules/ext/xmlbeans-2.3.0.jar
javac.source=1.7
javac.compilerargs=-Xlint -Xlint:-serial
javadoc.reference.controlsfx-8.40.9.jar=release/modules/ext/controlsfx-8.40.9-javadoc.jar
javadoc.reference.guava-11.0.2.jar=release/modules/ext/guava-11.0.2-javadoc.jar
nbm.needs.restart=true
source.reference.controlsfx-8.40.9.jar=release/modules/ext/controlsfx-8.40.9-sources.jar
source.reference.guava-11.0.2.jar=release/modules/ext/guava-11.0.2-sources.jar

@@ -685,10 +685,6 @@
<runtime-relative-path>ext/poi-ooxml-schemas-3.8.jar</runtime-relative-path>
<binary-origin>release/modules/ext/poi-ooxml-schemas-3.8.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jfxtras-controls-8.0-r3.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jfxtras-controls-8.0-r3.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/gson-1.4.jar</runtime-relative-path>
<binary-origin>release/modules/ext/gson-1.4.jar</binary-origin>

@@ -697,6 +693,10 @@
<runtime-relative-path>ext/jsr305-1.3.9.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jsr305-1.3.9.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jfxtras-common-8.0-r3-sources.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jfxtras-common-8.0-r3-sources.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/xml-apis-1.0.b2.jar</runtime-relative-path>
<binary-origin>release/modules/ext/xml-apis-1.0.b2.jar</binary-origin>

@@ -713,10 +713,6 @@
<runtime-relative-path>ext/guava-11.0.2.jar</runtime-relative-path>
<binary-origin>release/modules/ext/guava-11.0.2.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jfxtras-common-8.0-r3-sources.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jfxtras-common-8.0-r3-sources.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/mail-1.4.3.jar</runtime-relative-path>
<binary-origin>release/modules/ext/mail-1.4.3.jar</binary-origin>

@@ -733,6 +729,10 @@
<runtime-relative-path>ext/common-lang-3.1.1.jar</runtime-relative-path>
<binary-origin>release/modules/ext/common-lang-3.1.1.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jfxtras-common-8.0-r3.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jfxtras-common-8.0-r3.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/slf4j-api-1.6.1.jar</runtime-relative-path>
<binary-origin>release/modules/ext/slf4j-api-1.6.1.jar</binary-origin>

@@ -746,8 +746,8 @@
<binary-origin>release/modules/ext/joda-time-2.4-javadoc.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jfxtras-fxml-8.0-r3-sources.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jfxtras-fxml-8.0-r3-sources.jar</binary-origin>
<runtime-relative-path>ext/jfxtras-fxml-8.0-r3.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jfxtras-fxml-8.0-r3.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jcalendarbutton-1.4.6.jar</runtime-relative-path>

@@ -765,6 +765,14 @@
<runtime-relative-path>ext/servlet-api-2.5.jar</runtime-relative-path>
<binary-origin>release/modules/ext/servlet-api-2.5.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jfxtras-controls-8.0-r3.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jfxtras-controls-8.0-r3.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jfxtras-fxml-8.0-r3-sources.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jfxtras-fxml-8.0-r3-sources.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/poi-excelant-3.8.jar</runtime-relative-path>
<binary-origin>release/modules/ext/poi-excelant-3.8.jar</binary-origin>

@@ -785,6 +793,10 @@
<runtime-relative-path>ext/avalon-framework-4.1.5.jar</runtime-relative-path>
<binary-origin>release/modules/ext/avalon-framework-4.1.5.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jfxtras-fxml-8.0-r3-javadoc.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jfxtras-fxml-8.0-r3-javadoc.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/geronimo-jms_1.1_spec-1.0.jar</runtime-relative-path>
<binary-origin>release/modules/ext/geronimo-jms_1.1_spec-1.0.jar</binary-origin>

@@ -801,30 +813,14 @@
<runtime-relative-path>ext/joda-time-2.4.jar</runtime-relative-path>
<binary-origin>release/modules/ext/joda-time-2.4.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jfxtras-fxml-8.0-r3-javadoc.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jfxtras-fxml-8.0-r3-javadoc.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jfxtras-fxml-8.0-r3.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jfxtras-fxml-8.0-r3.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/common-io-3.1.1.jar</runtime-relative-path>
<binary-origin>release/modules/ext/common-io-3.1.1.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jfxtras-common-8.0-r3.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jfxtras-common-8.0-r3.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/commons-logging-1.1.2-javadoc.jar</runtime-relative-path>
<binary-origin>release/modules/ext/commons-logging-1.1.2-javadoc.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jfxtras-controls-8.0-r3-javadoc.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jfxtras-controls-8.0-r3-javadoc.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/slf4j-simple-1.6.1.jar</runtime-relative-path>
<binary-origin>release/modules/ext/slf4j-simple-1.6.1.jar</binary-origin>

@@ -861,6 +857,10 @@
<runtime-relative-path>ext/imageio-icns-3.1.1.jar</runtime-relative-path>
<binary-origin>release/modules/ext/imageio-icns-3.1.1.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jfxtras-common-8.0-r3-javadoc.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jfxtras-common-8.0-r3-javadoc.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/javassist-3.12.1.GA.jar</runtime-relative-path>
<binary-origin>release/modules/ext/javassist-3.12.1.GA.jar</binary-origin>

@@ -893,10 +893,6 @@
<runtime-relative-path>ext/commons-lang3-3.0.jar</runtime-relative-path>
<binary-origin>release/modules/ext/commons-lang3-3.0.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jfxtras-common-8.0-r3-javadoc.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jfxtras-common-8.0-r3-javadoc.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/imageio-iff-3.1.1.jar</runtime-relative-path>
<binary-origin>release/modules/ext/imageio-iff-3.1.1.jar</binary-origin>

@@ -918,8 +914,8 @@
<binary-origin>release/modules/ext/gstreamer-java-1.5.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jfxtras-controls-8.0-r3-sources.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jfxtras-controls-8.0-r3-sources.jar</binary-origin>
<runtime-relative-path>ext/jfxtras-controls-8.0-r3-javadoc.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jfxtras-controls-8.0-r3-javadoc.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/imageio-metadata-3.1.1.jar</runtime-relative-path>

@@ -937,6 +933,10 @@
<runtime-relative-path>ext/imgscalr-lib-4.2-sources.jar</runtime-relative-path>
<binary-origin>release/modules/ext/imgscalr-lib-4.2-sources.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jfxtras-controls-8.0-r3-sources.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jfxtras-controls-8.0-r3-sources.jar</binary-origin>
</class-path-extension>
</data>
</configuration>
</project>

@@ -4,7 +4,7 @@
<configuration>
<data xmlns="http://www.netbeans.org/ns/nb-module-project/3">
<code-name-base>org.sleuthkit.autopsy.imagegallery</code-name-base>
<standalone/>
<suite-component/>
<module-dependencies>
<dependency>
<code-name-base>org.netbeans.api.progress</code-name-base>

ImageGallery/nbproject/suite.properties (new file, 1 line)
@@ -0,0 +1 @@
suite.dir=${basedir}/..

@@ -248,6 +248,7 @@ public final class ImageGalleryController {
return historyManager.getCanRetreat();
}

@ThreadConfined(type = ThreadConfined.ThreadType.JFX)
public void advance(GroupViewState newState, boolean forceShowTree) {
if (Objects.nonNull(navPanel) && forceShowTree) {
navPanel.showTree();

@@ -414,7 +415,7 @@ public final class ImageGalleryController {
dbWorkerThread.addTask(innerTask);
}

public DrawableFile<?> getFileFromId(Long fileID) throws TskCoreException {
synchronized public DrawableFile<?> getFileFromId(Long fileID) throws TskCoreException {
return db.getFileFromID(fileID);
}

@@ -776,7 +777,8 @@ public final class ImageGalleryController {
+ " AND (blackboard_attributes.value_text LIKE 'video/%'"
+ " OR blackboard_attributes.value_text LIKE 'image/%'"
+ " OR " + MIMETYPE_CLAUSE
+ " )";
+ " )"
+ ")";

private ProgressHandle progressHandle = ProgressHandleFactory.createHandle("populating analyzed image/video database");

@@ -38,7 +38,6 @@ import javafx.scene.image.Image;
import javafx.scene.image.WritableImage;
import javax.annotation.Nullable;
import javax.imageio.ImageIO;
import org.openide.util.Exceptions;
import org.sleuthkit.autopsy.coreutils.ImageUtils;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableFile;

@@ -46,7 +45,8 @@ import org.sleuthkit.autopsy.imagegallery.gui.Toolbar;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.TskCoreException;

/** Singleton to manage creation and access of icons. Keeps a cache in memory of
/**
* Singleton to manage creation and access of icons. Keeps a cache in memory of
* most recently used icons, and a disk cache of all icons.
*
* TODO: this was only a singleton for convenience, convert this to

@@ -60,7 +60,8 @@ public enum ThumbnailCache {

private static final Logger LOGGER = Logger.getLogger(ThumbnailCache.class.getName());

/** in memory cache. keeps at most 1000 items each for up to 10 minutes.
/**
* in memory cache. keeps at most 1000 items each for up to 10 minutes.
* items may be garbage collected if there are no strong references to them.
*/
private final Cache<Long, Optional<Image>> cache = CacheBuilder.newBuilder()

@@ -72,7 +73,9 @@ public enum ThumbnailCache {
return instance;
}

/** currently desired icon size. is bound in {@link Toolbar} */
/**
* currently desired icon size. is bound in {@link Toolbar}
*/
public final SimpleIntegerProperty iconSize = new SimpleIntegerProperty(200);

/**

@@ -82,7 +85,8 @@ public enum ThumbnailCache {
cache.invalidateAll();
}

/** get the cached thumbnail for the given file or generate a new one if
/**
* get the cached thumbnail for the given file or generate a new one if
* needed
*
* @param file

@@ -132,16 +136,15 @@ public enum ThumbnailCache {
if (cachFile.exists()) {
// If a thumbnail file is already saved locally, load it
try {
BufferedImage read = ImageIO.read(cachFile);

if (read.getWidth() < MAX_THUMBNAIL_SIZE) {
return read;
BufferedImage cachedThumbnail = ImageIO.read(cachFile);

if (cachedThumbnail.getWidth() < MAX_THUMBNAIL_SIZE) {
return cachedThumbnail;
}
} catch (MalformedURLException ex) {
LOGGER.log(Level.WARNING, "Unable to parse cache file path..");
LOGGER.log(Level.WARNING, "Unable to parse cache file path: " + cachFile.getPath(), ex);
} catch (IOException ex) {
Exceptions.printStackTrace(ex);
LOGGER.log(Level.WARNING, "Unable to read cache file " + cachFile.getPath(), ex);
}
}
return null;

@@ -150,10 +153,6 @@ public enum ThumbnailCache {
return (BufferedImage) ImageUtils.getThumbnail(file.getAbstractFile(), MAX_THUMBNAIL_SIZE);
});

// } catch (IllegalStateException e) {
// LOGGER.log(Level.WARNING, "can't load icon when no case is open");
// return Optional.empty();
// }
WritableImage jfxthumbnail;
if (thumbnail == ImageUtils.getDefaultThumbnail()) {
// if we go the default icon, ignore it

@@ -62,6 +62,7 @@ public class NextUnseenGroup extends Action {
});

setEventHandler((ActionEvent t) -> {
//if there is a group assigned to the view, mark it as seen
Optional.ofNullable(controller.viewState())
.map(ObjectExpression<GroupViewState>::getValue)
.map(GroupViewState::getGroup)

@@ -15,10 +15,14 @@ import org.sleuthkit.datamodel.TskCoreException;
*/
public class HashSetManager {

/** The db that initial values are loaded from. */
/**
* The db that initial values are loaded from.
*/
private DrawableDB db = null;

/** the internal cache from fileID to a set of hashset names. */
/**
* the internal cache from fileID to a set of hashset names.
*/
private final LoadingCache<Long, Set<String>> hashSetCache = CacheBuilder.newBuilder().build(CacheLoader.from(this::getHashSetsForFileHelper));

/**

@@ -27,6 +27,8 @@ import javafx.application.Platform;
import javafx.collections.FXCollections;
import javafx.scene.control.TreeItem;
import org.apache.commons.lang3.StringUtils;
import org.sleuthkit.autopsy.coreutils.ThreadConfined;
import org.sleuthkit.autopsy.coreutils.ThreadConfined.ThreadType;
import org.sleuthkit.autopsy.imagegallery.datamodel.grouping.DrawableGroup;

/**

@@ -37,11 +39,11 @@ import org.sleuthkit.autopsy.imagegallery.datamodel.grouping.DrawableGroup;
*/
class GroupTreeItem extends TreeItem<TreeNode> implements Comparable<GroupTreeItem> {

@ThreadConfined(type = ThreadType.JFX)
static GroupTreeItem getTreeItemForGroup(GroupTreeItem root, DrawableGroup grouping) {
if (Objects.equals(root.getValue().getGroup(), grouping)) {
return root;
} else {
synchronized (root.getChildren()) {
for (TreeItem<TreeNode> child : root.getChildren()) {
final GroupTreeItem childGTI = (GroupTreeItem) child;

@@ -51,7 +53,6 @@ class GroupTreeItem extends TreeItem<TreeNode> implements Comparable<GroupTreeIt
}
}
}
}
return null;
}

@@ -107,11 +108,8 @@ class GroupTreeItem extends TreeItem<TreeNode> implements Comparable<GroupTreeIt
prefixTreeItem = newTreeItem;
childMap.put(prefix, prefixTreeItem);
Platform.runLater(() -> {
synchronized (getChildren()) {
getChildren().add(newTreeItem);
}
});

}

// recursively go into the path

@@ -125,14 +123,11 @@ class GroupTreeItem extends TreeItem<TreeNode> implements Comparable<GroupTreeIt
childMap.put(path, newTreeItem);

Platform.runLater(() -> {
synchronized (getChildren()) {
getChildren().add(newTreeItem);
if (comp != null) {
FXCollections.sort(getChildren(), comp);
}
}
});

}
}
}

@@ -160,11 +155,8 @@ class GroupTreeItem extends TreeItem<TreeNode> implements Comparable<GroupTreeIt
childMap.put(prefix, prefixTreeItem);

Platform.runLater(() -> {
synchronized (getChildren()) {
getChildren().add(newTreeItem);
}
});

}

// recursively go into the path

@@ -179,12 +171,10 @@ class GroupTreeItem extends TreeItem<TreeNode> implements Comparable<GroupTreeIt
childMap.put(path.get(0), newTreeItem);

Platform.runLater(() -> {
synchronized (getChildren()) {
getChildren().add(newTreeItem);
if (comp != null) {
FXCollections.sort(getChildren(), comp);
}
}
});
}
}

@@ -195,12 +185,13 @@ class GroupTreeItem extends TreeItem<TreeNode> implements Comparable<GroupTreeIt
return comp.compare(this, o);
}

@ThreadConfined(type = ThreadConfined.ThreadType.JFX)
GroupTreeItem getTreeItemForPath(List<String> path) {
// end of recursion

if (path.isEmpty()) {
// end of recursion
return this;
} else {
synchronized (getChildren()) {
String prefix = path.get(0);

GroupTreeItem prefixTreeItem = childMap.get(prefix);

@@ -213,17 +204,15 @@ class GroupTreeItem extends TreeItem<TreeNode> implements Comparable<GroupTreeIt
return prefixTreeItem.getTreeItemForPath(path.subList(1, path.size()));
}
}
}

@ThreadConfined(type = ThreadConfined.ThreadType.JFX)
void removeFromParent() {
final GroupTreeItem parent = (GroupTreeItem) getParent();
if (parent != null) {
parent.childMap.remove(getValue().getPath());

Platform.runLater(() -> {
synchronized (parent.getChildren()) {
parent.getChildren().removeAll(Collections.singleton(GroupTreeItem.this));
}
});

if (parent.childMap.isEmpty()) {

@@ -232,14 +221,17 @@ class GroupTreeItem extends TreeItem<TreeNode> implements Comparable<GroupTreeIt
}
}

/**
* must be performed on fx thread because it manipualtes the tree directly.
*
* @param newComp
*/
@ThreadConfined(type = ThreadConfined.ThreadType.JFX)
void resortChildren(TreeNodeComparators newComp) {
this.comp = newComp;
synchronized (getChildren()) {
FXCollections.sort(getChildren(), comp);
}
for (GroupTreeItem ti : childMap.values()) {
ti.resortChildren(comp);
}
}

}

@@ -55,15 +55,15 @@ import org.sleuthkit.autopsy.imagegallery.datamodel.grouping.GroupViewState;
*/
public class NavPanel extends TabPane {

@FXML
private ComboBox<TreeNodeComparators> sortByBox;
@FXML
private TabPane navTabPane;
/**
* TreeView for folders with hash hits
*/
@FXML
private TreeView<TreeNode> hashTree;

@FXML
private TabPane navTabPane;

/**
* TreeView for all folders
*/

@@ -76,17 +76,14 @@ public class NavPanel extends TabPane {
@FXML
private Tab navTab;

@FXML
private ComboBox<TreeNodeComparators> sortByBox;

/**
* contains the 'active tree'
*/
private final SimpleObjectProperty<TreeView<TreeNode>> activeTreeProperty = new SimpleObjectProperty<>();
private GroupTreeItem hashTreeRoot;

private GroupTreeItem navTreeRoot;

private GroupTreeItem hashTreeRoot;
/**
* contains the 'active tree', three in the selected Tab.
*/
private final SimpleObjectProperty<TreeView<TreeNode>> activeTreeProperty = new SimpleObjectProperty<>();

private final ImageGalleryController controller;

@@ -118,6 +115,7 @@ public class NavPanel extends TabPane {
sortByBox.setItems(FXCollections.observableArrayList(FXCollections.observableArrayList(TreeNodeComparators.values())));
sortByBox.getSelectionModel().select(TreeNodeComparators.HIT_COUNT);
sortByBox.getSelectionModel().selectedItemProperty().addListener((Observable o) -> {
//user action ->jfx thread
resortHashTree();
});

@@ -143,14 +141,8 @@ public class NavPanel extends TabPane {
});

controller.getGroupManager().getAnalyzedGroups().addListener((ListChangeListener.Change<? extends DrawableGroup> change) -> {
TreeItem<TreeNode> selectedItem = activeTreeProperty.get().getSelectionModel().getSelectedItem();
boolean wasPermuted = false;
//analyzed groups shoud only be modified on jfx thread
while (change.next()) {
if (change.wasPermutated()) {
// Handle this afterward
wasPermuted = true;
break;
}
for (DrawableGroup g : change.getAddedSubList()) {
insertIntoNavTree(g);
if (g.getHashSetHitsCount() > 0) {

@@ -162,15 +154,6 @@ public class NavPanel extends TabPane {
removeFromHashTree(g);
}
}

if (wasPermuted) {
rebuildTrees();
}
if (selectedItem != null && selectedItem.getValue().getGroup() != null) {
Platform.runLater(() -> {
setFocusedGroup(selectedItem.getValue().getGroup());
});
}
});

rebuildTrees();

@@ -208,6 +191,7 @@ public class NavPanel extends TabPane {
}
}

@ThreadConfined(type = ThreadType.JFX)
private void resortHashTree() {
hashTreeRoot.resortChildren(sortByBox.getSelectionModel().getSelectedItem());
}

@@ -262,16 +246,19 @@ public class NavPanel extends TabPane {
}
}

@ThreadConfined(type = ThreadType.JFX)
private void insertIntoHashTree(DrawableGroup g) {
initHashTree();
hashTreeRoot.insert(groupingToPath(g), g, false);
}

@ThreadConfined(type = ThreadType.JFX)
private void insertIntoNavTree(DrawableGroup g) {
initNavTree();
navTreeRoot.insert(groupingToPath(g), g, true);
}

@ThreadConfined(type = ThreadType.JFX)
private void removeFromNavTree(DrawableGroup g) {
initNavTree();
final GroupTreeItem treeItemForGroup = GroupTreeItem.getTreeItemForGroup(navTreeRoot, g);

@@ -280,6 +267,7 @@ public class NavPanel extends TabPane {
}
}

@ThreadConfined(type = ThreadType.JFX)
private void removeFromHashTree(DrawableGroup g) {
initHashTree();
final GroupTreeItem treeItemForGroup = GroupTreeItem.getTreeItemForGroup(hashTreeRoot, g);

@@ -310,6 +298,7 @@ public class NavPanel extends TabPane {
}
}

@ThreadConfined(type = ThreadType.JFX)
public void showTree() {
getSelectionModel().select(navTab);
}

@@ -27,11 +27,13 @@ modules=\
${project.org.sleuthkit.autopsy.testing}:\
${project.org.sleuthkit.autopsy.thunderbirdparser}:\
${project.org.sleuthkit.autopsy.core}:\
${project.org.sleuthkit.autopsy.corelibs}
${project.org.sleuthkit.autopsy.corelibs}:\
${project.org.sleuthkit.autopsy.imagegallery}
project.org.sleuthkit.autopsy.core=Core
project.org.sleuthkit.autopsy.corelibs=CoreLibs
project.org.sleuthkit.autopsy.keywordsearch=KeywordSearch
project.org.sleuthkit.autopsy.recentactivity=RecentActivity
project.org.sleuthkit.autopsy.testing=Testing
project.org.sleuthkit.autopsy.thunderbirdparser=thunderbirdparser
project.org.sleuthkit.autopsy.imagegallery=ImageGallery