Mirror of https://github.com/overcuriousity/autopsy-flatpak.git, synced 2025-07-15 09:17:42 +00:00.

commit 2241e29529
Merge remote-tracking branch 'upstream/develop' into timeline_tags_visualiztion

Conflicts:
    Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java
    Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java
==== DataContentViewerMedia.java ====

@@ -22,7 +22,6 @@ import java.awt.CardLayout;
 import java.awt.Component;
 import java.awt.Dimension;
 import java.util.List;
-import static java.util.Objects.nonNull;
 import java.util.SortedSet;
 import java.util.TreeSet;
 import java.util.logging.Level;
@@ -32,10 +31,7 @@ import org.openide.util.lookup.ServiceProvider;
 import org.openide.util.lookup.ServiceProviders;
 import org.sleuthkit.autopsy.corecomponentinterfaces.DataContentViewer;
 import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
 import org.sleuthkit.datamodel.AbstractFile;
-import org.sleuthkit.datamodel.AbstractFile.MimeMatchEnum;
-import org.sleuthkit.datamodel.TskCoreException;
 import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM;
 
 /**
@@ -52,11 +48,8 @@ public class DataContentViewerMedia extends javax.swing.JPanel implements DataCo
 private final MediaViewVideoPanel videoPanel;
 private final boolean videoPanelInited;
 private final SortedSet<String> videoExtensions; // get them from the panel
-private final SortedSet<String> videoMimes;
 private final MediaViewImagePanel imagePanel;
 private final boolean imagePanelInited;
-private final SortedSet<String> imageExtensions; // get them from the panel
-private final SortedSet<String> imageMimes;
 
 private static final String IMAGE_VIEWER_LAYER = "IMAGE"; //NON-NLS
 private static final String VIDEO_VIEWER_LAYER = "VIDEO"; //NON-NLS
@@ -72,12 +65,9 @@ public class DataContentViewerMedia extends javax.swing.JPanel implements DataCo
 videoPanel = MediaViewVideoPanel.createVideoPanel();
 videoPanelInited = videoPanel.isInited();
 videoExtensions = new TreeSet<>(videoPanel.getExtensionsList());
-videoMimes = new TreeSet<>(videoPanel.getMimeTypes());
 
 imagePanel = new MediaViewImagePanel();
 imagePanelInited = imagePanel.isInited();
-imageExtensions = new TreeSet<>(imagePanel.getExtensionsList());
-imageMimes = new TreeSet<>(imagePanel.getMimeTypes());
 
 customizeComponents();
 logger.log(Level.INFO, "Created MediaView instance: {0}", this); //NON-NLS
@@ -239,10 +229,9 @@ public class DataContentViewerMedia extends javax.swing.JPanel implements DataCo
 if (file == null) {
 return 0;
 }
-String extension = file.getNameExtension();
 boolean deleted = file.isDirNameFlagSet(TSK_FS_NAME_FLAG_ENUM.UNALLOC);
 
-if (videoExtensions.contains("." + extension) && deleted) {
+if (videoPanel.isSupported(file) && deleted) {
 return 0;
 } else {
 return 7;
@@ -263,28 +252,6 @@ public class DataContentViewerMedia extends javax.swing.JPanel implements DataCo
 */
 List<String> getExtensionsList();
 
-default boolean isSupported(AbstractFile file) {
-SortedSet<String> mimeTypes = new TreeSet<>(getMimeTypes());
-try {
-String mimeType = new FileTypeDetector().getFileType(file);
-if (nonNull(mimeType)) {
-return mimeTypes.contains(mimeType);
-}
-} catch (FileTypeDetector.FileTypeDetectorInitException | TskCoreException ex) {
-logger.log(Level.WARNING, "Failed to look up mimetype for " + file.getName() + " using FileTypeDetector. Fallingback on AbstractFile.isMimeType", ex);
-if (!mimeTypes.isEmpty() && file.isMimeType(mimeTypes) == MimeMatchEnum.TRUE) {
-return true;
-}
-}
-
-String extension = file.getNameExtension();
-
-if (getExtensionsList().contains("." + extension)) {
-return true;
-}
-
-return false;
-}
-
+boolean isSupported(AbstractFile file);
 }
 }
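For context, the hunks above make MediaViewPanel.isSupported(AbstractFile) abstract, so each panel supplies its own check and the viewer simply asks the panel. A minimal sketch of that call pattern, not part of this commit; the chooseLayer helper and its parameters are illustrative assumptions and assume the same package as the panels:

// Sketch only: ask each panel whether it can render the file and pick its layer.
// The IMAGE/VIDEO layer names come from the constants shown above.
static String chooseLayer(MediaViewImagePanel imagePanel, MediaViewVideoPanel videoPanel, AbstractFile file) {
    if (videoPanel.isSupported(file)) {   // each panel now implements isSupported itself
        return "VIDEO";
    }
    if (imagePanel.isSupported(file)) {
        return "IMAGE";
    }
    return null; // neither panel can display this file
}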
==== MediaViewImagePanel.java ====

@@ -220,8 +220,7 @@ public class MediaViewImagePanel extends JPanel implements DataContentViewerMedi
 
 @Override
 public boolean isSupported(AbstractFile file) {
-return DataContentViewerMedia.MediaViewPanel.super.isSupported(file)
-|| ImageUtils.hasImageFileHeader(file);
+return ImageUtils.isImageThumbnailSupported(file);
 }
 
 /**
==== MediaViewVideoPanel.java ====

@@ -21,12 +21,16 @@ package org.sleuthkit.autopsy.corecomponents;
 import java.awt.Dimension;
 import java.util.Arrays;
 import java.util.List;
+import static java.util.Objects.nonNull;
 import java.util.Set;
+import java.util.SortedSet;
 import java.util.TreeSet;
 import java.util.logging.Level;
 import javax.swing.JPanel;
 import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
 import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.TskCoreException;
 
 /**
 * Video viewer part of the Media View layered pane.
@@ -132,9 +136,21 @@ public abstract class MediaViewVideoPanel extends JPanel implements FrameCapture
 String extension = file.getNameExtension();
 //TODO: is this what we want, to require both extension and mimetype support?
 if (AUDIO_EXTENSIONS.contains("." + extension) || getExtensionsList().contains("." + extension)) {
-return DataContentViewerMedia.MediaViewPanel.super.isSupported(file); //To change body of generated methods, choose Tools | Templates.
+SortedSet<String> mimeTypes = new TreeSet<>(getMimeTypes());
+try {
+String mimeType = new FileTypeDetector().getFileType(file);
+if (nonNull(mimeType)) {
+return mimeTypes.contains(mimeType);
+}
+} catch (FileTypeDetector.FileTypeDetectorInitException | TskCoreException ex) {
+logger.log(Level.WARNING, "Failed to look up mimetype for " + file.getName() + " using FileTypeDetector. Fallingback on AbstractFile.isMimeType", ex);
+if (!mimeTypes.isEmpty() && file.isMimeType(mimeTypes) == AbstractFile.MimeMatchEnum.TRUE) {
+return true;
+}
+}
+
+return getExtensionsList().contains("." + extension);
 }
 return false;
 }
 
 }
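The new isSupported() body above follows a detect-then-fall-back order: FileTypeDetector first, AbstractFile.isMimeType() if the detector fails, and the extension list last. A condensed sketch of that order, using only calls visible in the hunks; the method name and parameters are illustrative and the same package is assumed:

// Sketch only: MIME-first support check with an extension fallback.
static boolean looksSupported(AbstractFile file, SortedSet<String> mimeTypes, List<String> extensions) {
    try {
        String mimeType = new FileTypeDetector().getFileType(file); // may be null
        if (mimeType != null) {
            return mimeTypes.contains(mimeType);
        }
    } catch (FileTypeDetector.FileTypeDetectorInitException | TskCoreException ex) {
        // detector unavailable: fall back to the file's own MIME match
        if (!mimeTypes.isEmpty() && file.isMimeType(mimeTypes) == AbstractFile.MimeMatchEnum.TRUE) {
            return true;
        }
    }
    // last resort: the extension list (entries carry a leading dot, as above)
    return extensions.contains("." + file.getNameExtension());
}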
==== HashHitUtils.java (deleted) ====

@@ -1,61 +0,0 @@
-/*
-* To change this license header, choose License Headers in Project Properties.
-* To change this template file, choose Tools | Templates
-* and open the template in the editor.
-*/
-package org.sleuthkit.autopsy.coreutils;
-
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.logging.Level;
-import javax.annotation.Nonnull;
-import org.sleuthkit.datamodel.BlackboardArtifact;
-import org.sleuthkit.datamodel.BlackboardAttribute;
-import org.sleuthkit.datamodel.SleuthkitCase;
-import org.sleuthkit.datamodel.TskCoreException;
-
-/**
-*
-*/
-public class HashHitUtils {
-
-private static final Logger LOGGER = Logger.getLogger(HashHitUtils.class.getName());
-
-/**
-* For the given objID, get the names of all the hashsets that the object is
-* in.
-*
-* @param tskCase
-* @param objID the obj_id to find all the hash sets for
-*
-* @return a set of names, each of which is a hashset that the given object
-* is in.
-*
-* //TODO: Move this into sleuthkitcase?
-*/
-@Nonnull
-static public Set<String> getHashSetNamesForFile(SleuthkitCase tskCase, long objID) {
-try {
-Set<String> hashNames = new HashSet<>();
-List<BlackboardArtifact> arts = tskCase.getBlackboardArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT, objID);
-
-for (BlackboardArtifact a : arts) {
-List<BlackboardAttribute> attrs = a.getAttributes();
-for (BlackboardAttribute attr : attrs) {
-if (attr.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()) {
-hashNames.add(attr.getValueString());
-}
-}
-}
-return Collections.unmodifiableSet(hashNames);
-} catch (TskCoreException ex) {
-LOGGER.log(Level.SEVERE, "failed to get hash sets for file", ex);
-}
-return Collections.emptySet();
-}
-
-private HashHitUtils() {
-}
-}
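This helper could be dropped because AbstractFile now exposes the same lookup directly; the EventsRepository and AbstractAbstractFileNode hunks below switch to it. A one-method sketch of the replacement call, with the wrapper itself being illustrative:

// Sketch only: the direct replacement for getHashSetNamesForFile(tskCase, objID).
static Set<String> hashSetNamesFor(AbstractFile file) {
    try {
        return file.getHashSetNames(); // names of all hash sets this file hits
    } catch (TskCoreException ex) {
        return Collections.emptySet(); // mirror the old helper's empty-set fallback
    }
}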
==== ImageUtils.java ====

@@ -30,7 +30,6 @@ import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.file.Paths;
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
@@ -74,26 +73,11 @@ public class ImageUtils {
 
 private static final Logger logger = LOGGER;
 private static final BufferedImage DEFAULT_THUMBNAIL;
-private static final TreeSet<String> SUPPORTED_MIME_TYPES = new TreeSet<>();
-private static final List<String> SUPPORTED_EXTENSIONS = new ArrayList<>();
 private static final List<String> SUPPORTED_IMAGE_EXTENSIONS;
-private static final List<String> SUPPORTED_VIDEO_EXTENSIONS
-= Arrays.asList("mov", "m4v", "flv", "mp4", "3gp", "avi", "mpg",
-"mpeg", "asf", "divx", "rm", "moov", "wmv", "vob", "dat",
-"m1v", "m2v", "m4v", "mkv", "mpe", "yop", "vqa", "xmv",
-"mve", "wtv", "webm", "vivo", "vc1", "seq", "thp", "san",
-"mjpg", "smk", "vmd", "sol", "cpk", "sdp", "sbg", "rtsp",
-"rpl", "rl2", "r3d", "mlp", "mjpeg", "hevc", "h265", "265",
-"h264", "h263", "h261", "drc", "avs", "pva", "pmp", "ogg",
-"nut", "nuv", "nsv", "mxf", "mtv", "mvi", "mxg", "lxf",
-"lvf", "ivf", "mve", "cin", "hnm", "gxf", "fli", "flc",
-"flx", "ffm", "wve", "uv2", "dxa", "dv", "cdxl", "cdg",
-"bfi", "jv", "bik", "vid", "vb", "son", "avs", "paf", "mm",
-"flm", "tmv", "4xm"); //NON-NLS
 private static final TreeSet<String> SUPPORTED_IMAGE_MIME_TYPES;
-private static final List<String> SUPPORTED_VIDEO_MIME_TYPES
-= Arrays.asList("application/x-shockwave-flash", "video/x-m4v", "video/quicktime", "video/avi", "video/msvideo", "video/x-msvideo",
-"video/mp4", "video/x-ms-wmv", "video/mpeg", "video/asf"); //NON-NLS
+private static final List<String> CONDITIONAL_MIME_TYPES = Arrays.asList("audio/x-aiff", "application/octet-stream");
 private static final boolean openCVLoaded;
 
 static {
@@ -128,31 +112,19 @@ public class ImageUtils {
 openCVLoaded = openCVLoadedTemp;
 SUPPORTED_IMAGE_EXTENSIONS = Arrays.asList(ImageIO.getReaderFileSuffixes());
 
-SUPPORTED_EXTENSIONS.addAll(SUPPORTED_IMAGE_EXTENSIONS);
-SUPPORTED_EXTENSIONS.addAll(SUPPORTED_VIDEO_EXTENSIONS);
-
 SUPPORTED_IMAGE_MIME_TYPES = new TreeSet<>(Arrays.asList(ImageIO.getReaderMIMETypes()));
 /* special cases and variants that we support, but don't get registered
 * with ImageIO automatically */
 SUPPORTED_IMAGE_MIME_TYPES.addAll(Arrays.asList(
 "image/x-rgb",
 "image/x-ms-bmp",
+"image/x-portable-graymap",
+"image/x-portable-bitmap",
 "application/x-123"));
-SUPPORTED_MIME_TYPES.addAll(SUPPORTED_IMAGE_MIME_TYPES);
-SUPPORTED_MIME_TYPES.addAll(SUPPORTED_VIDEO_MIME_TYPES);
-
-//this is rarely usefull
-SUPPORTED_MIME_TYPES.removeIf("application/octet-stream"::equals);
+SUPPORTED_IMAGE_MIME_TYPES.removeIf("application/octet-stream"::equals);
 }
 
-/**
-* Get the default Icon, which is the icon for a file.
-*
-* @return
-*
-*
-*
-* /** initialized lazily */
+/** initialized lazily */
 private static FileTypeDetector fileTypeDetector;
 
 /** thread that saves generated thumbnails to disk in the background */
@@ -167,26 +139,10 @@ public class ImageUtils {
 return Collections.unmodifiableList(SUPPORTED_IMAGE_EXTENSIONS);
 }
 
-public static List<String> getSupportedVideoExtensions() {
-return SUPPORTED_VIDEO_EXTENSIONS;
-}
-
 public static SortedSet<String> getSupportedImageMimeTypes() {
 return Collections.unmodifiableSortedSet(SUPPORTED_IMAGE_MIME_TYPES);
 }
-
-public static List<String> getSupportedVideoMimeTypes() {
-return SUPPORTED_VIDEO_MIME_TYPES;
-}
-
-public static List<String> getSupportedExtensions() {
-return Collections.unmodifiableList(SUPPORTED_EXTENSIONS);
-}
-
-public static SortedSet<String> getSupportedMimeTypes() {
-return Collections.unmodifiableSortedSet(SUPPORTED_MIME_TYPES);
-}
 
 /**
 * Get the default thumbnail, which is the icon for a file. Used when we can
 * not
@@ -229,31 +185,59 @@ public class ImageUtils {
 }
 AbstractFile file = (AbstractFile) content;
 
+return VideoUtils.isVideoThumbnailSupported(file)
+|| isImageThumbnailSupported(file);
+}
+
+public static boolean isImageThumbnailSupported(AbstractFile file) {
+
+return isMediaThumbnailSupported(file, SUPPORTED_IMAGE_MIME_TYPES, SUPPORTED_IMAGE_EXTENSIONS, CONDITIONAL_MIME_TYPES)
+|| hasImageFileHeader(file);
+}
+
+/**
+* Check if a file is "supported" by checking it mimetype and extension
+*
+* //TODO: this should move to a better place. Should ImageUtils and
+* VideoUtils both implement/extend some base interface/abstract class. That
+* would be the natural place to put this.
+*
+* @param file
+* @param supportedMimeTypes a set of mimetypes that the could have to be
+* supported
+* @param supportedExtension a set of extensions a file could have to be
+* supported if the mime lookup fails or is
+* inconclusive
+* @param conditionalMimes a set of mimetypes that a file could have to be
+* supoprted if it also has a supported extension
+*
+* @return true if a thumbnail can be generated for the given file with the
+* given lists of supported mimetype and extensions
+*/
+static boolean isMediaThumbnailSupported(AbstractFile file, final SortedSet<String> supportedMimeTypes, final List<String> supportedExtension, List<String> conditionalMimes) {
+if (file.getSize() == 0) {
+return false;
+}
+final String extension = file.getNameExtension();
 try {
 String mimeType = getFileTypeDetector().getFileType(file);
 if (Objects.nonNull(mimeType)) {
-return SUPPORTED_MIME_TYPES.contains(mimeType)
-|| (mimeType.equalsIgnoreCase("audio/x-aiff") && "iff".equalsIgnoreCase(file.getNameExtension()));
+return supportedMimeTypes.contains(mimeType)
+|| (conditionalMimes.contains(mimeType.toLowerCase()) && supportedExtension.contains(extension));
 }
 } catch (FileTypeDetector.FileTypeDetectorInitException | TskCoreException ex) {
 LOGGER.log(Level.WARNING, "Failed to look up mimetype for " + file.getName() + " using FileTypeDetector. Fallingback on AbstractFile.isMimeType", ex);
 
-AbstractFile.MimeMatchEnum mimeMatch = file.isMimeType(SUPPORTED_MIME_TYPES);
+AbstractFile.MimeMatchEnum mimeMatch = file.isMimeType(supportedMimeTypes);
 if (mimeMatch == AbstractFile.MimeMatchEnum.TRUE) {
 return true;
 } else if (mimeMatch == AbstractFile.MimeMatchEnum.FALSE) {
 return false;
 }
 }
 
 // if we have an extension, check it
-final String extension = file.getNameExtension();
-if (StringUtils.isNotBlank(extension) && SUPPORTED_EXTENSIONS.contains(extension)) {
-return true;
-}
-
-// if no extension or one that is not for an image, then read the content
-return isJpegFileHeader(file) || isPngFileHeader(file);
+return StringUtils.isNotBlank(extension) && supportedExtension.contains(extension);
 }
 
 /**
@@ -303,22 +287,27 @@ public class ImageUtils {
 * problem making a thumbnail.
 */
 public static Image getThumbnail(Content content, int iconSize) {
-// If a thumbnail file is already saved locally
-File cacheFile = getCachedThumbnailLocation(content.getId());
-if (cacheFile.exists()) {
-try {
-BufferedImage thumbnail = ImageIO.read(cacheFile);
-if (isNull(thumbnail) || thumbnail.getWidth() != iconSize) {
-return generateAndSaveThumbnail(content, iconSize, cacheFile);
-} else {
-return thumbnail;
+if (content instanceof AbstractFile) {
+AbstractFile file = (AbstractFile) content;
+// If a thumbnail file is already saved locally
+File cacheFile = getCachedThumbnailLocation(content.getId());
+if (cacheFile.exists()) {
+try {
+BufferedImage thumbnail = ImageIO.read(cacheFile);
+if (isNull(thumbnail) || thumbnail.getWidth() != iconSize) {
+return generateAndSaveThumbnail(file, iconSize, cacheFile);
+} else {
+return thumbnail;
+}
+} catch (Exception ex) {
+LOGGER.log(Level.WARNING, "Error while reading image: " + content.getName(), ex); //NON-NLS
+return generateAndSaveThumbnail(file, iconSize, cacheFile);
 }
-} catch (Exception ex) {
-LOGGER.log(Level.WARNING, "Error while reading image: " + content.getName(), ex); //NON-NLS
-return generateAndSaveThumbnail(content, iconSize, cacheFile);
+} else {
+return generateAndSaveThumbnail(file, iconSize, cacheFile);
 }
 } else {
-return generateAndSaveThumbnail(content, iconSize, cacheFile);
+return DEFAULT_THUMBNAIL;
 }
 }
 
@@ -458,25 +447,23 @@ public class ImageUtils {
 /**
 * Generate an icon and save it to specified location.
 *
-* @param content File to generate icon for
+* @param file File to generate icon for
 * @param iconSize
 * @param cacheFile Location to save thumbnail to
 *
 * @return Generated icon or null on error
 */
-private static Image generateAndSaveThumbnail(Content content, int iconSize, File cacheFile) {
-AbstractFile f = (AbstractFile) content;
-final String extension = f.getNameExtension();
+private static Image generateAndSaveThumbnail(AbstractFile file, int iconSize, File cacheFile) {
 BufferedImage thumbnail = null;
 try {
-if (SUPPORTED_VIDEO_EXTENSIONS.contains(extension)) {
+if (VideoUtils.isVideoThumbnailSupported(file)) {
 if (openCVLoaded) {
-thumbnail = VideoUtils.generateVideoThumbnail((AbstractFile) content, iconSize);
+thumbnail = VideoUtils.generateVideoThumbnail(file, iconSize);
 } else {
 return DEFAULT_THUMBNAIL;
 }
 } else {
-thumbnail = generateImageThumbnail(content, iconSize);
+thumbnail = generateImageThumbnail(file, iconSize);
 }
 
 if (thumbnail == null) {
@@ -492,12 +479,12 @@ public class ImageUtils {
 }
 ImageIO.write(toSave, FORMAT, cacheFile);
 } catch (IllegalArgumentException | IOException ex1) {
-LOGGER.log(Level.WARNING, "Could not write cache thumbnail: " + content, ex1); //NON-NLS
+LOGGER.log(Level.WARNING, "Could not write cache thumbnail: " + file, ex1); //NON-NLS
 }
 });
 }
 } catch (NullPointerException ex) {
-logger.log(Level.WARNING, "Could not write cache thumbnail: " + content, ex); //NON-NLS
+logger.log(Level.WARNING, "Could not write cache thumbnail: " + file, ex); //NON-NLS
 }
 return thumbnail;
 }
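The key addition above is isMediaThumbnailSupported(), one shared check that takes the MIME list, extension list, and conditional MIME list as parameters; ImageUtils and VideoUtils each call it with their own constants. A short usage sketch, where the wrapper method name is an illustrative assumption:

// Sketch only: combine the two front-ends introduced in this commit.
static boolean canMakeThumbnail(AbstractFile file) {
    return VideoUtils.isVideoThumbnailSupported(file)
            || ImageUtils.isImageThumbnailSupported(file); // also accepts files with a recognizable image header
}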
==== VideoUtils.java ====

@@ -22,6 +22,11 @@ import java.awt.image.BufferedImage;
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.SortedSet;
+import java.util.TreeSet;
 import java.util.logging.Level;
 import org.netbeans.api.progress.ProgressHandle;
 import org.netbeans.api.progress.ProgressHandleFactory;
@@ -29,6 +34,7 @@ import org.opencv.core.Mat;
 import org.opencv.highgui.VideoCapture;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.corelibs.ScalrWrapper;
+import static org.sleuthkit.autopsy.coreutils.ImageUtils.isMediaThumbnailSupported;
 import org.sleuthkit.autopsy.datamodel.ContentUtils;
 import org.sleuthkit.datamodel.AbstractFile;
 
@@ -37,6 +43,34 @@ import org.sleuthkit.datamodel.AbstractFile;
 */
 public class VideoUtils {
 
+private static final List<String> SUPPORTED_VIDEO_EXTENSIONS
+= Arrays.asList("mov", "m4v", "flv", "mp4", "3gp", "avi", "mpg",
+"mpeg", "asf", "divx", "rm", "moov", "wmv", "vob", "dat",
+"m1v", "m2v", "m4v", "mkv", "mpe", "yop", "vqa", "xmv",
+"mve", "wtv", "webm", "vivo", "vc1", "seq", "thp", "san",
+"mjpg", "smk", "vmd", "sol", "cpk", "sdp", "sbg", "rtsp",
+"rpl", "rl2", "r3d", "mlp", "mjpeg", "hevc", "h265", "265",
+"h264", "h263", "h261", "drc", "avs", "pva", "pmp", "ogg",
+"nut", "nuv", "nsv", "mxf", "mtv", "mvi", "mxg", "lxf",
+"lvf", "ivf", "mve", "cin", "hnm", "gxf", "fli", "flc",
+"flx", "ffm", "wve", "uv2", "dxa", "dv", "cdxl", "cdg",
+"bfi", "jv", "bik", "vid", "vb", "son", "avs", "paf", "mm",
+"flm", "tmv", "4xm"); //NON-NLS
+
+private static final SortedSet<String> SUPPORTED_VIDEO_MIME_TYPES = new TreeSet<>(
+Arrays.asList("application/x-shockwave-flash", "video/x-m4v", "video/quicktime", "video/avi", "video/msvideo", "video/x-msvideo",
+"video/mp4", "video/x-ms-wmv", "video/mpeg", "video/asf")); //NON-NLS
+
+private static final List<String> CONDITIONAL_MIME_TYPES = Arrays.asList("application/octet-stream");
+
+public static List<String> getSupportedVideoExtensions() {
+return SUPPORTED_VIDEO_EXTENSIONS;
+}
+
+public static SortedSet<String> getSupportedVideoMimeTypes() {
+return Collections.unmodifiableSortedSet(SUPPORTED_VIDEO_MIME_TYPES);
+}
+
 private static final int THUMB_COLUMNS = 3;
 private static final int THUMB_ROWS = 3;
 private static final int CV_CAP_PROP_POS_MSEC = 0;
@@ -52,6 +86,10 @@ public class VideoUtils {
 return Paths.get(Case.getCurrentCase().getTempDirectory(), "videos", file.getId() + "." + file.getNameExtension()).toFile();
 }
 
+public static boolean isVideoThumbnailSupported(AbstractFile file) {
+return isMediaThumbnailSupported(file, SUPPORTED_VIDEO_MIME_TYPES, SUPPORTED_VIDEO_EXTENSIONS, CONDITIONAL_MIME_TYPES);
+}
+
 static BufferedImage generateVideoThumbnail(AbstractFile file, int iconSize) {
 java.io.File tempFile = getTempVideoFile(file);
 
@@ -93,11 +131,11 @@ public class VideoUtils {
 for (int x = 0; x < THUMB_COLUMNS; x++) {
 for (int y = 0; y < THUMB_ROWS; y++) {
 if (!videoFile.set(CV_CAP_PROP_POS_MSEC, timestamp + x * framkeskip + y * framkeskip * THUMB_COLUMNS)) {
-break;
+break; // if we can't set the time, return black for that frame
 }
 //read the frame into the image/matrix
 if (!videoFile.read(imageMatrix)) {
-break; //if the image for some reason is bad, return default icon
+break; //if the image for some reason is bad, return black for that frame
 }
 
 if (bufferedImage == null) {
@@ -122,6 +160,6 @@ public class VideoUtils {
 
 videoFile.release(); // close the file
 
-return ScalrWrapper.resizeFast(bufferedImage, iconSize);
+return bufferedImage == null ? bufferedImage : ScalrWrapper.resizeFast(bufferedImage, iconSize);
 }
 }
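VideoUtils now owns the video extension and MIME lists and delegates the support decision to ImageUtils.isMediaThumbnailSupported(). Since generateVideoThumbnail() can now return null when no frame is readable, callers should guard for that; a sketch under the assumption that the caller lives in the same package (generateVideoThumbnail is package-private) and that the helper name is illustrative:

// Sketch only: check support first, then tolerate a null thumbnail.
static BufferedImage videoThumbnailOrNull(AbstractFile file, int iconSize) {
    if (!VideoUtils.isVideoThumbnailSupported(file)) {
        return null;
    }
    return VideoUtils.generateVideoThumbnail(file, iconSize); // may be null if OpenCV read no frames
}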
==== AbstractAbstractFileNode.java ====

@@ -18,17 +18,12 @@
 */
 package org.sleuthkit.autopsy.datamodel;
 
-import java.sql.ResultSet;
-import java.sql.SQLException;
 import java.util.Map;
 import java.util.logging.Level;
+import org.apache.commons.lang3.StringUtils;
 import org.openide.util.NbBundle;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.datamodel.AbstractFile;
-import org.sleuthkit.datamodel.BlackboardArtifact;
-import org.sleuthkit.datamodel.BlackboardAttribute;
-import org.sleuthkit.datamodel.SleuthkitCase;
-import org.sleuthkit.datamodel.SleuthkitCase.CaseDbQuery;
 import org.sleuthkit.datamodel.TskCoreException;
 
 /**
@@ -38,7 +33,7 @@ import org.sleuthkit.datamodel.TskCoreException;
 */
 public abstract class AbstractAbstractFileNode<T extends AbstractFile> extends AbstractContentNode<T> {
 
-private static Logger logger = Logger.getLogger(AbstractAbstractFileNode.class.getName());
+private static final Logger LOGGER = Logger.getLogger(AbstractAbstractFileNode.class.getName());
 
 /**
 * @param <T> type of the AbstractFile data to encapsulate
@@ -162,7 +157,6 @@ public abstract class AbstractAbstractFileNode<T extends AbstractFile> extends A
 MD5HASH {
 @Override
 public String toString() {
-
 return NbBundle.getMessage(this.getClass(), "AbstractAbstractFileNode.md5HashColLbl");
 }
 },
@@ -189,7 +183,7 @@ public abstract class AbstractAbstractFileNode<T extends AbstractFile> extends A
 try {
 path = content.getUniquePath();
 } catch (TskCoreException ex) {
-logger.log(Level.SEVERE, "Except while calling Content.getUniquePath() on {0}", content); //NON-NLS
+LOGGER.log(Level.SEVERE, "Except while calling Content.getUniquePath() on {0}", content); //NON-NLS
 }
 
 map.put(AbstractFilePropertyType.NAME.toString(), AbstractAbstractFileNode.getContentDisplayName(content));
@@ -219,44 +213,22 @@ public abstract class AbstractAbstractFileNode<T extends AbstractFile> extends A
 String name = file.getName();
 switch (name) {
 case "..":
-name = DirectoryNode.DOTDOTDIR;
-break;
+return DirectoryNode.DOTDOTDIR;
 case ".":
-name = DirectoryNode.DOTDIR;
-break;
+return DirectoryNode.DOTDIR;
+default:
+return name;
 }
-return name;
 }
 
 @SuppressWarnings("deprecation")
 private static String getHashSetHitsForFile(AbstractFile content) {
-String strList = "";
-SleuthkitCase skCase = content.getSleuthkitCase();
-long objId = content.getId();
-int setNameId = BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID();
-int artId = BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID();
-
-String query = "SELECT value_text,blackboard_attributes.artifact_id,attribute_type_id " //NON-NLS
-+ "FROM blackboard_attributes,blackboard_artifacts WHERE " //NON-NLS
-+ "attribute_type_id=" + setNameId //NON-NLS
-+ " AND blackboard_attributes.artifact_id=blackboard_artifacts.artifact_id" //NON-NLS
-+ " AND blackboard_artifacts.artifact_type_id=" + artId //NON-NLS
-+ " AND blackboard_artifacts.obj_id=" + objId; //NON-NLS
-
-try (CaseDbQuery dbQuery = skCase.executeQuery(query)) {
-ResultSet resultSet = dbQuery.getResultSet();
-int i = 0;
-while (resultSet.next()) {
-if (i++ > 0) {
-strList += ", ";
-}
-strList += resultSet.getString("value_text"); //NON-NLS
-}
-} catch (TskCoreException | SQLException ex) {
-logger.log(Level.WARNING, "Error getting hashset hits: ", ex); //NON-NLS
+try {
+return StringUtils.join(content.getHashSetNames(), ", ");
+} catch (TskCoreException tskCoreException) {
+LOGGER.log(Level.WARNING, "Error getting hashset hits: ", tskCoreException); //NON-NLS
+return "";
 }
-return strList;
-}
+}
 
 }
==== EventDB.java ====

@@ -18,7 +18,6 @@
 */
 package org.sleuthkit.autopsy.timeline.events.db;
 
-import com.google.common.base.Stopwatch;
 import com.google.common.collect.HashMultimap;
 import com.google.common.collect.SetMultimap;
 import java.nio.file.Paths;
@@ -82,14 +81,15 @@ import org.sqlite.SQLiteJDBCLoader;
 /**
 * Provides access to the Timeline SQLite database.
 *
-* This class borrows a lot of ideas and techniques from
-* {@link SleuthkitCase}. Creating an abstract base class for SQLite
-* databases, or using a higherlevel persistence api may make sense in the
-* future.
+* This class borrows a lot of ideas and techniques from {@link SleuthkitCase}.
+* Creating an abstract base class for SQLite databases, or using a higherlevel
+* persistence api may make sense in the future.
 */
 public class EventDB {
 
-/** enum to represent keys stored in db_info table */
+/**
+* enum to represent keys stored in db_info table
+*/
 private enum DBInfoKey {
 
 LAST_ARTIFACT_ID("last_artifact_id"), // NON-NLS
@@ -221,8 +221,8 @@ public class EventDB {
 }
 
 /**
-* @return the total number of events in the database or,
-* -1 if there is an error.
+* @return the total number of events in the database or, -1 if there is an
+* error.
 */
 int countAllEvents() {
 DBLock.lock();
@@ -292,7 +292,6 @@ public class EventDB {
 if (end2 == 0) {
 end2 = getMaxTime();
 }
-//System.out.println(start2 + " " + start + " " + end + " " + end2);
 return new Interval(start2 * 1000, (end2 + 1) * 1000, TimeLineController.getJodaTimeZone());
 }
 } catch (SQLException ex) {
@@ -335,7 +334,6 @@ public class EventDB {
 
 DBLock.lock();
 final String query = "select event_id from from events" + useHashHitTablesHelper(filter) + " where time >= " + startTime + " and time <" + endTime + " and " + SQLHelper.getSQLWhere(filter); // NON-NLS
-//System.out.println(query);
 try (Statement stmt = con.createStatement();
 ResultSet rs = stmt.executeQuery(query)) {
 
@@ -361,8 +359,10 @@ public class EventDB {
 }
 
 boolean hasNewColumns() {
-/* this relies on the fact that no tskObj has ID 0 but 0 is the default
-* value for the datasource_id column in the events table. */
+/*
+* this relies on the fact that no tskObj has ID 0 but 0 is the default
+* value for the datasource_id column in the events table.
+*/
 return hasHashHitColumn() && hasDataSourceIDColumn() && hasTaggedColumn()
 && (getDataSourceIDs().isEmpty() == false);
 }
@@ -403,7 +403,9 @@ public class EventDB {
 return Collections.unmodifiableMap(hashSets);
 }
 
-/** @return maximum time in seconds from unix epoch */
+/**
+* @return maximum time in seconds from unix epoch
+*/
 Long getMaxTime() {
 DBLock.lock();
 try (ResultSet rs = getMaxTimeStmt.executeQuery()) {
@@ -418,7 +420,9 @@ public class EventDB {
 return -1l;
 }
 
-/** @return maximum time in seconds from unix epoch */
+/**
+* @return maximum time in seconds from unix epoch
+*/
 Long getMinTime() {
 DBLock.lock();
 try (ResultSet rs = getMinTimeStmt.executeQuery()) {
@@ -837,8 +841,8 @@ public class EventDB {
 * from unix epoch)
 * @param filter only events that pass this filter will be counted
 * @param zoomLevel only events of this type or a subtype will be counted
-* and the counts will be organized into bins for each of the subtypes of
-* the given event type
+* and the counts will be organized into bins for each of
+* the subtypes of the given event type
 *
 * @return a map organizing the counts in a hierarchy from date > eventtype>
 * count
@@ -860,14 +864,7 @@ public class EventDB {
 
 ResultSet rs = null;
 DBLock.lock();
-//System.out.println(queryString);
 try (Statement stmt = con.createStatement();) {
-Stopwatch stopwatch = new Stopwatch();
-stopwatch.start();
-System.out.println(queryString);
-rs = stmt.executeQuery(queryString);
-stopwatch.stop();
-// System.out.println(stopwatch.elapsedMillis() / 1000.0 + " seconds");
 while (rs.next()) {
 
 EventType type = useSubTypes
@@ -895,16 +892,15 @@ public class EventDB {
 }
 
 /**
-* //TODO: update javadoc //TODO: split this into helper methods
+* //TODO: update javadoc, and split this into helper methods
 *
 * get a list of {@link AggregateEvent}s.
 *
 * General algorithm is as follows:
 *
-* - get all aggregate events, via one db query.
-* - sort them into a map from (type, description)-> aggevent
-* - for each key in map, merge the events and accumulate them in a list
-* to return
+* 1) get all aggregate events, via one db query. 2) sort them into a map
+* from (type, description)-> aggevent 3) for each key in map, merge the
+* events and accumulate them in a list to return
 *
 *
 * @param timeRange the Interval within in which all returned aggregate
@@ -916,8 +912,8 @@ public class EventDB {
 *
 *
 * @return a list of aggregate events within the given timerange, that pass
-* the supplied filter, aggregated according to the given event type and
-* description zoom levels
+* the supplied filter, aggregated according to the given event type
+* and description zoom levels
 */
 private List<AggregateEvent> getAggregatedEvents(Interval timeRange, RootFilter filter, EventTypeZoomLevel zoomLevel, DescriptionLOD lod) {
 String descriptionColumn = getDescriptionColumn(lod);
@@ -945,7 +941,6 @@ public class EventDB {
 + " from events" + useHashHitTablesHelper(filter) + " where " + "time >= " + start + " and time < " + end + " and " + SQLHelper.getSQLWhere(filter) // NON-NLS
 + " group by interval, " + useSubTypeHelper(useSubTypes) + " , " + descriptionColumn // NON-NLS
 + " order by Min(time)"; // NON-NLS
-System.out.println(query);
 // scoop up requested events in groups organized by interval, type, and desription
 try (ResultSet rs = con.createStatement().executeQuery(query);) {
 while (rs.next()) {
==== EventsRepository.java ====

@@ -44,7 +44,6 @@ import org.joda.time.Interval;
 import org.openide.util.NbBundle;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.casemodule.services.TagsManager;
-import org.sleuthkit.autopsy.coreutils.HashHitUtils;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.timeline.ProgressWindow;
 import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
@@ -284,9 +283,8 @@ public class EventsRepository {
 String rootFolder = StringUtils.substringBetween(parentPath, "/", "/");
 String shortDesc = datasourceName + "/" + StringUtils.defaultIfBlank(rootFolder, "");
 String medD = datasourceName + parentPath;
 final TskData.FileKnown known = f.getKnown();
-Set<String> hashSets = HashHitUtils.getHashSetNamesForFile(skCase, f.getId());
+Set<String> hashSets = f.getHashSetNames() ;
 boolean tagged = !tagsManager.getContentTagsByContent(f).isEmpty();
 
 //insert it into the db if time is > 0 => time is legitimate (drops logical files)
@@ -396,10 +394,8 @@ public class EventsRepository {
 long datasourceID = skCase.getContentById(bbart.getObjectID()).getDataSource().getId();
 
 AbstractFile f = skCase.getAbstractFileById(bbart.getObjectID());
-Set<String> hashSets = HashHitUtils.getHashSetNamesForFile(skCase, f.getId()) ;
-boolean tagged = tagsManager.getContentTagsByContent(f).isEmpty() == false;
-tagged |= tagsManager.getBlackboardArtifactTagsByArtifact(bbart).isEmpty() == false;
+Set<String> hashSets = f.getHashSetNames();
+boolean tagged = tagsManager.getBlackboardArtifactTagsByArtifact(bbart).isEmpty() == false;
 
 eventDB.insertEvent(eventDescription.getTime(), type, datasourceID, bbart.getObjectID(), bbart.getArtifactID(), eventDescription.getFullDescription(), eventDescription.getMedDescription(), eventDescription.getShortDescription(), null, hashSets, tagged, trans);
 }
@ -19,10 +19,10 @@
|
|||||||
package org.sleuthkit.autopsy.timeline.ui.detailview;
|
package org.sleuthkit.autopsy.timeline.ui.detailview;
|
||||||
|
|
||||||
import com.google.common.eventbus.Subscribe;
|
import com.google.common.eventbus.Subscribe;
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
import java.util.Set;
|
||||||
import java.util.concurrent.ExecutionException;
|
import java.util.concurrent.ExecutionException;
|
||||||
import java.util.logging.Level;
|
import java.util.logging.Level;
|
||||||
import java.util.stream.Collectors;
|
import java.util.stream.Collectors;
|
||||||
@ -77,10 +77,8 @@ import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
|
|||||||
import org.sleuthkit.datamodel.AbstractFile;
|
import org.sleuthkit.datamodel.AbstractFile;
|
||||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||||
import org.sleuthkit.datamodel.BlackboardArtifactTag;
|
import org.sleuthkit.datamodel.BlackboardArtifactTag;
|
||||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
|
||||||
import org.sleuthkit.datamodel.ContentTag;
|
import org.sleuthkit.datamodel.ContentTag;
|
||||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||||
import org.sleuthkit.datamodel.TagName;
|
|
||||||
import org.sleuthkit.datamodel.TskCoreException;
|
import org.sleuthkit.datamodel.TskCoreException;
|
||||||
|
|
||||||
/** Represents an {@link AggregateEvent} in a {@link EventDetailChart}. */
|
/** Represents an {@link AggregateEvent} in a {@link EventDetailChart}. */
|
||||||
@ -99,7 +97,7 @@ public class AggregateEventNode extends StackPane {
|
|||||||
private static final Border selectionBorder = new Border(new BorderStroke(Color.BLACK, BorderStrokeStyle.SOLID, CORNER_RADII, new BorderWidths(2)));
|
private static final Border selectionBorder = new Border(new BorderStroke(Color.BLACK, BorderStrokeStyle.SOLID, CORNER_RADII, new BorderWidths(2)));
|
||||||
|
|
||||||
/** The event this AggregateEventNode represents visually */
|
/** The event this AggregateEventNode represents visually */
|
||||||
private AggregateEvent event;
|
private AggregateEvent aggEvent;
|
||||||
|
|
||||||
private final AggregateEventNode parentEventNode;
|
private final AggregateEventNode parentEventNode;
|
||||||
|
|
||||||
@ -158,9 +156,9 @@ public class AggregateEventNode extends StackPane {
|
|||||||
private final ImageView hashIV = new ImageView(HASH_PIN);
|
private final ImageView hashIV = new ImageView(HASH_PIN);
|
||||||
private final ImageView tagIV = new ImageView(TAG);
|
private final ImageView tagIV = new ImageView(TAG);
|
||||||
|
|
||||||
public AggregateEventNode(final AggregateEvent event, AggregateEventNode parentEventNode, EventDetailChart chart) {
|
public AggregateEventNode(final AggregateEvent aggEvent, AggregateEventNode parentEventNode, EventDetailChart chart) {
|
||||||
this.event = event;
|
this.aggEvent = aggEvent;
|
||||||
descLOD.set(event.getLOD());
|
descLOD.set(aggEvent.getLOD());
|
||||||
this.parentEventNode = parentEventNode;
|
this.parentEventNode = parentEventNode;
|
||||||
this.chart = chart;
|
this.chart = chart;
|
 sleuthkitCase = chart.getController().getAutopsyCase().getSleuthkitCase();
@@ -170,11 +168,11 @@ public class AggregateEventNode extends StackPane {
 HBox.setHgrow(region, Priority.ALWAYS);

 final HBox hBox = new HBox(descrLabel, countLabel, region, hashIV, tagIV, minusButton, plusButton);
-if (event.getEventIDsWithHashHits().isEmpty()) {
+if (aggEvent.getEventIDsWithHashHits().isEmpty()) {
 hashIV.setManaged(false);
 hashIV.setVisible(false);
 }
-if (event.getEventIDsWithTags().isEmpty()) {
+if (aggEvent.getEventIDsWithTags().isEmpty()) {
 tagIV.setManaged(false);
 tagIV.setVisible(false);
 }
@@ -208,7 +206,7 @@ public class AggregateEventNode extends StackPane {
 subNodePane.setPickOnBounds(false);

 //setup description label
-eventTypeImageView.setImage(event.getType().getFXImage());
+eventTypeImageView.setImage(aggEvent.getType().getFXImage());
 descrLabel.setGraphic(eventTypeImageView);
 descrLabel.setPrefWidth(USE_COMPUTED_SIZE);
 descrLabel.setTextOverrun(OverrunStyle.CENTER_ELLIPSIS);
@@ -217,7 +215,7 @@ public class AggregateEventNode extends StackPane {
 setDescriptionVisibility(chart.getDescrVisibility().get());

 //setup backgrounds
-final Color evtColor = event.getType().getColor();
+final Color evtColor = aggEvent.getType().getColor();
 spanFill = new Background(new BackgroundFill(evtColor.deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY));
 setBackground(new Background(new BackgroundFill(evtColor.deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY)));
 setCursor(Cursor.HAND);
@@ -247,7 +245,7 @@ public class AggregateEventNode extends StackPane {
 setOnMouseClicked(new EventMouseHandler());

 plusButton.disableProperty().bind(descLOD.isEqualTo(DescriptionLOD.FULL));
-minusButton.disableProperty().bind(descLOD.isEqualTo(event.getLOD()));
+minusButton.disableProperty().bind(descLOD.isEqualTo(aggEvent.getLOD()));

 plusButton.setOnMouseClicked(e -> {
 final DescriptionLOD next = descLOD.get().next();
@@ -268,18 +266,14 @@ public class AggregateEventNode extends StackPane {
 synchronized private void installTooltip() {
 //TODO: all this work should probably go on a background thread...
 if (tooltip == null) {

 HashMap<String, Long> hashSetCounts = new HashMap<>();
-if (!event.getEventIDsWithHashHits().isEmpty()) {
+if (!aggEvent.getEventIDsWithHashHits().isEmpty()) {
+hashSetCounts = new HashMap<>();
 try {
-for (TimeLineEvent tle : eventsModel.getEventsById(event.getEventIDsWithHashHits())) {
+for (TimeLineEvent tle : eventsModel.getEventsById(aggEvent.getEventIDsWithHashHits())) {
-ArrayList<BlackboardArtifact> blackboardArtifacts = sleuthkitCase.getBlackboardArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT, tle.getFileID());
+Set<String> hashSetNames = sleuthkitCase.getAbstractFileById(tle.getFileID()).getHashSetNames();
-for (BlackboardArtifact artf : blackboardArtifacts) {
+for (String hashSetName : hashSetNames) {
-for (BlackboardAttribute attr : artf.getAttributes()) {
+hashSetCounts.merge(hashSetName, 1L, Long::sum);
-if (attr.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()) {
-hashSetCounts.merge(attr.getValueString(), 1L, Long::sum);
-}
-}
 }
 }
 } catch (TskCoreException ex) {
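The rewritten tooltip code in the hunk above counts hash set hits directly with Map.merge instead of walking blackboard attributes. A small self-contained sketch of that counting idiom, with a made-up input list purely for illustration:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class HashSetCountSketch {
    public static void main(String[] args) {
        // Names as they might come back from getHashSetNames(), one entry per hit (illustrative data).
        List<String> hits = Arrays.asList("NSRL", "Known Bad", "NSRL");

        // Same idiom as the new code: merge(name, 1L, Long::sum) increments a per-name counter.
        Map<String, Long> counts = new HashMap<>();
        for (String name : hits) {
            counts.merge(name, 1L, Long::sum);
        }
        System.out.println(counts); // e.g. {NSRL=2, Known Bad=1}; HashMap order is not guaranteed
    }
}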
@@ -287,23 +281,23 @@ public class AggregateEventNode extends StackPane {
 }
 }

-Map<Long, TagName> tags = new HashMap<>();
+Map<String, Long> tagCounts = new HashMap<>();
-if (!event.getEventIDsWithTags().isEmpty()) {
+if (!aggEvent.getEventIDsWithTags().isEmpty()) {
 try {
-for (TimeLineEvent tle : eventsModel.getEventsById(event.getEventIDsWithTags())) {
+for (TimeLineEvent tle : eventsModel.getEventsById(aggEvent.getEventIDsWithTags())) {

 AbstractFile abstractFileById = sleuthkitCase.getAbstractFileById(tle.getFileID());
-List<ContentTag> contentTagsByContent = sleuthkitCase.getContentTagsByContent(abstractFileById);
+List<ContentTag> contentTags = sleuthkitCase.getContentTagsByContent(abstractFileById);
-for (ContentTag tag : contentTagsByContent) {
+for (ContentTag tag : contentTags) {
-tags.putIfAbsent(tag.getId(), tag.getName());
+tagCounts.merge(tag.getName().getDisplayName(), 1l, Long::sum);
 }

 Long artifactID = tle.getArtifactID();
 if (artifactID != 0) {
 BlackboardArtifact blackboardArtifact = sleuthkitCase.getBlackboardArtifact(artifactID);
-List<BlackboardArtifactTag> blackboardArtifactTagsByArtifact = sleuthkitCase.getBlackboardArtifactTagsByArtifact(blackboardArtifact);
+List<BlackboardArtifactTag> artifactTags = sleuthkitCase.getBlackboardArtifactTagsByArtifact(blackboardArtifact);
-for (BlackboardArtifactTag tag : blackboardArtifactTagsByArtifact) {
+for (BlackboardArtifactTag tag : artifactTags) {
-tags.putIfAbsent(tag.getId(), tag.getName());
+tagCounts.merge(tag.getName().getDisplayName(), 1l, Long::sum);
 }
 }
 }
@@ -312,9 +306,6 @@ public class AggregateEventNode extends StackPane {
 }
 }

-Map<String, Long> tagCounts = tags.values().stream()
-.collect(Collectors.toMap(TagName::getDisplayName, anything -> 1L, Long::sum));

 String hashSetCountsString = hashSetCounts.entrySet().stream()
 .map((Map.Entry<String, Long> t) -> t.getKey() + " : " + t.getValue())
 .collect(Collectors.joining("\n"));
@@ -339,7 +330,7 @@ public class AggregateEventNode extends StackPane {
 }

 synchronized public AggregateEvent getEvent() {
-return event;
+return aggEvent;
 }

 /**
@@ -365,10 +356,9 @@ public class AggregateEventNode extends StackPane {
 /** @param descrVis the level of description that should be displayed */
 synchronized final void setDescriptionVisibility(DescriptionVisibility descrVis) {
 this.descrVis = descrVis;
-final int size = event.getEventIDs().size();
+final int size = aggEvent.getEventIDs().size();

 switch (descrVis) {

 case COUNT_ONLY:
 descrLabel.setText("");
 countLabel.setText(String.valueOf(size));
@@ -379,7 +369,7 @@ public class AggregateEventNode extends StackPane {
 break;
 default:
 case SHOWN:
-String description = event.getDescription();
+String description = aggEvent.getDescription();
 description = parentEventNode != null
 ? " ..." + StringUtils.substringAfter(description, parentEventNode.getEvent().getDescription())
 : description;
@@ -411,14 +401,14 @@ public class AggregateEventNode extends StackPane {

 if (applied) {
 descrLabel.setStyle("-fx-font-weight: bold;"); // NON-NLS
-spanFill = new Background(new BackgroundFill(event.getType().getColor().deriveColor(0, 1, 1, .3), CORNER_RADII, Insets.EMPTY));
+spanFill = new Background(new BackgroundFill(aggEvent.getType().getColor().deriveColor(0, 1, 1, .3), CORNER_RADII, Insets.EMPTY));
 spanRegion.setBackground(spanFill);
-setBackground(new Background(new BackgroundFill(event.getType().getColor().deriveColor(0, 1, 1, .2), CORNER_RADII, Insets.EMPTY)));
+setBackground(new Background(new BackgroundFill(aggEvent.getType().getColor().deriveColor(0, 1, 1, .2), CORNER_RADII, Insets.EMPTY)));
 } else {
 descrLabel.setStyle("-fx-font-weight: normal;"); // NON-NLS
-spanFill = new Background(new BackgroundFill(event.getType().getColor().deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY));
+spanFill = new Background(new BackgroundFill(aggEvent.getType().getColor().deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY));
 spanRegion.setBackground(spanFill);
-setBackground(new Background(new BackgroundFill(event.getType().getColor().deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY)));
+setBackground(new Background(new BackgroundFill(aggEvent.getType().getColor().deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY)));
 }
 }

@@ -452,17 +442,17 @@ public class AggregateEventNode extends StackPane {
 */
 synchronized private void loadSubClusters(DescriptionLOD newDescriptionLOD) {
 getSubNodePane().getChildren().clear();
-if (newDescriptionLOD == event.getLOD()) {
+if (newDescriptionLOD == aggEvent.getLOD()) {
 chart.setRequiresLayout(true);
 chart.requestChartLayout();
 } else {
 RootFilter combinedFilter = eventsModel.filter().get().copyOf();
 //make a new filter intersecting the global filter with text(description) and type filters to restrict sub-clusters
-combinedFilter.getSubFilters().addAll(new TextFilter(event.getDescription()),
+combinedFilter.getSubFilters().addAll(new TextFilter(aggEvent.getDescription()),
-new TypeFilter(event.getType()));
+new TypeFilter(aggEvent.getType()));

 //make a new end inclusive span (to 'filter' with)
-final Interval span = event.getSpan().withEndMillis(event.getSpan().getEndMillis() + 1000);
+final Interval span = aggEvent.getSpan().withEndMillis(aggEvent.getSpan().getEndMillis() + 1000);

 //make a task to load the subnodes
 LoggedTask<List<AggregateEventNode>> loggedTask = new LoggedTask<List<AggregateEventNode>>(
@@ -532,11 +522,11 @@ public class AggregateEventNode extends StackPane {

 @Subscribe
 synchronized public void handleEventsUnTagged(EventsUnTaggedEvent tagEvent) {
-AggregateEvent withTagsRemoved = event.withTagsRemoved(tagEvent.getEventIDs());
+AggregateEvent withTagsRemoved = aggEvent.withTagsRemoved(tagEvent.getEventIDs());
-if (withTagsRemoved != event) {
+if (withTagsRemoved != aggEvent) {
-event = withTagsRemoved;
+aggEvent = withTagsRemoved;
 tooltip = null;
-boolean hasTags = event.getEventIDsWithTags().isEmpty() == false;
+boolean hasTags = aggEvent.getEventIDsWithTags().isEmpty() == false;
 Platform.runLater(() -> {
 tagIV.setManaged(hasTags);
 tagIV.setVisible(hasTags);
@@ -546,9 +536,9 @@ public class AggregateEventNode extends StackPane {

 @Subscribe
 synchronized public void handleEventsTagged(EventsTaggedEvent tagEvent) {
-AggregateEvent withTagsAdded = event.withTagsAdded(tagEvent.getEventIDs());
+AggregateEvent withTagsAdded = aggEvent.withTagsAdded(tagEvent.getEventIDs());
-if (withTagsAdded != event) {
+if (withTagsAdded != aggEvent) {
-event = withTagsAdded;
+aggEvent = withTagsAdded;
 tooltip = null;
 Platform.runLater(() -> {
 tagIV.setManaged(true);
@@ -78,7 +78,7 @@ public class DrawableAttribute<T extends Comparable<T>> {
 = new DrawableAttribute<>(AttributeName.MODEL, "Camera Model", true, "camera.png", f -> Collections.singleton(f.getModel()));

 public final static DrawableAttribute<String> HASHSET
-= new DrawableAttribute<>(AttributeName.HASHSET, "Hashset", true, "hashset_hits.png", DrawableFile::getHashHitSetNames);
+= new DrawableAttribute<>(AttributeName.HASHSET, "Hashset", true, "hashset_hits.png", DrawableFile::getHashSetNamesUnchecked);

 public final static DrawableAttribute<Long> OBJ_ID
 = new DrawableAttribute<>(AttributeName.OBJ_ID, "Internal Object ID", true, "", f -> Collections.singleton(f.getId()));
@@ -40,13 +40,11 @@ import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 import java.util.logging.Level;
-import javax.annotation.Nonnull;
 import javax.annotation.concurrent.GuardedBy;
 import javax.swing.SortOrder;
 import org.apache.commons.lang3.StringUtils;
 import org.openide.util.Exceptions;
 import org.sleuthkit.autopsy.casemodule.Case;
-import org.sleuthkit.autopsy.coreutils.HashHitUtils;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.imagegallery.FileTypeUtils;
 import org.sleuthkit.autopsy.imagegallery.ImageGalleryController;
@@ -583,25 +581,29 @@ public final class DrawableDB {
 stmt.setBoolean(8, f.isAnalyzed());
 stmt.executeUpdate();

-for (String name : HashHitUtils.getHashSetNamesForFile(tskCase, f.getId())) {
+try {
+for (String name : f.getHashSetNames()) {

 // "insert or ignore into hash_sets (hash_set_name) values (?)"
 insertHashSetStmt.setString(1, name);
 insertHashSetStmt.executeUpdate();

 //TODO: use nested select to get hash_set_id rather than seperate statement/query
 //"select hash_set_id from hash_sets where hash_set_name = ?"
 selectHashSetStmt.setString(1, name);
 try (ResultSet rs = selectHashSetStmt.executeQuery()) {
 while (rs.next()) {
 int hashsetID = rs.getInt("hash_set_id");
 //"insert or ignore into hash_set_hits (hash_set_id, obj_id) values (?,?)";
 insertHashHitStmt.setInt(1, hashsetID);
 insertHashHitStmt.setLong(2, f.getId());
 insertHashHitStmt.executeUpdate();
 break;
+}
 }
 }
+} catch (TskCoreException ex) {
+LOGGER.log(Level.SEVERE, "failed to insert/update hash hits for file" + f.getName(), ex);
 }

 //and update all groups this file is in
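The TODO in the hunk above suggests folding the separate hash_set_id lookup into a single statement. A minimal sketch of that idea over plain JDBC, assuming the same hash_sets/hash_set_hits layout; the class, method, and statement text here are illustrative, not part of the commit:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

class HashSetHitInserterSketch {
    // Resolves hash_set_id with a nested select instead of a separate select statement.
    static void insertHashSetHit(Connection con, String hashSetName, long objId) throws SQLException {
        try (PreparedStatement insertHit = con.prepareStatement(
                "INSERT OR IGNORE INTO hash_set_hits (hash_set_id, obj_id) "
                + "SELECT hash_set_id, ? FROM hash_sets WHERE hash_set_name = ?")) {
            insertHit.setLong(1, objId);
            insertHit.setString(2, hashSetName);
            insertHit.executeUpdate();
        }
    }
}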
@@ -620,6 +622,7 @@ public final class DrawableDB {
 if (Case.isCaseOpen()) {
 LOGGER.log(Level.SEVERE, "failed to insert/update file" + f.getName(), ex);
 }

 } finally {
 dbWriteUnlock();
 }
@@ -875,7 +878,7 @@ public final class DrawableDB {

 query.append(orderByClause);

-if (orderByClause.equals("") == false) {
+if (orderByClause.isEmpty() == false) {
 String sortOrderClause = "";

 switch (sortOrder) {
@@ -958,7 +961,7 @@ public final class DrawableDB {
 return DrawableFile.create(f,
 areFilesAnalyzed(Collections.singleton(id)), isVideoFile(f));
 } catch (IllegalStateException ex) {
-LOGGER.log(Level.SEVERE, "there is no case open; failed to load file with id: " + id);
+LOGGER.log(Level.SEVERE, "there is no case open; failed to load file with id: {0}", id);
 return null;
 }
 }
@@ -1113,21 +1116,6 @@ public final class DrawableDB {
 }
 }

-/**
- * For the given fileID, get the names of all the hashsets that the file is
- * in.
- *
- * @param fileID the fileID to file all the hash sets for
- *
- * @return a set of names, each of which is a hashset that the given file is
- * in.
- */
-@Nonnull
-public Set<String> getHashSetsForFileFromAutopsy(long fileID) {
-return HashHitUtils.getHashSetNamesForFile(tskCase, fileID);
-
-}
-
 /**
 * For performance reasons, keep a list of all file IDs currently in the
 * drawable database. Otherwise the database is queried many times to
@@ -1194,7 +1182,7 @@ public final class DrawableDB {
 public boolean isVideoFile(AbstractFile f) {
 return isNull(f) ? false
 : videoFileMap.computeIfAbsent(f.getId(), id -> FileTypeUtils.isVideoFile(f));

 }

 /**
@@ -31,12 +31,12 @@ import javafx.beans.property.SimpleBooleanProperty;
 import javafx.beans.property.SimpleObjectProperty;
 import javafx.scene.image.Image;
 import javafx.util.Pair;
+import javax.annotation.Nonnull;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.text.WordUtils;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.imagegallery.FileTypeUtils;
-import org.sleuthkit.autopsy.imagegallery.ImageGalleryController;
 import org.sleuthkit.autopsy.imagegallery.ThumbnailCache;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.BlackboardArtifact;
@@ -57,6 +57,8 @@ import org.sleuthkit.datamodel.TskCoreException;
 */
 public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile {

+private static final Logger LOGGER = Logger.getLogger(DrawableFile.class.getName());
+
 public static DrawableFile<?> create(AbstractFile abstractFileById, boolean analyzed) {
 return create(abstractFileById, analyzed, FileTypeUtils.isVideoFile(abstractFileById));
 }
@@ -101,10 +103,6 @@ public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile

 public abstract boolean isVideo();

-public Collection<String> getHashHitSetNames() {
-return ImageGalleryController.getDefault().getHashSetManager().getHashSetsForFile(getId());
-}
-
 @Override
 public boolean isRoot() {
 return false;
@@ -226,13 +224,12 @@ public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile
 .orElse(Category.ZERO)
 );
 } catch (TskCoreException ex) {
-Logger.getLogger(DrawableFile.class.getName()).log(Level.WARNING, "problem looking up category for file " + this.getName(), ex);
+LOGGER.log(Level.WARNING, "problem looking up category for file " + this.getName(), ex);
 } catch (IllegalStateException ex) {
 // We get here many times if the case is closed during ingest, so don't print out a ton of warnings.
 }
 }

 public Image getThumbnail() {
 return ThumbnailCache.getDefault().get(this);
 }
@@ -263,7 +260,7 @@ public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile
 drawablePath = StringUtils.removeEnd(getUniquePath(), getName());
 return drawablePath;
 } catch (TskCoreException ex) {
-Logger.getLogger(DrawableFile.class.getName()).log(Level.WARNING, "failed to get drawablePath from {0}", getName());
+LOGGER.log(Level.WARNING, "failed to get drawablePath from {0}", getName());
 return "";
 }
 }
@@ -273,4 +270,14 @@ public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile
 Image thumbnail = getThumbnail();
 return Objects.nonNull(thumbnail) && thumbnail.errorProperty().get() == false;
 }

+@Nonnull
+public Set<String> getHashSetNamesUnchecked() {
+try {
+return getHashSetNames();
+} catch (TskCoreException ex) {
+LOGGER.log(Level.WARNING, "Failed to get hash set names", ex);
+return Collections.emptySet();
+}
+}
 }
@@ -3,8 +3,11 @@ package org.sleuthkit.autopsy.imagegallery.datamodel;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
+import java.util.Collections;
 import java.util.Set;
-import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableDB;
+import java.util.logging.Level;
+import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.datamodel.TskCoreException;

 /**
 * Manages a cache of hashset hits as a map from fileID to hashset names.
@@ -36,7 +39,12 @@ public class HashSetManager {
 * @return the names of the hashsets the given fileID is in
 */
 private Set<String> getHashSetsForFileHelper(long fileID) {
-return db.getHashSetsForFileFromAutopsy(fileID);
+try {
+return db.getFileFromID(fileID).getHashSetNames();
+} catch (TskCoreException ex) {
+Logger.getLogger(HashSetManager.class.getName()).log(Level.SEVERE, "Failed to get Hash Sets for file", ex);
+return Collections.emptySet();
+}
 }

 /**
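As its class comment says, HashSetManager keeps a cache of hashset hits keyed by file ID; the hunk above only changes how a cache miss is loaded. A minimal, self-contained sketch of that caching shape using the Guava types the imports reference; the class name, field, and loader body here are illustrative assumptions, not the class's actual code:

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.Collections;
import java.util.Set;

class HashSetNameCacheSketch {
    // fileID -> hash set names, loaded lazily on first request.
    private final LoadingCache<Long, Set<String>> hashSetCache = CacheBuilder.newBuilder()
            .build(CacheLoader.from(this::loadHashSetsForFile));

    // Stand-in for getHashSetsForFileHelper(); a real loader would ask the drawable database.
    private Set<String> loadHashSetsForFile(Long fileID) {
        return Collections.emptySet();
    }

    // Cached lookup; getUnchecked avoids a checked ExecutionException for loaders that do not throw.
    Set<String> getHashSetsForFile(long fileID) {
        return hashSetCache.getUnchecked(fileID);
    }

    // Drop a single entry, e.g. after new hash hits are written for that file.
    void invalidateHashSetsForFile(long fileID) {
        hashSetCache.invalidate(fileID);
    }
}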
@@ -85,7 +85,7 @@ public interface DrawableView {

 default boolean hasHashHit() {
 try {
-return getFile().map(DrawableFile::getHashHitSetNames)
+return getFile().map(DrawableFile::getHashSetNamesUnchecked)
 .map((Collection<String> t) -> t.isEmpty() == false)
 .orElse(false);

@@ -648,6 +648,12 @@ class ExtractRegistry extends Extract {
 }
 break;

+case "shellfolders": // NON-NLS
+// The User Shell Folders subkey stores the paths to Windows Explorer folders for the current user of the computer
+// (https://technet.microsoft.com/en-us/library/Cc962613.aspx).
+// No useful information. Skip.
+break;
+
 default:
 logger.log(Level.WARNING, "Unrecognized node name: {0}", dataType); //NON-NLS
 break;