Merge remote-tracking branch 'upstream/develop' into timeline_tags_visualiztion

Conflicts:
	Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java
	Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java
This commit is contained in:
jmillman 2015-08-05 11:23:51 -04:00
commit 2241e29529
16 changed files with 277 additions and 369 deletions

View File

@ -22,7 +22,6 @@ import java.awt.CardLayout;
import java.awt.Component;
import java.awt.Dimension;
import java.util.List;
import static java.util.Objects.nonNull;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.logging.Level;
@ -32,10 +31,7 @@ import org.openide.util.lookup.ServiceProvider;
import org.openide.util.lookup.ServiceProviders;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataContentViewer;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.AbstractFile.MimeMatchEnum;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM;
/**
@ -52,11 +48,8 @@ public class DataContentViewerMedia extends javax.swing.JPanel implements DataCo
private final MediaViewVideoPanel videoPanel;
private final boolean videoPanelInited;
private final SortedSet<String> videoExtensions; // get them from the panel
private final SortedSet<String> videoMimes;
private final MediaViewImagePanel imagePanel;
private final boolean imagePanelInited;
private final SortedSet<String> imageExtensions; // get them from the panel
private final SortedSet<String> imageMimes;
private static final String IMAGE_VIEWER_LAYER = "IMAGE"; //NON-NLS
private static final String VIDEO_VIEWER_LAYER = "VIDEO"; //NON-NLS
@ -72,12 +65,9 @@ public class DataContentViewerMedia extends javax.swing.JPanel implements DataCo
videoPanel = MediaViewVideoPanel.createVideoPanel();
videoPanelInited = videoPanel.isInited();
videoExtensions = new TreeSet<>(videoPanel.getExtensionsList());
videoMimes = new TreeSet<>(videoPanel.getMimeTypes());
imagePanel = new MediaViewImagePanel();
imagePanelInited = imagePanel.isInited();
imageExtensions = new TreeSet<>(imagePanel.getExtensionsList());
imageMimes = new TreeSet<>(imagePanel.getMimeTypes());
customizeComponents();
logger.log(Level.INFO, "Created MediaView instance: {0}", this); //NON-NLS
@ -239,10 +229,9 @@ public class DataContentViewerMedia extends javax.swing.JPanel implements DataCo
if (file == null) {
return 0;
}
String extension = file.getNameExtension();
boolean deleted = file.isDirNameFlagSet(TSK_FS_NAME_FLAG_ENUM.UNALLOC);
if (videoExtensions.contains("." + extension) && deleted) {
if (videoPanel.isSupported(file) && deleted) {
return 0;
} else {
return 7;
@ -263,28 +252,6 @@ public class DataContentViewerMedia extends javax.swing.JPanel implements DataCo
*/
List<String> getExtensionsList();
default boolean isSupported(AbstractFile file) {
SortedSet<String> mimeTypes = new TreeSet<>(getMimeTypes());
try {
String mimeType = new FileTypeDetector().getFileType(file);
if (nonNull(mimeType)) {
return mimeTypes.contains(mimeType);
}
} catch (FileTypeDetector.FileTypeDetectorInitException | TskCoreException ex) {
logger.log(Level.WARNING, "Failed to look up mimetype for " + file.getName() + " using FileTypeDetector. Fallingback on AbstractFile.isMimeType", ex);
if (!mimeTypes.isEmpty() && file.isMimeType(mimeTypes) == MimeMatchEnum.TRUE) {
return true;
}
}
String extension = file.getNameExtension();
if (getExtensionsList().contains("." + extension)) {
return true;
}
return false;
}
boolean isSupported(AbstractFile file);
}
}

View File

@ -220,8 +220,7 @@ public class MediaViewImagePanel extends JPanel implements DataContentViewerMedi
@Override
public boolean isSupported(AbstractFile file) {
return DataContentViewerMedia.MediaViewPanel.super.isSupported(file)
|| ImageUtils.hasImageFileHeader(file);
return ImageUtils.isImageThumbnailSupported(file);
}
/**

View File

@ -21,12 +21,16 @@ package org.sleuthkit.autopsy.corecomponents;
import java.awt.Dimension;
import java.util.Arrays;
import java.util.List;
import static java.util.Objects.nonNull;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.logging.Level;
import javax.swing.JPanel;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Video viewer part of the Media View layered pane.
@ -132,9 +136,21 @@ public abstract class MediaViewVideoPanel extends JPanel implements FrameCapture
String extension = file.getNameExtension();
//TODO: is this what we want, to require both extension and mimetype support?
if (AUDIO_EXTENSIONS.contains("." + extension) || getExtensionsList().contains("." + extension)) {
return DataContentViewerMedia.MediaViewPanel.super.isSupported(file); //To change body of generated methods, choose Tools | Templates.
SortedSet<String> mimeTypes = new TreeSet<>(getMimeTypes());
try {
String mimeType = new FileTypeDetector().getFileType(file);
if (nonNull(mimeType)) {
return mimeTypes.contains(mimeType);
}
} catch (FileTypeDetector.FileTypeDetectorInitException | TskCoreException ex) {
logger.log(Level.WARNING, "Failed to look up mimetype for " + file.getName() + " using FileTypeDetector. Fallingback on AbstractFile.isMimeType", ex);
if (!mimeTypes.isEmpty() && file.isMimeType(mimeTypes) == AbstractFile.MimeMatchEnum.TRUE) {
return true;
}
}
return getExtensionsList().contains("." + extension);
}
return false;
}
}

View File

@ -1,61 +0,0 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.sleuthkit.autopsy.coreutils;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import javax.annotation.Nonnull;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
/**
*
*/
public class HashHitUtils {
private static final Logger LOGGER = Logger.getLogger(HashHitUtils.class.getName());
/**
* For the given objID, get the names of all the hashsets that the object is
* in.
*
* @param tskCase
* @param objID the obj_id to find all the hash sets for
*
* @return a set of names, each of which is a hashset that the given object
* is in.
*
* //TODO: Move this into sleuthkitcase?
*/
@Nonnull
static public Set<String> getHashSetNamesForFile(SleuthkitCase tskCase, long objID) {
try {
Set<String> hashNames = new HashSet<>();
List<BlackboardArtifact> arts = tskCase.getBlackboardArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT, objID);
for (BlackboardArtifact a : arts) {
List<BlackboardAttribute> attrs = a.getAttributes();
for (BlackboardAttribute attr : attrs) {
if (attr.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()) {
hashNames.add(attr.getValueString());
}
}
}
return Collections.unmodifiableSet(hashNames);
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "failed to get hash sets for file", ex);
}
return Collections.emptySet();
}
private HashHitUtils() {
}
}

View File

@ -30,7 +30,6 @@ import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@ -74,26 +73,11 @@ public class ImageUtils {
private static final Logger logger = LOGGER;
private static final BufferedImage DEFAULT_THUMBNAIL;
private static final TreeSet<String> SUPPORTED_MIME_TYPES = new TreeSet<>();
private static final List<String> SUPPORTED_EXTENSIONS = new ArrayList<>();
private static final List<String> SUPPORTED_IMAGE_EXTENSIONS;
private static final List<String> SUPPORTED_VIDEO_EXTENSIONS
= Arrays.asList("mov", "m4v", "flv", "mp4", "3gp", "avi", "mpg",
"mpeg", "asf", "divx", "rm", "moov", "wmv", "vob", "dat",
"m1v", "m2v", "m4v", "mkv", "mpe", "yop", "vqa", "xmv",
"mve", "wtv", "webm", "vivo", "vc1", "seq", "thp", "san",
"mjpg", "smk", "vmd", "sol", "cpk", "sdp", "sbg", "rtsp",
"rpl", "rl2", "r3d", "mlp", "mjpeg", "hevc", "h265", "265",
"h264", "h263", "h261", "drc", "avs", "pva", "pmp", "ogg",
"nut", "nuv", "nsv", "mxf", "mtv", "mvi", "mxg", "lxf",
"lvf", "ivf", "mve", "cin", "hnm", "gxf", "fli", "flc",
"flx", "ffm", "wve", "uv2", "dxa", "dv", "cdxl", "cdg",
"bfi", "jv", "bik", "vid", "vb", "son", "avs", "paf", "mm",
"flm", "tmv", "4xm"); //NON-NLS
private static final TreeSet<String> SUPPORTED_IMAGE_MIME_TYPES;
private static final List<String> SUPPORTED_VIDEO_MIME_TYPES
= Arrays.asList("application/x-shockwave-flash", "video/x-m4v", "video/quicktime", "video/avi", "video/msvideo", "video/x-msvideo",
"video/mp4", "video/x-ms-wmv", "video/mpeg", "video/asf"); //NON-NLS
private static final List<String> CONDITIONAL_MIME_TYPES = Arrays.asList("audio/x-aiff", "application/octet-stream");
private static final boolean openCVLoaded;
static {
@ -128,31 +112,19 @@ public class ImageUtils {
openCVLoaded = openCVLoadedTemp;
SUPPORTED_IMAGE_EXTENSIONS = Arrays.asList(ImageIO.getReaderFileSuffixes());
SUPPORTED_EXTENSIONS.addAll(SUPPORTED_IMAGE_EXTENSIONS);
SUPPORTED_EXTENSIONS.addAll(SUPPORTED_VIDEO_EXTENSIONS);
SUPPORTED_IMAGE_MIME_TYPES = new TreeSet<>(Arrays.asList(ImageIO.getReaderMIMETypes()));
/* special cases and variants that we support, but don't get registered
* with ImageIO automatically */
SUPPORTED_IMAGE_MIME_TYPES.addAll(Arrays.asList(
"image/x-rgb",
"image/x-ms-bmp",
"image/x-portable-graymap",
"image/x-portable-bitmap",
"application/x-123"));
SUPPORTED_MIME_TYPES.addAll(SUPPORTED_IMAGE_MIME_TYPES);
SUPPORTED_MIME_TYPES.addAll(SUPPORTED_VIDEO_MIME_TYPES);
//this is rarely useful
SUPPORTED_MIME_TYPES.removeIf("application/octet-stream"::equals);
SUPPORTED_IMAGE_MIME_TYPES.removeIf("application/octet-stream"::equals);
}
/**
* Get the default Icon, which is the icon for a file.
*
* @return
*
*
*
* /** initialized lazily */
/** initialized lazily */
private static FileTypeDetector fileTypeDetector;
/** thread that saves generated thumbnails to disk in the background */
@ -167,26 +139,10 @@ public class ImageUtils {
return Collections.unmodifiableList(SUPPORTED_IMAGE_EXTENSIONS);
}
public static List<String> getSupportedVideoExtensions() {
return SUPPORTED_VIDEO_EXTENSIONS;
}
public static SortedSet<String> getSupportedImageMimeTypes() {
return Collections.unmodifiableSortedSet(SUPPORTED_IMAGE_MIME_TYPES);
}
public static List<String> getSupportedVideoMimeTypes() {
return SUPPORTED_VIDEO_MIME_TYPES;
}
public static List<String> getSupportedExtensions() {
return Collections.unmodifiableList(SUPPORTED_EXTENSIONS);
}
public static SortedSet<String> getSupportedMimeTypes() {
return Collections.unmodifiableSortedSet(SUPPORTED_MIME_TYPES);
}
/**
* Get the default thumbnail, which is the icon for a file. Used when we can
* not
@ -229,31 +185,59 @@ public class ImageUtils {
}
AbstractFile file = (AbstractFile) content;
return VideoUtils.isVideoThumbnailSupported(file)
|| isImageThumbnailSupported(file);
}
public static boolean isImageThumbnailSupported(AbstractFile file) {
return isMediaThumbnailSupported(file, SUPPORTED_IMAGE_MIME_TYPES, SUPPORTED_IMAGE_EXTENSIONS, CONDITIONAL_MIME_TYPES)
|| hasImageFileHeader(file);
}
/**
* Check if a file is "supported" by checking it mimetype and extension
*
* //TODO: this should move to a better place. Should ImageUtils and
* VideoUtils both implement/extend some base interface/abstract class. That
* would be the natural place to put this.
*
* @param file
* @param supportedMimeTypes a set of mimetypes that the could have to be
* supported
* @param supportedExtension a set of extensions a file could have to be
* supported if the mime lookup fails or is
* inconclusive
* @param conditionalMimes a set of mimetypes that a file could have to be
* supported if it also has a supported extension
*
* @return true if a thumbnail can be generated for the given file with the
* given lists of supported mimetype and extensions
*/
static boolean isMediaThumbnailSupported(AbstractFile file, final SortedSet<String> supportedMimeTypes, final List<String> supportedExtension, List<String> conditionalMimes) {
if (file.getSize() == 0) {
return false;
}
final String extension = file.getNameExtension();
try {
String mimeType = getFileTypeDetector().getFileType(file);
if (Objects.nonNull(mimeType)) {
return SUPPORTED_MIME_TYPES.contains(mimeType)
|| (mimeType.equalsIgnoreCase("audio/x-aiff") && "iff".equalsIgnoreCase(file.getNameExtension()));
return supportedMimeTypes.contains(mimeType)
|| (conditionalMimes.contains(mimeType.toLowerCase()) && supportedExtension.contains(extension));
}
} catch (FileTypeDetector.FileTypeDetectorInitException | TskCoreException ex) {
LOGGER.log(Level.WARNING, "Failed to look up mimetype for " + file.getName() + " using FileTypeDetector. Fallingback on AbstractFile.isMimeType", ex);
AbstractFile.MimeMatchEnum mimeMatch = file.isMimeType(SUPPORTED_MIME_TYPES);
AbstractFile.MimeMatchEnum mimeMatch = file.isMimeType(supportedMimeTypes);
if (mimeMatch == AbstractFile.MimeMatchEnum.TRUE) {
return true;
} else if (mimeMatch == AbstractFile.MimeMatchEnum.FALSE) {
return false;
}
}
// if we have an extension, check it
final String extension = file.getNameExtension();
if (StringUtils.isNotBlank(extension) && SUPPORTED_EXTENSIONS.contains(extension)) {
return true;
}
// if no extension or one that is not for an image, then read the content
return isJpegFileHeader(file) || isPngFileHeader(file);
return StringUtils.isNotBlank(extension) && supportedExtension.contains(extension);
}
/**
@ -303,22 +287,27 @@ public class ImageUtils {
* problem making a thumbnail.
*/
public static Image getThumbnail(Content content, int iconSize) {
// If a thumbnail file is already saved locally
File cacheFile = getCachedThumbnailLocation(content.getId());
if (cacheFile.exists()) {
try {
BufferedImage thumbnail = ImageIO.read(cacheFile);
if (isNull(thumbnail) || thumbnail.getWidth() != iconSize) {
return generateAndSaveThumbnail(content, iconSize, cacheFile);
} else {
return thumbnail;
if (content instanceof AbstractFile) {
AbstractFile file = (AbstractFile) content;
// If a thumbnail file is already saved locally
File cacheFile = getCachedThumbnailLocation(content.getId());
if (cacheFile.exists()) {
try {
BufferedImage thumbnail = ImageIO.read(cacheFile);
if (isNull(thumbnail) || thumbnail.getWidth() != iconSize) {
return generateAndSaveThumbnail(file, iconSize, cacheFile);
} else {
return thumbnail;
}
} catch (Exception ex) {
LOGGER.log(Level.WARNING, "Error while reading image: " + content.getName(), ex); //NON-NLS
return generateAndSaveThumbnail(file, iconSize, cacheFile);
}
} catch (Exception ex) {
LOGGER.log(Level.WARNING, "Error while reading image: " + content.getName(), ex); //NON-NLS
return generateAndSaveThumbnail(content, iconSize, cacheFile);
} else {
return generateAndSaveThumbnail(file, iconSize, cacheFile);
}
} else {
return generateAndSaveThumbnail(content, iconSize, cacheFile);
return DEFAULT_THUMBNAIL;
}
}
@ -458,25 +447,23 @@ public class ImageUtils {
/**
* Generate an icon and save it to specified location.
*
* @param content File to generate icon for
* @param file File to generate icon for
* @param iconSize
* @param cacheFile Location to save thumbnail to
*
* @return Generated icon or null on error
*/
private static Image generateAndSaveThumbnail(Content content, int iconSize, File cacheFile) {
AbstractFile f = (AbstractFile) content;
final String extension = f.getNameExtension();
private static Image generateAndSaveThumbnail(AbstractFile file, int iconSize, File cacheFile) {
BufferedImage thumbnail = null;
try {
if (SUPPORTED_VIDEO_EXTENSIONS.contains(extension)) {
if (VideoUtils.isVideoThumbnailSupported(file)) {
if (openCVLoaded) {
thumbnail = VideoUtils.generateVideoThumbnail((AbstractFile) content, iconSize);
thumbnail = VideoUtils.generateVideoThumbnail(file, iconSize);
} else {
return DEFAULT_THUMBNAIL;
}
} else {
thumbnail = generateImageThumbnail(content, iconSize);
thumbnail = generateImageThumbnail(file, iconSize);
}
if (thumbnail == null) {
@ -492,12 +479,12 @@ public class ImageUtils {
}
ImageIO.write(toSave, FORMAT, cacheFile);
} catch (IllegalArgumentException | IOException ex1) {
LOGGER.log(Level.WARNING, "Could not write cache thumbnail: " + content, ex1); //NON-NLS
LOGGER.log(Level.WARNING, "Could not write cache thumbnail: " + file, ex1); //NON-NLS
}
});
}
} catch (NullPointerException ex) {
logger.log(Level.WARNING, "Could not write cache thumbnail: " + content, ex); //NON-NLS
logger.log(Level.WARNING, "Could not write cache thumbnail: " + file, ex); //NON-NLS
}
return thumbnail;
}

View File

@ -22,6 +22,11 @@ import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.logging.Level;
import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.progress.ProgressHandleFactory;
@ -29,6 +34,7 @@ import org.opencv.core.Mat;
import org.opencv.highgui.VideoCapture;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.corelibs.ScalrWrapper;
import static org.sleuthkit.autopsy.coreutils.ImageUtils.isMediaThumbnailSupported;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.datamodel.AbstractFile;
@ -37,6 +43,34 @@ import org.sleuthkit.datamodel.AbstractFile;
*/
public class VideoUtils {
private static final List<String> SUPPORTED_VIDEO_EXTENSIONS
= Arrays.asList("mov", "m4v", "flv", "mp4", "3gp", "avi", "mpg",
"mpeg", "asf", "divx", "rm", "moov", "wmv", "vob", "dat",
"m1v", "m2v", "m4v", "mkv", "mpe", "yop", "vqa", "xmv",
"mve", "wtv", "webm", "vivo", "vc1", "seq", "thp", "san",
"mjpg", "smk", "vmd", "sol", "cpk", "sdp", "sbg", "rtsp",
"rpl", "rl2", "r3d", "mlp", "mjpeg", "hevc", "h265", "265",
"h264", "h263", "h261", "drc", "avs", "pva", "pmp", "ogg",
"nut", "nuv", "nsv", "mxf", "mtv", "mvi", "mxg", "lxf",
"lvf", "ivf", "mve", "cin", "hnm", "gxf", "fli", "flc",
"flx", "ffm", "wve", "uv2", "dxa", "dv", "cdxl", "cdg",
"bfi", "jv", "bik", "vid", "vb", "son", "avs", "paf", "mm",
"flm", "tmv", "4xm"); //NON-NLS
private static final SortedSet<String> SUPPORTED_VIDEO_MIME_TYPES = new TreeSet<>(
Arrays.asList("application/x-shockwave-flash", "video/x-m4v", "video/quicktime", "video/avi", "video/msvideo", "video/x-msvideo",
"video/mp4", "video/x-ms-wmv", "video/mpeg", "video/asf")); //NON-NLS
private static final List<String> CONDITIONAL_MIME_TYPES = Arrays.asList("application/octet-stream");
public static List<String> getSupportedVideoExtensions() {
return SUPPORTED_VIDEO_EXTENSIONS;
}
public static SortedSet<String> getSupportedVideoMimeTypes() {
return Collections.unmodifiableSortedSet(SUPPORTED_VIDEO_MIME_TYPES);
}
private static final int THUMB_COLUMNS = 3;
private static final int THUMB_ROWS = 3;
private static final int CV_CAP_PROP_POS_MSEC = 0;
@ -52,6 +86,10 @@ public class VideoUtils {
return Paths.get(Case.getCurrentCase().getTempDirectory(), "videos", file.getId() + "." + file.getNameExtension()).toFile();
}
public static boolean isVideoThumbnailSupported(AbstractFile file) {
return isMediaThumbnailSupported(file, SUPPORTED_VIDEO_MIME_TYPES, SUPPORTED_VIDEO_EXTENSIONS, CONDITIONAL_MIME_TYPES);
}
static BufferedImage generateVideoThumbnail(AbstractFile file, int iconSize) {
java.io.File tempFile = getTempVideoFile(file);
@ -93,11 +131,11 @@ public class VideoUtils {
for (int x = 0; x < THUMB_COLUMNS; x++) {
for (int y = 0; y < THUMB_ROWS; y++) {
if (!videoFile.set(CV_CAP_PROP_POS_MSEC, timestamp + x * framkeskip + y * framkeskip * THUMB_COLUMNS)) {
break;
break; // if we can't set the time, return black for that frame
}
//read the frame into the image/matrix
if (!videoFile.read(imageMatrix)) {
break; //if the image for some reason is bad, return default icon
break; //if the image for some reason is bad, return black for that frame
}
if (bufferedImage == null) {
@ -122,6 +160,6 @@ public class VideoUtils {
videoFile.release(); // close the file
return ScalrWrapper.resizeFast(bufferedImage, iconSize);
return bufferedImage == null ? bufferedImage : ScalrWrapper.resizeFast(bufferedImage, iconSize);
}
}

View File

@ -18,17 +18,12 @@
*/
package org.sleuthkit.autopsy.datamodel;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Map;
import java.util.logging.Level;
import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbQuery;
import org.sleuthkit.datamodel.TskCoreException;
/**
@ -38,7 +33,7 @@ import org.sleuthkit.datamodel.TskCoreException;
*/
public abstract class AbstractAbstractFileNode<T extends AbstractFile> extends AbstractContentNode<T> {
private static Logger logger = Logger.getLogger(AbstractAbstractFileNode.class.getName());
private static final Logger LOGGER = Logger.getLogger(AbstractAbstractFileNode.class.getName());
/**
* @param <T> type of the AbstractFile data to encapsulate
@ -162,7 +157,6 @@ public abstract class AbstractAbstractFileNode<T extends AbstractFile> extends A
MD5HASH {
@Override
public String toString() {
return NbBundle.getMessage(this.getClass(), "AbstractAbstractFileNode.md5HashColLbl");
}
},
@ -189,7 +183,7 @@ public abstract class AbstractAbstractFileNode<T extends AbstractFile> extends A
try {
path = content.getUniquePath();
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Except while calling Content.getUniquePath() on {0}", content); //NON-NLS
LOGGER.log(Level.SEVERE, "Except while calling Content.getUniquePath() on {0}", content); //NON-NLS
}
map.put(AbstractFilePropertyType.NAME.toString(), AbstractAbstractFileNode.getContentDisplayName(content));
@ -219,44 +213,22 @@ public abstract class AbstractAbstractFileNode<T extends AbstractFile> extends A
String name = file.getName();
switch (name) {
case "..":
name = DirectoryNode.DOTDOTDIR;
break;
return DirectoryNode.DOTDOTDIR;
case ".":
name = DirectoryNode.DOTDIR;
break;
return DirectoryNode.DOTDIR;
default:
return name;
}
return name;
}
@SuppressWarnings("deprecation")
private static String getHashSetHitsForFile(AbstractFile content) {
String strList = "";
SleuthkitCase skCase = content.getSleuthkitCase();
long objId = content.getId();
int setNameId = BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID();
int artId = BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID();
String query = "SELECT value_text,blackboard_attributes.artifact_id,attribute_type_id " //NON-NLS
+ "FROM blackboard_attributes,blackboard_artifacts WHERE " //NON-NLS
+ "attribute_type_id=" + setNameId //NON-NLS
+ " AND blackboard_attributes.artifact_id=blackboard_artifacts.artifact_id" //NON-NLS
+ " AND blackboard_artifacts.artifact_type_id=" + artId //NON-NLS
+ " AND blackboard_artifacts.obj_id=" + objId; //NON-NLS
try (CaseDbQuery dbQuery = skCase.executeQuery(query)) {
ResultSet resultSet = dbQuery.getResultSet();
int i = 0;
while (resultSet.next()) {
if (i++ > 0) {
strList += ", ";
}
strList += resultSet.getString("value_text"); //NON-NLS
}
} catch (TskCoreException | SQLException ex) {
logger.log(Level.WARNING, "Error getting hashset hits: ", ex); //NON-NLS
try {
return StringUtils.join(content.getHashSetNames(), ", ");
} catch (TskCoreException tskCoreException) {
LOGGER.log(Level.WARNING, "Error getting hashset hits: ", tskCoreException); //NON-NLS
return "";
}
return strList;
}
}
}

View File

@ -18,7 +18,6 @@
*/
package org.sleuthkit.autopsy.timeline.events.db;
import com.google.common.base.Stopwatch;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.SetMultimap;
import java.nio.file.Paths;
@ -82,14 +81,15 @@ import org.sqlite.SQLiteJDBCLoader;
/**
* Provides access to the Timeline SQLite database.
*
* This class borrows a lot of ideas and techniques from
* {@link SleuthkitCase}. Creating an abstract base class for SQLite
* databases, or using a higherlevel persistence api may make sense in the
* future.
* This class borrows a lot of ideas and techniques from {@link SleuthkitCase}.
* Creating an abstract base class for SQLite databases, or using a higherlevel
* persistence api may make sense in the future.
*/
public class EventDB {
/** enum to represent keys stored in db_info table */
/**
* enum to represent keys stored in db_info table
*/
private enum DBInfoKey {
LAST_ARTIFACT_ID("last_artifact_id"), // NON-NLS
@ -221,8 +221,8 @@ public class EventDB {
}
/**
* @return the total number of events in the database or,
* -1 if there is an error.
* @return the total number of events in the database or, -1 if there is an
* error.
*/
int countAllEvents() {
DBLock.lock();
@ -292,7 +292,6 @@ public class EventDB {
if (end2 == 0) {
end2 = getMaxTime();
}
//System.out.println(start2 + " " + start + " " + end + " " + end2);
return new Interval(start2 * 1000, (end2 + 1) * 1000, TimeLineController.getJodaTimeZone());
}
} catch (SQLException ex) {
@ -335,7 +334,6 @@ public class EventDB {
DBLock.lock();
final String query = "select event_id from from events" + useHashHitTablesHelper(filter) + " where time >= " + startTime + " and time <" + endTime + " and " + SQLHelper.getSQLWhere(filter); // NON-NLS
//System.out.println(query);
try (Statement stmt = con.createStatement();
ResultSet rs = stmt.executeQuery(query)) {
@ -361,8 +359,10 @@ public class EventDB {
}
boolean hasNewColumns() {
/* this relies on the fact that no tskObj has ID 0 but 0 is the default
* value for the datasource_id column in the events table. */
/*
* this relies on the fact that no tskObj has ID 0 but 0 is the default
* value for the datasource_id column in the events table.
*/
return hasHashHitColumn() && hasDataSourceIDColumn() && hasTaggedColumn()
&& (getDataSourceIDs().isEmpty() == false);
}
@ -403,7 +403,9 @@ public class EventDB {
return Collections.unmodifiableMap(hashSets);
}
/** @return maximum time in seconds from unix epoch */
/**
* @return maximum time in seconds from unix epoch
*/
Long getMaxTime() {
DBLock.lock();
try (ResultSet rs = getMaxTimeStmt.executeQuery()) {
@ -418,7 +420,9 @@ public class EventDB {
return -1l;
}
/** @return maximum time in seconds from unix epoch */
/**
* @return maximum time in seconds from unix epoch
*/
Long getMinTime() {
DBLock.lock();
try (ResultSet rs = getMinTimeStmt.executeQuery()) {
@ -837,8 +841,8 @@ public class EventDB {
* from unix epoch)
* @param filter only events that pass this filter will be counted
* @param zoomLevel only events of this type or a subtype will be counted
* and the counts will be organized into bins for each of the subtypes of
* the given event type
* and the counts will be organized into bins for each of
* the subtypes of the given event type
*
* @return a map organizing the counts in a hierarchy from date > eventtype>
* count
@ -860,14 +864,7 @@ public class EventDB {
ResultSet rs = null;
DBLock.lock();
//System.out.println(queryString);
try (Statement stmt = con.createStatement();) {
Stopwatch stopwatch = new Stopwatch();
stopwatch.start();
System.out.println(queryString);
rs = stmt.executeQuery(queryString);
stopwatch.stop();
// System.out.println(stopwatch.elapsedMillis() / 1000.0 + " seconds");
while (rs.next()) {
EventType type = useSubTypes
@ -895,16 +892,15 @@ public class EventDB {
}
/**
* //TODO: update javadoc //TODO: split this into helper methods
* //TODO: update javadoc, and split this into helper methods
*
* get a list of {@link AggregateEvent}s.
*
* General algorithm is as follows:
*
* - get all aggregate events, via one db query.
* - sort them into a map from (type, description)-> aggevent
* - for each key in map, merge the events and accumulate them in a list
* to return
* 1) get all aggregate events, via one db query. 2) sort them into a map
* from (type, description)-> aggevent 3) for each key in map, merge the
* events and accumulate them in a list to return
*
*
* @param timeRange the Interval within in which all returned aggregate
@ -916,8 +912,8 @@ public class EventDB {
*
*
* @return a list of aggregate events within the given timerange, that pass
* the supplied filter, aggregated according to the given event type and
* description zoom levels
* the supplied filter, aggregated according to the given event type
* and description zoom levels
*/
private List<AggregateEvent> getAggregatedEvents(Interval timeRange, RootFilter filter, EventTypeZoomLevel zoomLevel, DescriptionLOD lod) {
String descriptionColumn = getDescriptionColumn(lod);
@ -945,7 +941,6 @@ public class EventDB {
+ " from events" + useHashHitTablesHelper(filter) + " where " + "time >= " + start + " and time < " + end + " and " + SQLHelper.getSQLWhere(filter) // NON-NLS
+ " group by interval, " + useSubTypeHelper(useSubTypes) + " , " + descriptionColumn // NON-NLS
+ " order by Min(time)"; // NON-NLS
System.out.println(query);
// scoop up requested events in groups organized by interval, type, and description
try (ResultSet rs = con.createStatement().executeQuery(query);) {
while (rs.next()) {

View File

@ -44,7 +44,6 @@ import org.joda.time.Interval;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.TagsManager;
import org.sleuthkit.autopsy.coreutils.HashHitUtils;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.ProgressWindow;
import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
@ -284,9 +283,8 @@ public class EventsRepository {
String rootFolder = StringUtils.substringBetween(parentPath, "/", "/");
String shortDesc = datasourceName + "/" + StringUtils.defaultIfBlank(rootFolder, "");
String medD = datasourceName + parentPath;
final TskData.FileKnown known = f.getKnown();
Set<String> hashSets = HashHitUtils.getHashSetNamesForFile(skCase, f.getId());
final TskData.FileKnown known = f.getKnown();
Set<String> hashSets = f.getHashSetNames() ;
boolean tagged = !tagsManager.getContentTagsByContent(f).isEmpty();
//insert it into the db if time is > 0 => time is legitimate (drops logical files)
@ -396,10 +394,8 @@ public class EventsRepository {
long datasourceID = skCase.getContentById(bbart.getObjectID()).getDataSource().getId();
AbstractFile f = skCase.getAbstractFileById(bbart.getObjectID());
Set<String> hashSets = HashHitUtils.getHashSetNamesForFile(skCase, f.getId()) ;
boolean tagged = tagsManager.getContentTagsByContent(f).isEmpty() == false;
tagged |= tagsManager.getBlackboardArtifactTagsByArtifact(bbart).isEmpty() == false;
Set<String> hashSets = f.getHashSetNames();
boolean tagged = tagsManager.getBlackboardArtifactTagsByArtifact(bbart).isEmpty() == false;
eventDB.insertEvent(eventDescription.getTime(), type, datasourceID, bbart.getObjectID(), bbart.getArtifactID(), eventDescription.getFullDescription(), eventDescription.getMedDescription(), eventDescription.getShortDescription(), null, hashSets, tagged, trans);
}

View File

@ -19,10 +19,10 @@
package org.sleuthkit.autopsy.timeline.ui.detailview;
import com.google.common.eventbus.Subscribe;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import java.util.stream.Collectors;
@ -77,10 +77,8 @@ import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskCoreException;
/** Represents an {@link AggregateEvent} in a {@link EventDetailChart}. */
@ -99,7 +97,7 @@ public class AggregateEventNode extends StackPane {
private static final Border selectionBorder = new Border(new BorderStroke(Color.BLACK, BorderStrokeStyle.SOLID, CORNER_RADII, new BorderWidths(2)));
/** The event this AggregateEventNode represents visually */
private AggregateEvent event;
private AggregateEvent aggEvent;
private final AggregateEventNode parentEventNode;
@ -158,9 +156,9 @@ public class AggregateEventNode extends StackPane {
private final ImageView hashIV = new ImageView(HASH_PIN);
private final ImageView tagIV = new ImageView(TAG);
public AggregateEventNode(final AggregateEvent event, AggregateEventNode parentEventNode, EventDetailChart chart) {
this.event = event;
descLOD.set(event.getLOD());
public AggregateEventNode(final AggregateEvent aggEvent, AggregateEventNode parentEventNode, EventDetailChart chart) {
this.aggEvent = aggEvent;
descLOD.set(aggEvent.getLOD());
this.parentEventNode = parentEventNode;
this.chart = chart;
sleuthkitCase = chart.getController().getAutopsyCase().getSleuthkitCase();
@ -170,11 +168,11 @@ public class AggregateEventNode extends StackPane {
HBox.setHgrow(region, Priority.ALWAYS);
final HBox hBox = new HBox(descrLabel, countLabel, region, hashIV, tagIV, minusButton, plusButton);
if (event.getEventIDsWithHashHits().isEmpty()) {
if (aggEvent.getEventIDsWithHashHits().isEmpty()) {
hashIV.setManaged(false);
hashIV.setVisible(false);
}
if (event.getEventIDsWithTags().isEmpty()) {
if (aggEvent.getEventIDsWithTags().isEmpty()) {
tagIV.setManaged(false);
tagIV.setVisible(false);
}
@ -208,7 +206,7 @@ public class AggregateEventNode extends StackPane {
subNodePane.setPickOnBounds(false);
//setup description label
eventTypeImageView.setImage(event.getType().getFXImage());
eventTypeImageView.setImage(aggEvent.getType().getFXImage());
descrLabel.setGraphic(eventTypeImageView);
descrLabel.setPrefWidth(USE_COMPUTED_SIZE);
descrLabel.setTextOverrun(OverrunStyle.CENTER_ELLIPSIS);
@ -217,7 +215,7 @@ public class AggregateEventNode extends StackPane {
setDescriptionVisibility(chart.getDescrVisibility().get());
//setup backgrounds
final Color evtColor = event.getType().getColor();
final Color evtColor = aggEvent.getType().getColor();
spanFill = new Background(new BackgroundFill(evtColor.deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY));
setBackground(new Background(new BackgroundFill(evtColor.deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY)));
setCursor(Cursor.HAND);
@ -247,7 +245,7 @@ public class AggregateEventNode extends StackPane {
setOnMouseClicked(new EventMouseHandler());
plusButton.disableProperty().bind(descLOD.isEqualTo(DescriptionLOD.FULL));
minusButton.disableProperty().bind(descLOD.isEqualTo(event.getLOD()));
minusButton.disableProperty().bind(descLOD.isEqualTo(aggEvent.getLOD()));
plusButton.setOnMouseClicked(e -> {
final DescriptionLOD next = descLOD.get().next();
@ -268,18 +266,14 @@ public class AggregateEventNode extends StackPane {
synchronized private void installTooltip() {
//TODO: all this work should probably go on a background thread...
if (tooltip == null) {
HashMap<String, Long> hashSetCounts = new HashMap<>();
if (!event.getEventIDsWithHashHits().isEmpty()) {
if (!aggEvent.getEventIDsWithHashHits().isEmpty()) {
hashSetCounts = new HashMap<>();
try {
for (TimeLineEvent tle : eventsModel.getEventsById(event.getEventIDsWithHashHits())) {
ArrayList<BlackboardArtifact> blackboardArtifacts = sleuthkitCase.getBlackboardArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT, tle.getFileID());
for (BlackboardArtifact artf : blackboardArtifacts) {
for (BlackboardAttribute attr : artf.getAttributes()) {
if (attr.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()) {
hashSetCounts.merge(attr.getValueString(), 1L, Long::sum);
}
}
for (TimeLineEvent tle : eventsModel.getEventsById(aggEvent.getEventIDsWithHashHits())) {
Set<String> hashSetNames = sleuthkitCase.getAbstractFileById(tle.getFileID()).getHashSetNames();
for (String hashSetName : hashSetNames) {
hashSetCounts.merge(hashSetName, 1L, Long::sum);
}
}
} catch (TskCoreException ex) {
@ -287,23 +281,23 @@ public class AggregateEventNode extends StackPane {
}
}
Map<Long, TagName> tags = new HashMap<>();
if (!event.getEventIDsWithTags().isEmpty()) {
Map<String, Long> tagCounts = new HashMap<>();
if (!aggEvent.getEventIDsWithTags().isEmpty()) {
try {
for (TimeLineEvent tle : eventsModel.getEventsById(event.getEventIDsWithTags())) {
for (TimeLineEvent tle : eventsModel.getEventsById(aggEvent.getEventIDsWithTags())) {
AbstractFile abstractFileById = sleuthkitCase.getAbstractFileById(tle.getFileID());
List<ContentTag> contentTagsByContent = sleuthkitCase.getContentTagsByContent(abstractFileById);
for (ContentTag tag : contentTagsByContent) {
tags.putIfAbsent(tag.getId(), tag.getName());
List<ContentTag> contentTags = sleuthkitCase.getContentTagsByContent(abstractFileById);
for (ContentTag tag : contentTags) {
tagCounts.merge(tag.getName().getDisplayName(), 1l, Long::sum);
}
Long artifactID = tle.getArtifactID();
if (artifactID != 0) {
BlackboardArtifact blackboardArtifact = sleuthkitCase.getBlackboardArtifact(artifactID);
List<BlackboardArtifactTag> blackboardArtifactTagsByArtifact = sleuthkitCase.getBlackboardArtifactTagsByArtifact(blackboardArtifact);
for (BlackboardArtifactTag tag : blackboardArtifactTagsByArtifact) {
tags.putIfAbsent(tag.getId(), tag.getName());
List<BlackboardArtifactTag> artifactTags = sleuthkitCase.getBlackboardArtifactTagsByArtifact(blackboardArtifact);
for (BlackboardArtifactTag tag : artifactTags) {
tagCounts.merge(tag.getName().getDisplayName(), 1l, Long::sum);
}
}
}
@ -312,9 +306,6 @@ public class AggregateEventNode extends StackPane {
}
}
Map<String, Long> tagCounts = tags.values().stream()
.collect(Collectors.toMap(TagName::getDisplayName, anything -> 1L, Long::sum));
String hashSetCountsString = hashSetCounts.entrySet().stream()
.map((Map.Entry<String, Long> t) -> t.getKey() + " : " + t.getValue())
.collect(Collectors.joining("\n"));
@ -339,7 +330,7 @@ public class AggregateEventNode extends StackPane {
}
synchronized public AggregateEvent getEvent() {
return event;
return aggEvent;
}
/**
@ -365,10 +356,9 @@ public class AggregateEventNode extends StackPane {
/** @param descrVis the level of description that should be displayed */
synchronized final void setDescriptionVisibility(DescriptionVisibility descrVis) {
this.descrVis = descrVis;
final int size = event.getEventIDs().size();
final int size = aggEvent.getEventIDs().size();
switch (descrVis) {
case COUNT_ONLY:
descrLabel.setText("");
countLabel.setText(String.valueOf(size));
@ -379,7 +369,7 @@ public class AggregateEventNode extends StackPane {
break;
default:
case SHOWN:
String description = event.getDescription();
String description = aggEvent.getDescription();
description = parentEventNode != null
? " ..." + StringUtils.substringAfter(description, parentEventNode.getEvent().getDescription())
: description;
@ -411,14 +401,14 @@ public class AggregateEventNode extends StackPane {
if (applied) {
descrLabel.setStyle("-fx-font-weight: bold;"); // NON-NLS
spanFill = new Background(new BackgroundFill(event.getType().getColor().deriveColor(0, 1, 1, .3), CORNER_RADII, Insets.EMPTY));
spanFill = new Background(new BackgroundFill(aggEvent.getType().getColor().deriveColor(0, 1, 1, .3), CORNER_RADII, Insets.EMPTY));
spanRegion.setBackground(spanFill);
setBackground(new Background(new BackgroundFill(event.getType().getColor().deriveColor(0, 1, 1, .2), CORNER_RADII, Insets.EMPTY)));
setBackground(new Background(new BackgroundFill(aggEvent.getType().getColor().deriveColor(0, 1, 1, .2), CORNER_RADII, Insets.EMPTY)));
} else {
descrLabel.setStyle("-fx-font-weight: normal;"); // NON-NLS
spanFill = new Background(new BackgroundFill(event.getType().getColor().deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY));
spanFill = new Background(new BackgroundFill(aggEvent.getType().getColor().deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY));
spanRegion.setBackground(spanFill);
setBackground(new Background(new BackgroundFill(event.getType().getColor().deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY)));
setBackground(new Background(new BackgroundFill(aggEvent.getType().getColor().deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY)));
}
}
@ -452,17 +442,17 @@ public class AggregateEventNode extends StackPane {
*/
synchronized private void loadSubClusters(DescriptionLOD newDescriptionLOD) {
getSubNodePane().getChildren().clear();
if (newDescriptionLOD == event.getLOD()) {
if (newDescriptionLOD == aggEvent.getLOD()) {
chart.setRequiresLayout(true);
chart.requestChartLayout();
} else {
RootFilter combinedFilter = eventsModel.filter().get().copyOf();
//make a new filter intersecting the global filter with text(description) and type filters to restrict sub-clusters
combinedFilter.getSubFilters().addAll(new TextFilter(event.getDescription()),
new TypeFilter(event.getType()));
combinedFilter.getSubFilters().addAll(new TextFilter(aggEvent.getDescription()),
new TypeFilter(aggEvent.getType()));
//make a new end inclusive span (to 'filter' with)
final Interval span = event.getSpan().withEndMillis(event.getSpan().getEndMillis() + 1000);
final Interval span = aggEvent.getSpan().withEndMillis(aggEvent.getSpan().getEndMillis() + 1000);
//make a task to load the subnodes
LoggedTask<List<AggregateEventNode>> loggedTask = new LoggedTask<List<AggregateEventNode>>(
@ -532,11 +522,11 @@ public class AggregateEventNode extends StackPane {
@Subscribe
synchronized public void handleEventsUnTagged(EventsUnTaggedEvent tagEvent) {
AggregateEvent withTagsRemoved = event.withTagsRemoved(tagEvent.getEventIDs());
if (withTagsRemoved != event) {
event = withTagsRemoved;
AggregateEvent withTagsRemoved = aggEvent.withTagsRemoved(tagEvent.getEventIDs());
if (withTagsRemoved != aggEvent) {
aggEvent = withTagsRemoved;
tooltip = null;
boolean hasTags = event.getEventIDsWithTags().isEmpty() == false;
boolean hasTags = aggEvent.getEventIDsWithTags().isEmpty() == false;
Platform.runLater(() -> {
tagIV.setManaged(hasTags);
tagIV.setVisible(hasTags);
@ -546,9 +536,9 @@ public class AggregateEventNode extends StackPane {
@Subscribe
synchronized public void handleEventsTagged(EventsTaggedEvent tagEvent) {
AggregateEvent withTagsAdded = event.withTagsAdded(tagEvent.getEventIDs());
if (withTagsAdded != event) {
event = withTagsAdded;
AggregateEvent withTagsAdded = aggEvent.withTagsAdded(tagEvent.getEventIDs());
if (withTagsAdded != aggEvent) {
aggEvent = withTagsAdded;
tooltip = null;
Platform.runLater(() -> {
tagIV.setManaged(true);

View File

@ -78,7 +78,7 @@ public class DrawableAttribute<T extends Comparable<T>> {
= new DrawableAttribute<>(AttributeName.MODEL, "Camera Model", true, "camera.png", f -> Collections.singleton(f.getModel()));
public final static DrawableAttribute<String> HASHSET
= new DrawableAttribute<>(AttributeName.HASHSET, "Hashset", true, "hashset_hits.png", DrawableFile::getHashHitSetNames);
= new DrawableAttribute<>(AttributeName.HASHSET, "Hashset", true, "hashset_hits.png", DrawableFile::getHashSetNamesUnchecked);
public final static DrawableAttribute<Long> OBJ_ID
= new DrawableAttribute<>(AttributeName.OBJ_ID, "Internal Object ID", true, "", f -> Collections.singleton(f.getId()));

View File

@ -40,13 +40,11 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.logging.Level;
import javax.annotation.Nonnull;
import javax.annotation.concurrent.GuardedBy;
import javax.swing.SortOrder;
import org.apache.commons.lang3.StringUtils;
import org.openide.util.Exceptions;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.HashHitUtils;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.imagegallery.FileTypeUtils;
import org.sleuthkit.autopsy.imagegallery.ImageGalleryController;
@ -583,25 +581,29 @@ public final class DrawableDB {
stmt.setBoolean(8, f.isAnalyzed());
stmt.executeUpdate();
for (String name : HashHitUtils.getHashSetNamesForFile(tskCase, f.getId())) {
try {
for (String name : f.getHashSetNames()) {
// "insert or ignore into hash_sets (hash_set_name) values (?)"
insertHashSetStmt.setString(1, name);
insertHashSetStmt.executeUpdate();
// "insert or ignore into hash_sets (hash_set_name) values (?)"
insertHashSetStmt.setString(1, name);
insertHashSetStmt.executeUpdate();
//TODO: use nested select to get hash_set_id rather than seperate statement/query
//"select hash_set_id from hash_sets where hash_set_name = ?"
selectHashSetStmt.setString(1, name);
try (ResultSet rs = selectHashSetStmt.executeQuery()) {
while (rs.next()) {
int hashsetID = rs.getInt("hash_set_id");
//"insert or ignore into hash_set_hits (hash_set_id, obj_id) values (?,?)";
insertHashHitStmt.setInt(1, hashsetID);
insertHashHitStmt.setLong(2, f.getId());
insertHashHitStmt.executeUpdate();
break;
//TODO: use nested select to get hash_set_id rather than seperate statement/query
//"select hash_set_id from hash_sets where hash_set_name = ?"
selectHashSetStmt.setString(1, name);
try (ResultSet rs = selectHashSetStmt.executeQuery()) {
while (rs.next()) {
int hashsetID = rs.getInt("hash_set_id");
//"insert or ignore into hash_set_hits (hash_set_id, obj_id) values (?,?)";
insertHashHitStmt.setInt(1, hashsetID);
insertHashHitStmt.setLong(2, f.getId());
insertHashHitStmt.executeUpdate();
break;
}
}
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "failed to insert/update hash hits for file" + f.getName(), ex);
}
//and update all groups this file is in
@ -620,6 +622,7 @@ public final class DrawableDB {
if (Case.isCaseOpen()) {
LOGGER.log(Level.SEVERE, "failed to insert/update file" + f.getName(), ex);
}
} finally {
dbWriteUnlock();
}
@ -875,7 +878,7 @@ public final class DrawableDB {
query.append(orderByClause);
if (orderByClause.equals("") == false) {
if (orderByClause.isEmpty() == false) {
String sortOrderClause = "";
switch (sortOrder) {
@ -958,7 +961,7 @@ public final class DrawableDB {
return DrawableFile.create(f,
areFilesAnalyzed(Collections.singleton(id)), isVideoFile(f));
} catch (IllegalStateException ex) {
LOGGER.log(Level.SEVERE, "there is no case open; failed to load file with id: " + id);
LOGGER.log(Level.SEVERE, "there is no case open; failed to load file with id: {0}", id);
return null;
}
}
@ -1113,21 +1116,6 @@ public final class DrawableDB {
}
}
/**
* For the given fileID, get the names of all the hashsets that the file is
* in.
*
* @param fileID the fileID to file all the hash sets for
*
* @return a set of names, each of which is a hashset that the given file is
* in.
*/
@Nonnull
public Set<String> getHashSetsForFileFromAutopsy(long fileID) {
return HashHitUtils.getHashSetNamesForFile(tskCase, fileID);
}
/**
* For performance reasons, keep a list of all file IDs currently in the
* drawable database. Otherwise the database is queried many times to
@ -1194,7 +1182,7 @@ public final class DrawableDB {
public boolean isVideoFile(AbstractFile f) {
return isNull(f) ? false
: videoFileMap.computeIfAbsent(f.getId(), id -> FileTypeUtils.isVideoFile(f));
}
/**

View File

@ -31,12 +31,12 @@ import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.property.SimpleObjectProperty;
import javafx.scene.image.Image;
import javafx.util.Pair;
import javax.annotation.Nonnull;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.text.WordUtils;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.imagegallery.FileTypeUtils;
import org.sleuthkit.autopsy.imagegallery.ImageGalleryController;
import org.sleuthkit.autopsy.imagegallery.ThumbnailCache;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
@ -57,6 +57,8 @@ import org.sleuthkit.datamodel.TskCoreException;
*/
public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile {
private static final Logger LOGGER = Logger.getLogger(DrawableFile.class.getName());
public static DrawableFile<?> create(AbstractFile abstractFileById, boolean analyzed) {
return create(abstractFileById, analyzed, FileTypeUtils.isVideoFile(abstractFileById));
}
@ -101,10 +103,6 @@ public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile
public abstract boolean isVideo();
public Collection<String> getHashHitSetNames() {
return ImageGalleryController.getDefault().getHashSetManager().getHashSetsForFile(getId());
}
@Override
public boolean isRoot() {
return false;
@ -226,13 +224,12 @@ public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile
.orElse(Category.ZERO)
);
} catch (TskCoreException ex) {
Logger.getLogger(DrawableFile.class.getName()).log(Level.WARNING, "problem looking up category for file " + this.getName(), ex);
LOGGER.log(Level.WARNING, "problem looking up category for file " + this.getName(), ex);
} catch (IllegalStateException ex) {
// We get here many times if the case is closed during ingest, so don't print out a ton of warnings.
}
}
public Image getThumbnail() {
return ThumbnailCache.getDefault().get(this);
}
@ -263,7 +260,7 @@ public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile
drawablePath = StringUtils.removeEnd(getUniquePath(), getName());
return drawablePath;
} catch (TskCoreException ex) {
Logger.getLogger(DrawableFile.class.getName()).log(Level.WARNING, "failed to get drawablePath from {0}", getName());
LOGGER.log(Level.WARNING, "failed to get drawablePath from {0}", getName());
return "";
}
}
@ -273,4 +270,14 @@ public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile
Image thumbnail = getThumbnail();
return Objects.nonNull(thumbnail) && thumbnail.errorProperty().get() == false;
}
@Nonnull
public Set<String> getHashSetNamesUnchecked() {
try {
return getHashSetNames();
} catch (TskCoreException ex) {
LOGGER.log(Level.WARNING, "Failed to get hash set names", ex);
return Collections.emptySet();
}
}
}

View File

@ -3,8 +3,11 @@ package org.sleuthkit.autopsy.imagegallery.datamodel;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.Collections;
import java.util.Set;
import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableDB;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Manages a cache of hashset hits as a map from fileID to hashset names.
@ -36,7 +39,12 @@ public class HashSetManager {
* @return the names of the hashsets the given fileID is in
*/
private Set<String> getHashSetsForFileHelper(long fileID) {
return db.getHashSetsForFileFromAutopsy(fileID);
try {
return db.getFileFromID(fileID).getHashSetNames();
} catch (TskCoreException ex) {
Logger.getLogger(HashSetManager.class.getName()).log(Level.SEVERE, "Failed to get Hash Sets for file", ex);
return Collections.emptySet();
}
}
/**

View File

@ -85,7 +85,7 @@ public interface DrawableView {
default boolean hasHashHit() {
try {
return getFile().map(DrawableFile::getHashHitSetNames)
return getFile().map(DrawableFile::getHashSetNamesUnchecked)
.map((Collection<String> t) -> t.isEmpty() == false)
.orElse(false);

View File

@ -648,6 +648,12 @@ class ExtractRegistry extends Extract {
}
break;
case "shellfolders": // NON-NLS
// The User Shell Folders subkey stores the paths to Windows Explorer folders for the current user of the computer
// (https://technet.microsoft.com/en-us/library/Cc962613.aspx).
// No useful information. Skip.
break;
default:
logger.log(Level.WARNING, "Unrecognized node name: {0}", dataType); //NON-NLS
break;