Merge pull request #850 from millmanorama/with_image_analyzer_module
updated ImageAnalyzer doxygen
@ -58,6 +58,16 @@ ContentTagNode.createSheet.filePath.name=File Path
|
||||
ContentTagNode.createSheet.filePath.displayName=File Path
|
||||
ContentTagNode.createSheet.comment.name=Comment
|
||||
ContentTagNode.createSheet.comment.displayName=Comment
|
||||
ContentTagNode.createSheet.fileModifiedTime.name=Modified Time
|
||||
ContentTagNode.createSheet.fileModifiedTime.displayName=Modified Time
|
||||
ContentTagNode.createSheet.fileChangedTime.name=Changed Time
|
||||
ContentTagNode.createSheet.fileChangedTime.displayName=Changed Time
|
||||
ContentTagNode.createSheet.fileAccessedTime.name=Accessed Time
|
||||
ContentTagNode.createSheet.fileAccessedTime.displayName=Accessed Time
|
||||
ContentTagNode.createSheet.fileCreatedTime.name=Created Time
|
||||
ContentTagNode.createSheet.fileCreatedTime.displayName=Created Time
|
||||
ContentTagNode.createSheet.fileSize.name=Size
|
||||
ContentTagNode.createSheet.fileSize.displayName=Size
|
||||
ContentTagTypeNode.displayName.text=File Tags
|
||||
ContentTagTypeNode.createSheet.name.name=Name
|
||||
ContentTagTypeNode.createSheet.name.displayName=Name
|
||||
|
@ -20,13 +20,15 @@ package org.sleuthkit.autopsy.datamodel;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.logging.Level;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import javax.swing.Action;
|
||||
import org.openide.nodes.Children;
|
||||
import org.openide.nodes.Sheet;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.openide.util.lookup.Lookups;
|
||||
import org.sleuthkit.autopsy.actions.DeleteContentTagAction;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
import org.sleuthkit.datamodel.ContentTag;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
|
||||
@ -51,24 +53,26 @@ class ContentTagNode extends DisplayableItemNode {
|
||||
|
||||
@Override
|
||||
protected Sheet createSheet() {
|
||||
Content content = tag.getContent();
|
||||
String contentPath;
|
||||
try {
|
||||
contentPath = content.getUniquePath();
|
||||
} catch (TskCoreException ex) {
|
||||
Logger.getLogger(ContentTagNode.class.getName()).log(Level.SEVERE, "Failed to get path for content (id = " + content.getId() + ")", ex); //NON-NLS
|
||||
contentPath = NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.unavail.path");
|
||||
}
|
||||
AbstractFile file = content instanceof AbstractFile ? (AbstractFile)content : null;
|
||||
|
||||
Sheet propertySheet = super.createSheet();
|
||||
Sheet.Set properties = propertySheet.get(Sheet.PROPERTIES);
|
||||
if (properties == null) {
|
||||
properties = Sheet.createPropertiesSet();
|
||||
propertySheet.put(properties);
|
||||
}
|
||||
|
||||
properties.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.file.name"),
|
||||
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.file.displayName"),
|
||||
"",
|
||||
tag.getContent().getName()));
|
||||
String contentPath;
|
||||
try {
|
||||
contentPath = tag.getContent().getUniquePath();
|
||||
} catch (TskCoreException ex) {
|
||||
Logger.getLogger(ContentTagNode.class.getName()).log(Level.SEVERE, "Failed to get path for content (id = " + tag.getContent().getId() + ")", ex); //NON-NLS
|
||||
contentPath = NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.unavail.path");
|
||||
}
|
||||
content.getName()));
|
||||
properties.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.filePath.name"),
|
||||
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.filePath.displayName"),
|
||||
"",
|
||||
@ -77,7 +81,26 @@ class ContentTagNode extends DisplayableItemNode {
|
||||
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.comment.displayName"),
|
||||
"",
|
||||
tag.getComment()));
|
||||
|
||||
properties.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileModifiedTime.name"),
|
||||
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileModifiedTime.displayName"),
|
||||
"",
|
||||
file != null ? ContentUtils.getStringTime(file.getMtime(), file) : ""));
|
||||
properties.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileChangedTime.name"),
|
||||
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileChangedTime.displayName"),
|
||||
"",
|
||||
file != null ? ContentUtils.getStringTime(file.getCtime(), file) : ""));
|
||||
properties.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileAccessedTime.name"),
|
||||
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileAccessedTime.displayName"),
|
||||
"",
|
||||
file != null ? ContentUtils.getStringTime(file.getAtime(), file) : ""));
|
||||
properties.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileCreatedTime.name"),
|
||||
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileCreatedTime.displayName"),
|
||||
"",
|
||||
file != null ? ContentUtils.getStringTime(file.getCrtime(), file) : ""));
|
||||
properties.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileSize.name"),
|
||||
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileSize.displayName"),
|
||||
"",
|
||||
content.getSize()));
|
||||
return propertySheet;
|
||||
}
|
||||
|
||||
|
@ -193,46 +193,54 @@ final class IngestJob {
|
||||
}
|
||||
|
||||
void process(DataSourceIngestTask task) throws InterruptedException {
|
||||
if (!isCancelled() && !dataSourceIngestPipeline.isEmpty()) {
|
||||
List<IngestModuleError> errors = new ArrayList<>();
|
||||
errors.addAll(dataSourceIngestPipeline.process(task, dataSourceIngestProgress));
|
||||
if (!errors.isEmpty()) {
|
||||
logIngestModuleErrors(errors);
|
||||
}
|
||||
}
|
||||
if (null != dataSourceIngestProgress) {
|
||||
dataSourceIngestProgress.finish();
|
||||
// This is safe because this method will be called at most once per
|
||||
// ingest job and finish() will not be called while that single
|
||||
// data source ingest task has not been reported complete by this
|
||||
// code to the ingest scheduler.
|
||||
dataSourceIngestProgress = null;
|
||||
}
|
||||
ingestTaskScheduler.notifyTaskCompleted(task);
|
||||
}
|
||||
|
||||
void process(FileIngestTask task) throws InterruptedException {
|
||||
if (!isCancelled()) {
|
||||
FileIngestPipeline pipeline = fileIngestPipelines.take();
|
||||
if (!pipeline.isEmpty()) {
|
||||
AbstractFile file = task.getFile();
|
||||
synchronized (this) {
|
||||
++processedFiles;
|
||||
if (processedFiles <= estimatedFilesToProcess) {
|
||||
fileIngestProgress.progress(file.getName(), (int) processedFiles);
|
||||
} else {
|
||||
fileIngestProgress.progress(file.getName(), (int) estimatedFilesToProcess);
|
||||
}
|
||||
}
|
||||
try {
|
||||
if (!isCancelled() && !dataSourceIngestPipeline.isEmpty()) {
|
||||
List<IngestModuleError> errors = new ArrayList<>();
|
||||
errors.addAll(pipeline.process(task));
|
||||
errors.addAll(dataSourceIngestPipeline.process(task, dataSourceIngestProgress));
|
||||
if (!errors.isEmpty()) {
|
||||
logIngestModuleErrors(errors);
|
||||
}
|
||||
}
|
||||
fileIngestPipelines.put(pipeline);
|
||||
if (null != dataSourceIngestProgress) {
|
||||
dataSourceIngestProgress.finish();
|
||||
// This is safe because this method will be called at most once per
|
||||
// ingest job and finish() will not be called while that single
|
||||
// data source ingest task has not been reported complete by this
|
||||
// code to the ingest scheduler.
|
||||
dataSourceIngestProgress = null;
|
||||
}
|
||||
}
|
||||
finally {
|
||||
ingestTaskScheduler.notifyTaskCompleted(task);
|
||||
}
|
||||
}
|
||||
|
||||
void process(FileIngestTask task) throws InterruptedException {
|
||||
try {
|
||||
if (!isCancelled()) {
|
||||
FileIngestPipeline pipeline = fileIngestPipelines.take();
|
||||
if (!pipeline.isEmpty()) {
|
||||
AbstractFile file = task.getFile();
|
||||
synchronized (this) {
|
||||
++processedFiles;
|
||||
if (processedFiles <= estimatedFilesToProcess) {
|
||||
fileIngestProgress.progress(file.getName(), (int) processedFiles);
|
||||
} else {
|
||||
fileIngestProgress.progress(file.getName(), (int) estimatedFilesToProcess);
|
||||
}
|
||||
}
|
||||
List<IngestModuleError> errors = new ArrayList<>();
|
||||
errors.addAll(pipeline.process(task));
|
||||
if (!errors.isEmpty()) {
|
||||
logIngestModuleErrors(errors);
|
||||
}
|
||||
}
|
||||
fileIngestPipelines.put(pipeline);
|
||||
}
|
||||
}
|
||||
finally {
|
||||
ingestTaskScheduler.notifyTaskCompleted(task);
|
||||
}
|
||||
ingestTaskScheduler.notifyTaskCompleted(task);
|
||||
}
|
||||
|
||||
void finish() {
|
||||
|
@ -52,30 +52,34 @@ final class IngestScheduler {
|
||||
private final AtomicLong nextIngestJobId = new AtomicLong(0L);
|
||||
private final ConcurrentHashMap<Long, IngestJob> ingestJobsById = new ConcurrentHashMap<>();
|
||||
private volatile boolean enabled = false;
|
||||
// private volatile boolean cancellingAllTasks = false; TODO: Uncomment this with related code, if desired
|
||||
// private volatile boolean cancellingAllTasks = false; TODO: Uncomment this with related code, if desired
|
||||
private final DataSourceIngestTaskQueue dataSourceTaskDispenser = new DataSourceIngestTaskQueue();
|
||||
private final FileIngestTaskQueue fileTaskDispenser = new FileIngestTaskQueue();
|
||||
|
||||
// The following five collections lie at the heart of the scheduler.
|
||||
//
|
||||
// The pending tasks queues are used to schedule tasks for an ingest job. If
|
||||
// multiple jobs are scheduled, tasks from different jobs may become
|
||||
// interleaved in these queues. Data source tasks go into a simple FIFO
|
||||
// queue that is consumed by the ingest threads. File tasks are "shuffled"
|
||||
// interleaved in these queues.
|
||||
|
||||
// FIFO queue for data source-level tasks.
|
||||
private final LinkedBlockingQueue<DataSourceIngestTask> pendingDataSourceTasks = new LinkedBlockingQueue<>(); // Guarded by this
|
||||
|
||||
// File tasks are "shuffled"
|
||||
// through root directory (priority queue), directory (LIFO), and file tasks
|
||||
// queues (LIFO). If a file task makes it into the pending file tasks queue,
|
||||
// it is consumed by the ingest threads.
|
||||
//
|
||||
// The "tasks in progress" list is used to determine when an ingest job is
|
||||
// completed and should be shut down, i.e., the job should shut down its
|
||||
// ingest pipelines and finish its progress bars. Tasks stay in the "tasks
|
||||
// in progress" list either until discarded by the scheduler or the ingest
|
||||
// thread that is working on the task notifies the scheduler that the task
|
||||
// is completed.
|
||||
private final LinkedBlockingQueue<DataSourceIngestTask> pendingDataSourceTasks = new LinkedBlockingQueue<>();
|
||||
private final TreeSet<FileIngestTask> pendingRootDirectoryTasks = new TreeSet<>(new RootDirectoryTaskComparator()); // Guarded by this
|
||||
private final List<FileIngestTask> pendingDirectoryTasks = new ArrayList<>(); // Guarded by this
|
||||
private final BlockingDeque<FileIngestTask> pendingFileTasks = new LinkedBlockingDeque<>();
|
||||
private final List<IngestTask> tasksInProgress = new ArrayList<>(); // Guarded by this
|
||||
private final BlockingDeque<FileIngestTask> pendingFileTasks = new LinkedBlockingDeque<>(); // Not guarded
|
||||
|
||||
// The "tasks in progress" list has:
|
||||
// - File and data source tasks that are running
|
||||
// - File tasks that are in the pending file queue
|
||||
// It is used to determine when a job is done. It has both pending and running
|
||||
// tasks because we do not lock the 'pendingFileTasks' and a task needs to be in
|
||||
// at least one of the pending or in-progress lists at all times before it is completed.
|
||||
// Files are added to this when they are added to pendingFileTasks and removed when they complete.
|
||||
private final List<IngestTask> tasksInProgressAndPending = new ArrayList<>(); // Guarded by this
|
||||
|
||||
synchronized static IngestScheduler getInstance() {
|
||||
if (instance == null) {
|
||||
@ -133,12 +137,12 @@ final class IngestScheduler {
|
||||
|
||||
synchronized private void scheduleDataSourceIngestTask(IngestJob job) throws InterruptedException {
|
||||
DataSourceIngestTask task = new DataSourceIngestTask(job);
|
||||
tasksInProgress.add(task);
|
||||
tasksInProgressAndPending.add(task);
|
||||
try {
|
||||
// Should not block, queue is (theoretically) unbounded.
|
||||
pendingDataSourceTasks.put(task);
|
||||
} catch (InterruptedException ex) {
|
||||
tasksInProgress.remove(task);
|
||||
tasksInProgressAndPending.remove(task);
|
||||
Logger.getLogger(IngestScheduler.class.getName()).log(Level.SEVERE, "Interruption of unexpected block on pending data source tasks queue", ex); //NON-NLS
|
||||
throw ex;
|
||||
}
|
||||
@ -149,7 +153,6 @@ final class IngestScheduler {
|
||||
for (AbstractFile firstLevelFile : topLevelFiles) {
|
||||
FileIngestTask task = new FileIngestTask(job, firstLevelFile);
|
||||
if (shouldEnqueueFileTask(task)) {
|
||||
tasksInProgress.add(task);
|
||||
pendingRootDirectoryTasks.add(task);
|
||||
}
|
||||
}
|
||||
@ -212,9 +215,7 @@ final class IngestScheduler {
|
||||
if (shouldEnqueueFileTask(directoryTask)) {
|
||||
addToPendingFileTasksQueue(directoryTask);
|
||||
tasksEnqueuedForDirectory = true;
|
||||
} else {
|
||||
tasksInProgress.remove(directoryTask);
|
||||
}
|
||||
}
|
||||
|
||||
// If the directory contains subdirectories or files, try to
|
||||
// enqueue tasks for them as well.
|
||||
@ -227,13 +228,11 @@ final class IngestScheduler {
|
||||
if (file.hasChildren()) {
|
||||
// Found a subdirectory, put the task in the
|
||||
// pending directory tasks queue.
|
||||
tasksInProgress.add(childTask);
|
||||
pendingDirectoryTasks.add(childTask);
|
||||
tasksEnqueuedForDirectory = true;
|
||||
} else if (shouldEnqueueFileTask(childTask)) {
|
||||
// Found a file, put the task directly into the
|
||||
// pending file tasks queue.
|
||||
tasksInProgress.add(childTask);
|
||||
addToPendingFileTasksQueue(childTask);
|
||||
tasksEnqueuedForDirectory = true;
|
||||
}
|
||||
@ -304,6 +303,7 @@ final class IngestScheduler {
|
||||
}
|
||||
|
||||
synchronized private void addToPendingFileTasksQueue(FileIngestTask task) throws IllegalStateException {
|
||||
tasksInProgressAndPending.add(task);
|
||||
try {
|
||||
// Should not block, queue is (theoretically) unbounded.
|
||||
/* add to top of list because we had one image that had a folder with
|
||||
@ -313,7 +313,7 @@ final class IngestScheduler {
|
||||
*/
|
||||
pendingFileTasks.addFirst(task);
|
||||
} catch (IllegalStateException ex) {
|
||||
tasksInProgress.remove(task);
|
||||
tasksInProgressAndPending.remove(task);
|
||||
Logger.getLogger(IngestScheduler.class.getName()).log(Level.SEVERE, "Interruption of unexpected block on pending file tasks queue", ex); //NON-NLS
|
||||
throw ex;
|
||||
}
|
||||
@ -326,7 +326,6 @@ final class IngestScheduler {
|
||||
// Send the file task directly to file tasks queue, no need to
|
||||
// update the pending root directory or pending directory tasks
|
||||
// queues.
|
||||
tasksInProgress.add(task);
|
||||
addToPendingFileTasksQueue(task);
|
||||
}
|
||||
}
|
||||
@ -344,7 +343,7 @@ final class IngestScheduler {
|
||||
boolean jobIsCompleted;
|
||||
IngestJob job = task.getIngestJob();
|
||||
synchronized (this) {
|
||||
tasksInProgress.remove(task);
|
||||
tasksInProgressAndPending.remove(task);
|
||||
jobIsCompleted = ingestJobIsComplete(job);
|
||||
}
|
||||
if (jobIsCompleted) {
|
||||
@ -382,7 +381,7 @@ final class IngestScheduler {
|
||||
while (iterator.hasNext()) {
|
||||
IngestTask task = (IngestTask) iterator.next();
|
||||
if (task.getIngestJob().getId() == jobId) {
|
||||
tasksInProgress.remove((IngestTask) task);
|
||||
tasksInProgressAndPending.remove((IngestTask) task);
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
@ -420,13 +419,13 @@ final class IngestScheduler {
|
||||
synchronized private <T> void removeAllPendingTasks(Collection<T> taskQueue) {
|
||||
Iterator<T> iterator = taskQueue.iterator();
|
||||
while (iterator.hasNext()) {
|
||||
tasksInProgress.remove((IngestTask) iterator.next());
|
||||
tasksInProgressAndPending.remove((IngestTask) iterator.next());
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
|
||||
synchronized private boolean ingestJobIsComplete(IngestJob job) {
|
||||
for (IngestTask task : tasksInProgress) {
|
||||
for (IngestTask task : tasksInProgressAndPending) {
|
||||
if (task.getIngestJob().getId() == job.getId()) {
|
||||
return false;
|
||||
}
|
||||
|
@ -47,6 +47,7 @@ import org.sleuthkit.autopsy.imageanalyzer.FXMLConstructor;
|
||||
import org.sleuthkit.autopsy.imageanalyzer.FileIDSelectionModel;
|
||||
import org.sleuthkit.autopsy.imageanalyzer.IconCache;
|
||||
import org.sleuthkit.autopsy.imageanalyzer.TagUtils;
|
||||
import org.sleuthkit.autopsy.imageanalyzer.ThreadUtils;
|
||||
import org.sleuthkit.autopsy.imageanalyzer.datamodel.Category;
|
||||
import org.sleuthkit.autopsy.imageanalyzer.datamodel.DrawableAttribute;
|
||||
import org.sleuthkit.autopsy.imageanalyzer.grouping.GroupSortBy;
|
||||
@ -142,8 +143,10 @@ public class EurekaToolbar extends ToolBar {
|
||||
assert tagSelectedMenuButton != null : "fx:id=\"tagSelectedMenubutton\" was not injected: check your FXML file 'EurekaToolbar.fxml'.";
|
||||
|
||||
FileIDSelectionModel.getInstance().getSelected().addListener((Observable o) -> {
|
||||
tagSelectedMenuButton.setDisable(FileIDSelectionModel.getInstance().getSelected().isEmpty());
|
||||
catSelectedMenuButton.setDisable(FileIDSelectionModel.getInstance().getSelected().isEmpty());
|
||||
ThreadUtils.runNowOrLater(() -> {
|
||||
tagSelectedMenuButton.setDisable(FileIDSelectionModel.getInstance().getSelected().isEmpty());
|
||||
catSelectedMenuButton.setDisable(FileIDSelectionModel.getInstance().getSelected().isEmpty());
|
||||
});
|
||||
});
|
||||
|
||||
tagSelectedMenuButton.setOnAction((ActionEvent t) -> {
|
||||
|
BIN
docs/doxygen-user/ImageAnalyzer/application_view_tile.png
Normal file
After Width: | Height: | Size: 465 B |
BIN
docs/doxygen-user/ImageAnalyzer/bisque.png
Normal file
After Width: | Height: | Size: 276 B |
BIN
docs/doxygen-user/ImageAnalyzer/drawabletile.png
Normal file
After Width: | Height: | Size: 21 KiB |
BIN
docs/doxygen-user/ImageAnalyzer/flag_gray.png
Normal file
After Width: | Height: | Size: 15 KiB |
BIN
docs/doxygen-user/ImageAnalyzer/flag_red.png
Normal file
After Width: | Height: | Size: 665 B |
BIN
docs/doxygen-user/ImageAnalyzer/folder_picture.png
Normal file
After Width: | Height: | Size: 713 B |
BIN
docs/doxygen-user/ImageAnalyzer/gray.png
Normal file
After Width: | Height: | Size: 280 B |
BIN
docs/doxygen-user/ImageAnalyzer/green.png
Normal file
After Width: | Height: | Size: 276 B |
BIN
docs/doxygen-user/ImageAnalyzer/hashset_hits.png
Normal file
After Width: | Height: | Size: 1.4 KiB |
BIN
docs/doxygen-user/ImageAnalyzer/orange.png
Normal file
After Width: | Height: | Size: 276 B |
BIN
docs/doxygen-user/ImageAnalyzer/purpledash.png
Normal file
After Width: | Height: | Size: 445 B |
BIN
docs/doxygen-user/ImageAnalyzer/red.png
Normal file
After Width: | Height: | Size: 276 B |
BIN
docs/doxygen-user/ImageAnalyzer/slide.png
Normal file
After Width: | Height: | Size: 620 B |
BIN
docs/doxygen-user/ImageAnalyzer/video-file.png
Normal file
After Width: | Height: | Size: 718 B |
BIN
docs/doxygen-user/ImageAnalyzer/yellow.png
Normal file
After Width: | Height: | Size: 276 B |
@ -1,10 +1,117 @@
|
||||
/*! \page image_viewer_page Image and Video Viewer
|
||||
|
||||
Overview
|
||||
---------
|
||||
========
|
||||
This document outlines the use of the new Image Analyzer feature of Autopsy. This feature was funded by DHS S&T to help provide free and open source digital forensics tools to law enforcement.
|
||||
|
||||
The new image analyzer feature has been designed specifically with child-exploitation cases in mind, but can be used for a variety of other investigation types that involve images and videos. It offers the following features beyond the traditional long list of thumbnails that Autopsy and other tools currently provide.
|
||||
- Groups images by folder (and other attributes) to help examiner break the large set of images into smaller groups and to help focus on areas with images of interest.
|
||||
- Allows examiner to start viewing images immediately upon adding them to the case. As images are hashed, they are updated in the interface. You do not need to wait until the entire image is ingested.
|
||||
|
||||
This document assumes basic familiarity with Autopsy.
|
||||
|
||||
Quick Start
|
||||
===========
|
||||
1. The Image Analysis tool can be configured to collect data about images/videos as ingest runs or all at once after ingest. To change this setting, go to Tools->Options->Image/Video Analyzer. This setting is saved per case, but cannot be changed during ingest.
|
||||
2. Create a case as normal and add a disk image (or folder of files) as a data source. Ensure that you have the hash lookup module enabled with NSRL and known bad hashsets, the EXIF module enabled, and the File Type module enabled.
|
||||
3. Click Tools->Analyze Images/Videos in the menu. This will open the Autopsy Image/Video Analysis tool in a new window.
|
||||
4. Groups of images will be presented as they are analyzed by the background ingest modules. You can later resort and regroup, but it is required to keep it grouped by folder while ingest is still ongoing.
|
||||
5. As each group is reviewed, the next highest priority group is presented, according to a sorting criteria (the default is the density of hash set hits).
|
||||
6. Images that were hits from hashsets will have a dashed border around them.
|
||||
7. You can use the menu bar on the top of the group to categorize the entire group.
|
||||
8. You can right click on an image to categorize or tag the individual image.
|
||||
9. Tag files with customizable tags. A ‘Follow Up’ tag is already built into the tool and integrated into the filter options. Tags can be applied in addition to categorization. An image can only have one categorization, but can have many tags to support your work-flow.
|
||||
10. Create a report containing the details of every tagged and/or categorized file, via the standard Autopsy report generation feature.
|
||||
|
||||
Use Case Details
|
||||
===============
|
||||
In addition to the basic ideas presented in the previous section, here are some hints on use cases that were designed into the tool.
|
||||
- When you are viewing the groups, they are presented in an order based on density of hash hits (by default). If you find a group that has lots of interesting files and you want to see what is in the parent folder or nearby folders, use the navigation tree on the left.
|
||||
- At any time, you can use the list on the left-hand side to see the groups with the largest hashset hits.
|
||||
- To see which folders have the most images in them, sort the groups by group size (descending).
|
||||
- Files that have hashset hits are not automatically tagged or categorized. You need to do that after reviewing them. The easiest way to do that is to wait until ingest is over and then group by hashsets. You can then review each group and categorize the entire group at a time using the group header.
|
||||
|
||||
Categories
|
||||
==========
|
||||
The tool has been designed specifically with child-exploitation cases in mind and has a notion of categories. We will be changing this in the future to be more flexible with custom category names, but currently it is hard-coded to use the names that Project Vic (and other international groups) use. We have assigned colors to each category to highlight each image.
|
||||
|
||||
|
||||
Name|Description|Color
|
||||
----|-----------------|------
|
||||
CAT-0|Uncategorized|
|
||||
CAT-1|Child Abuse Material |
|
||||
CAT-2|Child Exploitative / Age Difficult|
|
||||
CAT-3|CGI / Animation|
|
||||
CAT-4|Comparison Images |
|
||||
CAT-5|Non-pertinent|
|
||||
|
||||
GUI controls
|
||||
=================
|
||||
You can do your entire investigation using the mouse, but many examiners like to use keyboard shortcuts to quickly process large amounts of images.
|
||||
|
||||
Keyboard Shortcuts
|
||||
-----------------
|
||||
shortcut | action
|
||||
-----------|------
|
||||
digits 0-5 | assign the correspondingly numbered category to the selected file(s)
|
||||
alt + 0-5 | assign the correspondingly numbered category to all files in the focused group
|
||||
arrows | select the next file in the direction pressed
|
||||
page up/down | scroll the list of files
|
||||
|
||||
Additional Mouse Controls
|
||||
-------------------------
|
||||
mouse gesture| action
|
||||
----------|----------
|
||||
ctrl + left click|toggle selection of clicked file, select multiple files
|
||||
right click on file|bring up context menu allowing per file actions (tag, categorize, extract to local file, view in external viewer, view in Autopsy content viewer, add file to HashDB)
|
||||
right click empty space of group|bring up context menu allowing per group actions (tag, categorize, extract to local file(s), add file(s) to HashDB)
|
||||
double click on file|open selected file in slide show mode
|
||||
|
||||
UI Details
|
||||
==========
|
||||
Group Display Area
|
||||
-------------------
|
||||
The central display area contains the list of files in the current group. Images in the group can be displayed in either thumbnail mode or slide show mode. Slide show mode provides larger images and playback of video files. At the right of the group header is a toggle for changing the viewing mode of the group (tiles vs slide-show ).
|
||||
|
||||
Image/Video Tiles
|
||||
-----------------
|
||||
|
||||
Each file is represented in the main display area via a small tile. The tile shows:
|
||||
- Thumbnail of the image/video
|
||||
- Name of the file
|
||||
- Indicators of other important details:
|
||||
|
||||
| image | description | meaning|
|
||||
|----|----|-----|
|
||||
| | solid colored border | file’s assigned category.|
|
||||
|  "" | purple dashed border | file has a known bad hashset hit, but has not yet been categorized. |
|
||||
|  ""|pushpin | file has a known bad hashset hit|
|
||||
|  ""| clapboard on document | video file|
|
||||
|  ""| a red flag | file has been 'flagged' with the follow-up tag|
|
||||
|
||||
|
||||
Slide Show Mode
|
||||
---------------
|
||||
In slide show mode a group shows only one file at a time at an increased size. Per file tag/category controls above the top right corner of the image, and large left and right buttons allow cycling through the files in the group. If the active file is an Autopsy supported video format, video playback controls appear below the video.
|
||||
|
||||
Table/Tree of contents
|
||||
----------------------
|
||||
The section in the top left with tabs labeled “Contents” and “Hash Hits” provides an overview of the groups of files in the case. It changes to reflect the current Group By setting: for hierarchical groupings (path) it shows a tree of folders (folders containing images/videos (groups) are marked with a distinctive icon ), and for other groupings it shows only a flat list.
|
||||
|
||||
Each group shows the number of files that hit against configured Hash DBs during ingest (hash hits) and the total number of image/video files as a ratio (hash hits / total) after its name. By selecting groups in the tree/list you can navigate directly to them in the main display area. If the Hash Hits tab is selected only groups containing files that have hash hits are shown.
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Quick Start
|
||||
----------
|
||||
|
||||
|
||||
|
||||
|