Merge branch 'documentation_and_cleanup' into timeline_and_image_analyzer

This commit is contained in:
jmillman 2014-09-05 14:03:40 -04:00
commit 66b9776c8f
26 changed files with 650 additions and 773 deletions

View File

@ -1,166 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.imageanalyzer;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import javax.swing.SwingUtilities;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
/**
* Singleton aggregator for listeners that hook into case and ingest modules.
* This class depends on clients to hook up listeners to Autopsy.
*/
public class AutopsyListener {
private static final Logger LOGGER = Logger.getLogger(AutopsyListener.class.getName());
private final ImageAnalyzerController controller = ImageAnalyzerController.getDefault();
private final PropertyChangeListener ingestJobEventListener = new IngestJobEventListener();
private final PropertyChangeListener ingestModuleEventListener = new IngestModuleEventListener();
private final PropertyChangeListener caseListener = new CaseListener();
public PropertyChangeListener getIngestJobEventListener() {
return ingestJobEventListener;
}
public PropertyChangeListener getIngestModuleEventListener() {
return ingestModuleEventListener;
}
public PropertyChangeListener getCaseListener() {
return caseListener;
}
private static AutopsyListener instance;
private AutopsyListener() {
}
synchronized public static AutopsyListener getDefault() {
if (instance == null) {
instance = new AutopsyListener();
}
return instance;
}
/**
* listener for ingest events
*/
private class IngestJobEventListener implements PropertyChangeListener {
@Override
synchronized public void propertyChange(PropertyChangeEvent evt) {
switch (IngestManager.IngestJobEvent.valueOf(evt.getPropertyName())) {
//TODO can we do anything useful here?
}
}
}
/**
* listener for ingest events
*/
private class IngestModuleEventListener implements PropertyChangeListener {
@Override
synchronized public void propertyChange(PropertyChangeEvent evt) {
switch (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName())) {
case CONTENT_CHANGED:
//TODO: do we need to do anything here? -jm
break;
case DATA_ADDED:
/* we could listen to DATA events and progressively update
* files, and get data from DataSource ingest modules, but
* given that most modules don't post new artifacts in the
* events and we would have to query for them, without
* knowing which are the new ones, we just ignore these
* events for now. The relevant data should all be captured
* by the file-done event anyway -jm */
break;
case FILE_DONE:
/**
* getOldValue has fileID, getNewValue has {@link AbstractFile}
*
* {@link IngestManager#fireModuleDataEvent(org.sleuthkit.autopsy.ingest.ModuleDataEvent) fireModuleDataEvent}
*/
AbstractFile file = (AbstractFile) evt.getNewValue();
if (controller.isListeningEnabled()) {
if (ImageAnalyzerModule.isSupportedAndNotKnown(file)) {
//this file should be included and we don't already know about it from hash sets (NSRL)
controller.queueTask(controller.new UpdateFileTask(file));
} else if (ImageAnalyzerModule.getAllSupportedExtensions().contains(file.getNameExtension())) {
//doing this check results in fewer tasks queued up, and faster completion of db update
//this file would have gotten scooped up in initial grab, but actually we don't need it
controller.queueTask(controller.new RemoveFile(file));
}
} else {
controller.setStale(true);
//TODO: keep track of what we missed for later
}
break;
}
}
}
/**
* listener for case events
*/
private class CaseListener implements PropertyChangeListener {
@Override
synchronized public void propertyChange(PropertyChangeEvent evt) {
switch (Case.Events.valueOf(evt.getPropertyName())) {
case CURRENT_CASE:
Case newCase = (Case) evt.getNewValue();
if (newCase != null) { // case has been opened
//connect db, groupmanager, start worker thread
controller.setCase(newCase);
} else { // case is closing
//close window
SwingUtilities.invokeLater(ImageAnalyzerModule::closeTopComponent);
controller.reset();
}
break;
case DATA_SOURCE_ADDED:
//copy all file data to drawable database
Content newDataSource = (Content) evt.getNewValue();
if (controller.isListeningEnabled()) {
controller.queueTask(controller.new PrePopulateDataSourceFiles(newDataSource.getId()));
} else {
controller.setStale(true);
//TODO: keep track of what we missed for later
}
break;
}
}
}
}

View File

@ -1,207 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.imageanalyzer;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import javafx.beans.property.SimpleIntegerProperty;
import javafx.embed.swing.SwingFXUtils;
import javafx.scene.image.Image;
import javafx.scene.image.WritableImage;
import javax.imageio.IIOException;
import javax.imageio.ImageIO;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.openide.util.Exceptions;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.corelibs.ScalrWrapper;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.imageanalyzer.datamodel.DrawableFile;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.TskCoreException;
/** Manages creation and access of icons. Keeps a cache in memory of most
* recently used icons, and a disk cache of all icons. */
public class IconCache {
static private IconCache instance;
private static final int MAX_ICON_SIZE = 300;
private static final Logger LOGGER = Logger.getLogger(IconCache.class.getName());
private static final Cache<Long, Optional<Image>> cache = CacheBuilder.newBuilder().maximumSize(1000).softValues().expireAfterAccess(10, TimeUnit.MINUTES).build();
public SimpleIntegerProperty iconSize = new SimpleIntegerProperty(200);
private final Executor imageSaver = Executors.newSingleThreadExecutor(new BasicThreadFactory.Builder().namingPattern("icon saver-%d").build());
private IconCache() {
}
synchronized static public IconCache getDefault() {
if (instance == null) {
instance = new IconCache();
}
return instance;
}
public Image get(DrawableFile<?> file) {
try {
return cache.get(file.getId(), () -> load(file)).orElse(null);
} catch (CacheLoader.InvalidCacheLoadException | ExecutionException ex) {
LOGGER.log(Level.WARNING, "failed to load icon for file: " + file.getName(), ex);
return null;
}
}
public Image get(Long fileID) {
try {
return get(ImageAnalyzerController.getDefault().getFileFromId(fileID));
} catch (TskCoreException ex) {
Exceptions.printStackTrace(ex);
return null;
}
}
public Optional<Image> load(DrawableFile<?> file) throws IIOException {
Image icon = null;
File cacheFile;
try {
cacheFile = getCacheFile(file.getId());
} catch (IllegalStateException e) {
LOGGER.log(Level.WARNING, "can't load icon when no case is open");
return Optional.empty();
}
// If a thumbnail file is already saved locally
if (cacheFile.exists()) {
try {
int dim = iconSize.get();
icon = new Image(cacheFile.toURI().toURL().toString(), dim, dim, true, false, true);
} catch (MalformedURLException ex) {
Exceptions.printStackTrace(ex);
}
}
if (icon == null) {
// Logger.getAnonymousLogger().warning("wrong size cache found for image " + getName());
icon = generateAndSaveIcon(file);
}
return Optional.ofNullable(icon);
}
private static File getCacheFile(long id) {
return new File(Case.getCurrentCase().getCacheDirectory() + File.separator + id + ".png");
}
private Image generateAndSaveIcon(final DrawableFile<?> file) {
Image img;
//TODO: should we wrap this in a BufferedInputStream? -jm
try (ReadContentInputStream inputStream = new ReadContentInputStream(file.getAbstractFile())) {
img = new Image(inputStream, MAX_ICON_SIZE, MAX_ICON_SIZE, true, true);
if (img.isError()) {
LOGGER.log(Level.WARNING, "problem loading image: {0}. {1}", new Object[]{file.getName(), img.getException().getLocalizedMessage()});
return fallbackToSwingImage(file);
} else {
imageSaver.execute(() -> {
saveIcon(file, img);
});
}
} catch (IOException ex) {
return fallbackToSwingImage(file);
}
return img;
}
/* Generate a scaled image */
private Image fallbackToSwingImage(final DrawableFile<?> file) {
final BufferedImage generateSwingIcon = generateSwingIcon(file);
if (generateSwingIcon != null) {
WritableImage toFXImage = SwingFXUtils.toFXImage(generateSwingIcon, null);
if (toFXImage != null) {
imageSaver.execute(() -> {
saveIcon(file, toFXImage);
});
}
return toFXImage;
} else {
return null;
}
}
private BufferedImage generateSwingIcon(DrawableFile<?> file) {
try (ReadContentInputStream inputStream = new ReadContentInputStream(file.getAbstractFile())) {
BufferedImage bi = ImageIO.read(inputStream);
if (bi == null) {
LOGGER.log(Level.WARNING, "No image reader for file: {0}", file.getName());
return null;
} else {
try {
if (Math.max(bi.getWidth(), bi.getHeight()) > MAX_ICON_SIZE) {
bi = ScalrWrapper.resizeFast(bi, iconSize.get());
}
} catch (IllegalArgumentException e) {
LOGGER.log(Level.WARNING, "scalr could not scale image to 0: {0}", file.getName());
} catch (OutOfMemoryError e) {
LOGGER.log(Level.WARNING, "scalr could not scale image (too large): {0}", file.getName());
return null;
}
}
return bi;
} catch (IOException ex) {
LOGGER.log(Level.WARNING, "Could not read image: " + file.getName(), ex);
return null;
}
}
private void saveIcon(final DrawableFile<?> file, final Image bi) {
try {
/* save the icon in a background thread. profiling
* showed that it can take as much time as making
* the icon? -bc
*
* We don't do this now as it doesn't fit the
* current model of ui-related background tasks,
* and there might be complications to not just
* blocking (e.g. having more than one task to
* create the same icon) -jm */
File f = getCacheFile(file.getId());
ImageIO.write(SwingFXUtils.fromFXImage(bi, null), "png", f);
} catch (IOException ex) {
LOGGER.log(Level.WARNING, "failed to save generated icon ", ex);
}
}
}

View File

@ -19,38 +19,25 @@
package org.sleuthkit.autopsy.imageanalyzer;
import java.beans.PropertyChangeEvent;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.logging.Level;
import javafx.application.Platform;
import javafx.beans.Observable;
import javafx.beans.property.ReadOnlyBooleanProperty;
import javafx.beans.property.ReadOnlyBooleanWrapper;
import javafx.beans.property.ReadOnlyDoubleProperty;
import javafx.beans.property.ReadOnlyIntegerProperty;
import javafx.beans.property.ReadOnlyIntegerWrapper;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.property.ReadOnlyObjectWrapper;
import javafx.beans.property.SimpleDoubleProperty;
import javafx.beans.property.SimpleListProperty;
import javafx.beans.property.SimpleObjectProperty;
import javafx.beans.property.SimpleStringProperty;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.concurrent.Task;
import javafx.concurrent.Worker;
import static javafx.concurrent.Worker.State.CANCELLED;
import static javafx.concurrent.Worker.State.FAILED;
import static javafx.concurrent.Worker.State.READY;
import static javafx.concurrent.Worker.State.RUNNING;
import static javafx.concurrent.Worker.State.SCHEDULED;
import static javafx.concurrent.Worker.State.SUCCEEDED;
import javafx.geometry.Insets;
import javafx.scene.Node;
import javafx.scene.control.ProgressIndicator;
@ -61,22 +48,18 @@ import javafx.scene.layout.Region;
import javafx.scene.layout.StackPane;
import javafx.scene.paint.Color;
import javax.annotation.concurrent.GuardedBy;
import javax.swing.SwingUtilities;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.progress.ProgressHandleFactory;
import org.openide.util.Exceptions;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.History;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.imageanalyzer.datamodel.Category;
import org.sleuthkit.autopsy.imageanalyzer.datamodel.DrawableAttribute;
import org.sleuthkit.autopsy.imageanalyzer.datamodel.DrawableDB;
import org.sleuthkit.autopsy.imageanalyzer.datamodel.DrawableFile;
import org.sleuthkit.autopsy.imageanalyzer.grouping.GroupKey;
import org.sleuthkit.autopsy.imageanalyzer.grouping.GroupManager;
import org.sleuthkit.autopsy.imageanalyzer.grouping.GroupViewState;
import org.sleuthkit.autopsy.imageanalyzer.grouping.Grouping;
import org.sleuthkit.autopsy.imageanalyzer.gui.NoGroupsDialog;
import org.sleuthkit.autopsy.imageanalyzer.gui.SummaryTablePane;
import org.sleuthkit.autopsy.imageanalyzer.gui.Toolbar;
@ -84,6 +67,7 @@ import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@ -92,7 +76,7 @@ import org.sleuthkit.datamodel.TskData;
* Connects different parts of ImageAnalyzer together and is hub for flow of
* control.
*/
public final class ImageAnalyzerController implements FileUpdateEvent.FileUpdateListener {
public final class ImageAnalyzerController {
private static final Logger LOGGER = Logger.getLogger(ImageAnalyzerController.class.getName());
@ -159,18 +143,6 @@ public final class ImageAnalyzerController implements FileUpdateEvent.FileUpdate
return historyManager.currentState();
}
/**
* the list of tasks queued to run in the uiBGTaskExecutor. By keeping this
* list we can cancel them more gracefully than by {@link ExecutorService#shutdownNow()}
*/
@GuardedBy("bgTasks")
private final SimpleListProperty<Future<?>> bgTasks = new SimpleListProperty<>(FXCollections.observableArrayList());
/**
* an executor to submit async ui related background tasks to.
*/
final ExecutorService bgTaskExecutor = Executors.newSingleThreadExecutor(new BasicThreadFactory.Builder().namingPattern("ui task -%d").build());
public synchronized FileIDSelectionModel getSelectionModel() {
return selectionModel;
@ -213,7 +185,7 @@ public final class ImageAnalyzerController implements FileUpdateEvent.FileUpdate
listeningEnabled.addListener((ObservableValue<? extends Boolean> observable, Boolean oldValue, Boolean newValue) -> {
if (newValue && !oldValue && Case.existsCurrentCase() && ImageAnalyzerModule.isCaseStale(Case.getCurrentCase())) {
queueTask(new CopyAnalyzedFiles());
queueDBWorkerTask(new CopyAnalyzedFiles());
}
});
@ -245,41 +217,6 @@ public final class ImageAnalyzerController implements FileUpdateEvent.FileUpdate
// metaDataCollapsed.bind(Toolbar.getDefault().showMetaDataProperty());
}
/**
* submit a background {@link Task} to be queued for execution by the thread
* pool.
*
* @param task
*/
@SuppressWarnings("fallthrough")
public void submitBGTask(final Task<?> task) {
//listen to task state and remove task from list of tasks once it is 'done'
task.stateProperty().addListener((ObservableValue<? extends Worker.State> observableState, Worker.State oldState, Worker.State newState) -> {
switch (newState) {
case READY:
case SCHEDULED:
case RUNNING:
break;
case FAILED:
LOGGER.log(Level.WARNING, "task :" + task.getTitle() + " failed", task.getException());
case CANCELLED:
case SUCCEEDED:
Platform.runLater(() -> {
synchronized (bgTasks) {
bgTasks.remove(task);
}
});
break;
}
});
synchronized (bgTasks) {
bgTasks.add(task);
}
bgTaskExecutor.execute(task);
}
synchronized public ReadOnlyBooleanProperty getCanAdvance() {
return historyManager.getCanAdvance();
}
@ -378,14 +315,14 @@ public final class ImageAnalyzerController implements FileUpdateEvent.FileUpdate
}
/**
* initialize the controller for a specific case.
* onStart the controller for a specific case.
*
* @param c
*/
public synchronized void setCase(Case c) {
this.db = DrawableDB.getDrawableDB(c.getCaseDirectory(), this);
db.addUpdatedFileListener(this);
setListeningEnabled(ImageAnalyzerModule.isEnabledforCase(c));
setStale(ImageAnalyzerModule.isCaseStale(c));
@ -397,70 +334,6 @@ public final class ImageAnalyzerController implements FileUpdateEvent.FileUpdate
SummaryTablePane.getDefault().handleCategoryChanged(Collections.emptyList());
}
/**
* handle {@link FileUpdateEvent} sent from Db when files are
* inserted/updated
*
* @param evt
*/
@Override
synchronized public void handleFileUpdate(FileUpdateEvent evt) {
final Collection<Long> fileIDs = evt.getUpdatedFiles();
switch (evt.getUpdateType()) {
case FILE_REMOVED:
for (final long fileId : fileIDs) {
//get grouping(s) this file would be in
Set<GroupKey<?>> groupsForFile = groupManager.getGroupKeysForFileID(fileId);
for (GroupKey<?> gk : groupsForFile) {
groupManager.removeFromGroup(gk, fileId);
}
}
break;
case FILE_UPDATED:
/**
* TODO: is there a way to optimize this to avoid querying the db
* so much. The problem is that as new files are analyzed they
* might be in new groups (if we are grouping by, say, make or
* model).
*
* TODO: Should this be an InnerTask so it can be done by the
* WorkerThread? Is it already done by the worker thread because
* handleFileUpdate is invoked through a call on the db in the UpdateTask
* InnerTask? -jm
*/
for (final long fileId : fileIDs) {
//get grouping(s) this file would be in
Set<GroupKey<?>> groupsForFile = groupManager.getGroupKeysForFileID(fileId);
for (GroupKey<?> gk : groupsForFile) {
Grouping g = groupManager.getGroupForKey(gk);
if (g != null) {
//if there is already a group that was previously deemed fully analyzed, then add this newly analyzed file to it.
g.addFile(fileId);
} else {
//if there wasn't already a group check if there should be one now
//TODO: use method in groupmanager ?
List<Long> checkAnalyzed = groupManager.checkAnalyzed(gk);
if (checkAnalyzed != null) { // => the group is analyzed, so add it to the ui
groupManager.populateAnalyzedGroup(gk, checkAnalyzed);
}
}
}
}
Category.fireChange(fileIDs);
if (evt.getChangedAttribute() == DrawableAttribute.TAGS) {
TagUtils.fireChange(fileIDs);
}
break;
}
}
/**
* reset the state of the controller (eg if the case is closed)
*/
@ -484,7 +357,7 @@ public final class ImageAnalyzerController implements FileUpdateEvent.FileUpdate
*
* @param innerTask
*/
final void queueTask(InnerTask innerTask) {
final void queueDBWorkerTask(InnerTask innerTask) {
// @@@ We could make a lock for the worker thread
if (dbWorkerThread == null) {
restartWorker();
@ -506,14 +379,147 @@ public final class ImageAnalyzerController implements FileUpdateEvent.FileUpdate
return queueSizeProperty.getReadOnlyProperty();
}
public ReadOnlyIntegerProperty bgTaskQueueSizeProperty() {
return bgTasks.sizeProperty();
public ReadOnlyDoubleProperty regroupProgress() {
return groupManager.regroupProgress();
}
/** invoked by {@link OnStart} to make sure that the ImageAnalyzer listeners
* get set up as early as possible, and do other setup stuff. */
void onStart() {
Platform.setImplicitExit(false);
LOGGER.info("setting up ImageAnalyzer listeners");
//TODO can we do anything useful in an IngestJobEventListener?
//IngestManager.getInstance().addIngestJobEventListener((PropertyChangeEvent evt) -> {});
IngestManager.getInstance().addIngestModuleEventListener((PropertyChangeEvent evt) -> {
switch (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName())) {
case CONTENT_CHANGED:
//TODO: do we need to do anything here? -jm
case DATA_ADDED:
/* we could listen to DATA events and progressively
* update files, and get data from DataSource ingest
* modules, but given that most modules don't post new
* artifacts in the events and we would have to query for
* them, without knowing which are the new ones, we just
* ignore these events for now. The relevant data should all
* be captured by the file-done event anyway -jm */
break;
case FILE_DONE:
/** getOldValue has fileID
* getNewValue has {@link AbstractFile} */
AbstractFile file = (AbstractFile) evt.getNewValue();
if (isListeningEnabled()) {
if (ImageAnalyzerModule.isSupportedAndNotKnown(file)) {
//this file should be included and we don't already know about it from hash sets (NSRL)
queueDBWorkerTask(new UpdateFileTask(file));
} else if (ImageAnalyzerModule.getAllSupportedExtensions().contains(file.getNameExtension())) {
//doing this check results in fewer tasks queued up, and faster completion of db update
//this file would have gotten scooped up in initial grab, but actually we don't need it
queueDBWorkerTask(new RemoveFileTask(file));
}
} else { //TODO: keep track of what we missed for later
setStale(true);
}
break;
}
});
Case.addPropertyChangeListener((PropertyChangeEvent evt) -> {
switch (Case.Events.valueOf(evt.getPropertyName())) {
case CURRENT_CASE:
Case newCase = (Case) evt.getNewValue();
if (newCase != null) { // case has been opened
setCase(newCase); //connect db, groupmanager, start worker thread
} else { // case is closing
//close window, reset everything
SwingUtilities.invokeLater(ImageAnalyzerModule::closeTopComponent);
reset();
}
break;
case DATA_SOURCE_ADDED:
//copy all file data to drawable database
Content newDataSource = (Content) evt.getNewValue();
if (isListeningEnabled()) {
queueDBWorkerTask(new PrePopulateDataSourceFiles(newDataSource.getId()));
} else {//TODO: keep track of what we missed for later
setStale(true);
}
break;
}
});
}
// @@@ REVIEW IF THIS SHOULD BE STATIC...
//TODO: conceptually it seems like the controller should decide how much work to do at a given time
// @@@ review this class for synchronization issues (i.e. reset and cancel being called, add, etc.)
private class DBWorkerThread implements Runnable {
// true if the process was requested to stop. Currently no way to reset it
private volatile boolean cancelled = false;
// list of tasks to run
private final BlockingQueue<InnerTask> workQueue = new LinkedBlockingQueue<>();
/**
* Cancel all of the queued up tasks and the currently scheduled task.
* Note that after you cancel, you cannot submit new jobs to this
* thread.
*/
public void cancelAllTasks() {
cancelled = true;
for (InnerTask it : workQueue) {
it.cancel();
}
workQueue.clear();
queueSizeProperty.set(workQueue.size());
}
/**
* Add a task for the worker thread to perform
*
* @param it
*/
public void addTask(InnerTask it) {
workQueue.add(it);
Platform.runLater(() -> {
queueSizeProperty.set(workQueue.size());
});
}
@Override
public void run() {
// nearly infinite loop waiting for tasks
while (true) {
if (cancelled) {
return;
}
try {
// @@@ Could probably do something more fancy here and check if we've been canceled every now and then
InnerTask it = workQueue.take();
if (it.cancelled == false) {
it.run();
}
Platform.runLater(() -> {
queueSizeProperty.set(workQueue.size());
});
} catch (InterruptedException ex) {
Exceptions.printStackTrace(ex);
}
}
}
}
public SleuthkitCase getSleuthKitCase() throws IllegalStateException {
if (Case.isCaseOpen()) {
return Case.getCurrentCase().getSleuthkitCase();
} else {
throw new IllegalStateException("No Case is open!");
}
}
/**
*
* Abstract base class for task to be done on {@link DBWorkerThread}
*/
public static abstract class ProgressBase {
static private abstract class InnerTask implements Runnable {
public double getProgress() {
return progress.get();
@ -554,82 +560,8 @@ public final class ImageAnalyzerController implements FileUpdateEvent.FileUpdate
return new ReadOnlyObjectWrapper<>(state.get());
}
public ProgressBase() {
super();
protected InnerTask() {
}
}
// @@@ REVIEW IF THIS SHOULD BE STATIC...
//TODO: conceptually it seems like the controller should decide how much work to do at a given time
// @@@ review this class for synchronization issues (i.e. reset and cancel being called, add, etc.)
private class DBWorkerThread implements Runnable {
// true if the process was requested to stop. Currently no way to reset it
private volatile boolean cancelled = false;
// list of tasks to run
private final BlockingQueue<InnerTask> workQueue = new LinkedBlockingQueue<>();
/**
* Cancel all of the queued up tasks and the currently scheduled task.
* Note that after you cancel, you cannot submit new jobs to this
* thread.
*/
public void cancelAllTasks() {
cancelled = true;
for (InnerTask it : workQueue) {
it.cancel();
}
workQueue.clear();
queueSizeProperty.set(workQueue.size());
}
/**
* Add a task for the worker thread to perform
*
* @param it
*/
public void addTask(InnerTask it) {
workQueue.add(it);
queueSizeProperty.set(workQueue.size());
}
@Override
public void run() {
// nearly infinite loop waiting for tasks
while (true) {
if (cancelled) {
return;
}
try {
// @@@ Could probably do something more fancy here and check if we've been canceled every now and then
InnerTask it = workQueue.take();
if (it.cancelled == false) {
it.run();
}
queueSizeProperty.set(workQueue.size());
} catch (InterruptedException ex) {
Exceptions.printStackTrace(ex);
}
}
}
}
public SleuthkitCase getSleuthKitCase() throws IllegalStateException {
if (Case.isCaseOpen()) {
return Case.getCurrentCase().getSleuthkitCase();
} else {
throw new IllegalStateException("No Case is open!");
}
}
/**
* Abstract base class for task to be done on {@link DBWorkerThread}
*/
static public abstract class InnerTask extends ProgressBase implements Runnable {
protected volatile boolean cancelled = false;
@ -642,27 +574,10 @@ public final class ImageAnalyzerController implements FileUpdateEvent.FileUpdate
}
}
/**
* Abstract base class for tasks associated with an obj id in the database
*/
static private abstract class TaskWithID extends InnerTask {
protected Long obj_id; // id of image or file
public TaskWithID(Long id) {
super();
this.obj_id = id;
}
public Long getId() {
return obj_id;
}
}
/**
* Abstract base class for tasks associated with a file in the database
*/
static private abstract class TaskWithFile extends InnerTask {
static private abstract class FileTask extends InnerTask {
private final AbstractFile file;
@ -670,19 +585,17 @@ public final class ImageAnalyzerController implements FileUpdateEvent.FileUpdate
return file;
}
public TaskWithFile(AbstractFile f) {
public FileTask(AbstractFile f) {
super();
this.file = f;
}
}
/**
* task that updates one file in database with results from ingest
*/
class UpdateFileTask extends TaskWithFile {
private class UpdateFileTask extends FileTask {
public UpdateFileTask(AbstractFile f) {
super(f);
@ -701,9 +614,9 @@ public final class ImageAnalyzerController implements FileUpdateEvent.FileUpdate
/**
* task that updates one file in database with results from ingest
*/
class RemoveFile extends TaskWithFile {
private class RemoveFileTask extends FileTask {
public RemoveFile(AbstractFile f) {
public RemoveFileTask(AbstractFile f) {
super(f);
}
@ -818,20 +731,23 @@ public final class ImageAnalyzerController implements FileUpdateEvent.FileUpdate
* TODO: create methods to simplify progress value/text updates to both
* netbeans and ImageAnalyzer progress/status
*/
class PrePopulateDataSourceFiles extends TaskWithID {
class PrePopulateDataSourceFiles extends InnerTask {
private final Long id; // id of image or file
/**
* here we grab files by extension, but in the file_done listener we look at
* file type id attributes, falling back on jpeg signatures and extensions to
* check for supported images
*/
// (name like '.jpg' or name like '.png' ...)
final private String DRAWABLE_QUERY = "name LIKE '%." + StringUtils.join(ImageAnalyzerModule.getAllSupportedExtensions(), "' or name LIKE '%.") + "'";
private final String DRAWABLE_QUERY = "name LIKE '%." + StringUtils.join(ImageAnalyzerModule.getAllSupportedExtensions(), "' or name LIKE '%.") + "'";
private ProgressHandle progressHandle = ProgressHandleFactory.createHandle("prepopulating image/video database");
public PrePopulateDataSourceFiles(Long id) {
super(id);
super();
this.id = id;
}
/**
@ -847,7 +763,7 @@ public final class ImageAnalyzerController implements FileUpdateEvent.FileUpdate
* add/remove files */
final List<AbstractFile> files;
try {
files = getSleuthKitCase().findAllFilesWhere(DRAWABLE_QUERY + "and fs_obj_id = " + this.obj_id);
files = getSleuthKitCase().findAllFilesWhere(DRAWABLE_QUERY + "and fs_obj_id = " + this.id);
progressHandle.switchToDeterminate(files.size());
//do in transaction
@ -880,4 +796,5 @@ public final class ImageAnalyzerController implements FileUpdateEvent.FileUpdate
progressHandle.finish();
}
}
}
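
Aside: the DBWorkerThread above is a single thread draining a BlockingQueue of cancellable tasks. A minimal, self-contained sketch of that pattern, with purely illustrative names (these are not the Autopsy classes), could look like this:

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

// Sketch of a single-threaded, cancellable work queue.
class SimpleWorker implements Runnable {

    static abstract class CancellableTask implements Runnable {
        volatile boolean cancelled = false;
        void cancel() { cancelled = true; }
    }

    // set once cancelAll() is called; like the original, it is only checked at
    // the top of the loop, so a worker blocked in take() exits lazily
    private volatile boolean shutDown = false;
    private final BlockingQueue<CancellableTask> queue = new LinkedBlockingQueue<>();

    void submit(CancellableTask task) {
        queue.add(task);
    }

    void cancelAll() {
        shutDown = true;
        for (CancellableTask t : queue) {
            t.cancel();
        }
        queue.clear();
    }

    @Override
    public void run() {
        while (!shutDown) {
            try {
                CancellableTask task = queue.take(); // blocks until work arrives
                if (!task.cancelled) {
                    task.run();
                }
            } catch (InterruptedException ex) {
                Thread.currentThread().interrupt();
                return;
            }
        }
    }

    public static void main(String[] args) {
        SimpleWorker worker = new SimpleWorker();
        new Thread(worker, "db-worker").start();
        worker.submit(new CancellableTask() {
            @Override
            public void run() {
                System.out.println("task ran");
            }
        });
    }
}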

View File

@ -18,10 +18,7 @@
*/
package org.sleuthkit.autopsy.imageanalyzer;
import javafx.application.Platform;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestManager;
/**
*
@ -34,21 +31,13 @@ public class OnStart implements Runnable {
static private final Logger LOGGER = Logger.getLogger(OnStart.class.getName());
/**
* make sure that the ImageAnalyzer listeners get setup as early as
* possible, and do other setup stuff.
*
* This method is invoked by virtue of the {@link OnStart} annotation on the
* {@link ImageAnalyzerModule} class
*/
@Override
public void run() {
Platform.setImplicitExit(false);
LOGGER.info("setting up ImageAnalyzer listeners");
IngestManager.getInstance().addIngestJobEventListener(AutopsyListener.getDefault().getIngestJobEventListener());
IngestManager.getInstance().addIngestModuleEventListener(AutopsyListener.getDefault().getIngestModuleEventListener());
Case.addPropertyChangeListener(AutopsyListener.getDefault().getCaseListener());
ImageAnalyzerController.getDefault().onStart();
}
}
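
For reference, the NetBeans platform runs any Runnable whose class carries the org.openide.modules.OnStart annotation once the module system is ready, which is the mechanism the javadoc above refers to. A minimal illustrative sketch (the class name here is hypothetical):

import org.openide.modules.OnStart;

// Sketch only: a Runnable annotated with @OnStart (and having a public
// no-argument constructor) is instantiated and run by the NetBeans module
// system at startup.
@OnStart
public class StartupHook implements Runnable {

    @Override
    public void run() {
        // one-time module setup, e.g. registering case and ingest listeners
        System.out.println("module system is ready");
    }
}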

View File

@ -0,0 +1,255 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.imageanalyzer;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import java.awt.image.BufferedImage;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import javafx.beans.property.SimpleIntegerProperty;
import javafx.embed.swing.SwingFXUtils;
import javafx.scene.image.Image;
import javafx.scene.image.WritableImage;
import javax.imageio.ImageIO;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.openide.util.Exceptions;
import org.openide.util.Utilities;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.corelibs.ScalrWrapper;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.imageanalyzer.datamodel.DrawableFile;
import org.sleuthkit.autopsy.imageanalyzer.gui.Toolbar;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.TskCoreException;
/** Singleton to manage creation and access of icons. Keeps a cache in memory of
* most recently used icons, and a disk cache of all icons.
*
* TODO: this was only a singleton for convenience, convert this to
* non-singleton class -jm?
*/
public enum ThumbnailCache {
instance;
/** save thumbnails to disk as this format */
private static final String FORMAT = "png";
private static final int MAX_ICON_SIZE = 300;
private static final Logger LOGGER = Logger.getLogger(ThumbnailCache.class.getName());
/** in memory cache. keeps at most 1000 items each for up to 10 minutes.
* items may be garbage collected if there are no strong references to them.
*/
private final Cache<Long, Optional<Image>> cache = CacheBuilder.newBuilder()
.maximumSize(1000)
.softValues()
.expireAfterAccess(10, TimeUnit.MINUTES).build();
public static ThumbnailCache getDefault() {
return instance;
}
/** currently desired icon size. is bound in {@link Toolbar} */
public final SimpleIntegerProperty iconSize = new SimpleIntegerProperty(200);
/** thread that saves generated thumbnails to disk for use later */
private final Executor imageSaver = Executors.newSingleThreadExecutor(new BasicThreadFactory.Builder().namingPattern("icon saver-%d").build());
/** get the cached thumbnail for the given file or generate a new one if
* needed
*
* @param file
*
* @return a thumbnail for the given file, returns null if the thumbnail
* could not be generated
*/
public Image get(DrawableFile<?> file) {
try {
return cache.get(file.getId(), () -> load(file)).orElse(null);
} catch (CacheLoader.InvalidCacheLoadException | ExecutionException ex) {
LOGGER.log(Level.WARNING, "failed to load icon for file: " + file.getName(), ex);
return null;
}
}
public Image get(Long fileID) {
try {
return get(ImageAnalyzerController.getDefault().getFileFromId(fileID));
} catch (TskCoreException ex) {
Exceptions.printStackTrace(ex);
return null;
}
}
/**
* load a thumbnail from the disk based cache for the given file, or
* generate and save a new thumbnail if one doesn't already exist
*
* @param file the file to load a thumbnail of
*
* @return an optional containing a thumbnail, or an empty optional if a
* thumbnail couldn't be loaded or generated
*/
private Optional<Image> load(DrawableFile<?> file) {
Image thumbnail = null;
File cacheFile;
try {// try to load the thumbnail from disk
cacheFile = getCacheFile(file.getId());
if (cacheFile.exists()) {
// If a thumbnail file is already saved locally, load it
try {
int dim = iconSize.get();
thumbnail = new Image(Utilities.toURI(cacheFile).toURL().toString(), dim, dim, true, false, true);
} catch (MalformedURLException ex) {
Exceptions.printStackTrace(ex);
}
}
} catch (IllegalStateException e) {
LOGGER.log(Level.WARNING, "can't load icon when no case is open");
return Optional.empty();
}
if (thumbnail == null) { //if we failed to load the icon, try to generate it
thumbnail = generateAndSaveThumbnail(file);
}
return Optional.ofNullable(thumbnail); //return icon, or null if generation failed
}
private static File getCacheFile(long id) {
return new File(Case.getCurrentCase().getCacheDirectory() + File.separator + id + ".png");
}
/**
* generate a new thumbnail for the given file and save it to the disk cache
*
* @param file
*
* @return the newly generated thumbnail {@link Image}, or {@code null} if a
* thumbnail could not be generated
*/
private Image generateAndSaveThumbnail(final DrawableFile<?> file) {
//create a buffered input stream for the underlying AbstractFile
try (InputStream inputStream = new BufferedInputStream(new ReadContentInputStream(file.getAbstractFile()))) {
final Image thumbnail = new Image(inputStream, MAX_ICON_SIZE, MAX_ICON_SIZE, true, true);
if (thumbnail.isError()) { //if there was an error loading the image via JFX, fall back on Swing
LOGGER.log(Level.WARNING, "problem loading image: " + file.getName() + " .", thumbnail.getException());
return fallbackToSwingImage(file);
} else { //if the load went successfully, save the thumbnail to disk on a background thread
imageSaver.execute(() -> {
saveIcon(file, thumbnail);
});
return thumbnail;
}
} catch (IOException ex) {
//if the JFX load throws an exception, fall back to Swing
return fallbackToSwingImage(file);
}
}
/**
* use Swing to generate and save a thumbnail for the given file
*
* @param file
*
* @return a thumbnail generated for the given file, or {@code null} if a
* thumbnail could not be generated
*/
private Image fallbackToSwingImage(final DrawableFile<?> file) {
final BufferedImage generateSwingIcon = generateSwingThumbnail(file);
if (generateSwingIcon == null) { //if Swing failed,
return null; //propagate failure up the call stack.
} else {//Swing load succeeded, convert to JFX Image
final WritableImage toFXImage = SwingFXUtils.toFXImage(generateSwingIcon, null);
if (toFXImage != null) { //if conversion succeeded save to disk cache
imageSaver.execute(() -> {
saveIcon(file, toFXImage);
});
}
return toFXImage; //could be null
}
}
/**
* use Swing/ImageIO to generate a thumbnail for the given file
*
* @param file
*
* @return a BufferedImage thumbnail for the given file, or {@code null} if a
* thumbnail could not be generated
*/
private BufferedImage generateSwingThumbnail(DrawableFile<?> file) {
//create a buffered input stream for the underlying AbstractFile
try (InputStream inputStream = new BufferedInputStream(new ReadContentInputStream(file.getAbstractFile()))) {
BufferedImage bi = ImageIO.read(inputStream);
if (bi != null) {
try { // resize (shrink) the buffered image if needed
if (Math.max(bi.getWidth(), bi.getHeight()) > MAX_ICON_SIZE) {
bi = ScalrWrapper.resizeFast(bi, iconSize.get());
}
} catch (IllegalArgumentException e) {
//if scalr failed, just use unscaled image
LOGGER.log(Level.WARNING, "scalr could not scale image to 0: {0}", file.getName());
} catch (OutOfMemoryError e) {
LOGGER.log(Level.WARNING, "scalr could not scale image (too large): {0}", file.getName());
return null;
}
} else { //ImageIO failed to read the image
LOGGER.log(Level.WARNING, "No image reader for file: {0}", file.getName());
return null;
}
return bi;
} catch (IOException ex) {
LOGGER.log(Level.WARNING, "Could not read image: " + file.getName(), ex);
return null;
}
}
/**
* save the generated thumbnail to disk in the cache folder with
* the obj_id as the name.
*
* @param file the file the given image is a thumbnail for
* @param bi the thumbnail to save for the given DrawableFile
*/
private void saveIcon(final DrawableFile<?> file, final Image bi) {
try {
if (bi != null) {
File f = getCacheFile(file.getId());
//convert back to swing to save
ImageIO.write(SwingFXUtils.fromFXImage(bi, null), FORMAT, f);
}
} catch (IllegalArgumentException | IOException ex) {
LOGGER.log(Level.WARNING, "failed to save generated icon ", ex);
}
}
}
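
As a side note, the memory-cache layer above follows the common Guava pattern of caching an Optional so that "no thumbnail" can be represented (a Guava Cache cannot store null values). A stripped-down sketch of that lookup, with a placeholder loader standing in for the disk read / thumbnail generation and names that are illustrative rather than the Autopsy API:

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

// Bounded, soft-valued in-memory cache in front of a slower load step.
class TwoLevelThumbnailLookup {

    private final Cache<Long, Optional<String>> memoryCache = CacheBuilder.newBuilder()
            .maximumSize(1000)                       // keep at most 1000 entries
            .softValues()                            // let the GC reclaim entries under memory pressure
            .expireAfterAccess(10, TimeUnit.MINUTES) // drop entries unused for 10 minutes
            .build();

    // stand-in for "load from disk cache, else generate and save";
    // returns Optional.empty() when nothing could be produced
    private Optional<String> loadOrGenerate(long fileId) {
        return Optional.of("thumbnail-for-" + fileId);
    }

    String get(long fileId) {
        try {
            return memoryCache.get(fileId, () -> loadOrGenerate(fileId)).orElse(null);
        } catch (ExecutionException ex) {
            return null; // the loader threw; treat as "no thumbnail"
        }
    }

    public static void main(String[] args) {
        TwoLevelThumbnailLookup lookup = new TwoLevelThumbnailLookup();
        // the first call runs the loader; later calls for the same id hit the in-memory cache
        System.out.println(lookup.get(42L));
    }
}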

View File

@ -93,9 +93,8 @@ public class AddDrawableTagAction extends AddTagAction {
}
//make sure rest of ui hears category change.
ImageAnalyzerController.getDefault().handleFileUpdate(new FileUpdateEvent(Collections.singleton(fileID), DrawableAttribute.TAGS));
ImageAnalyzerController.getDefault().getGroupManager().handleFileUpdate(new FileUpdateEvent(Collections.singleton(fileID), DrawableAttribute.TAGS));
}
refreshDirectoryTree();

View File

@ -104,7 +104,7 @@ public class CategorizeAction extends AddTagAction {
Case.getCurrentCase().getServices().getTagsManager().addContentTag(file, tagName, comment);
}
//make sure rest of ui hears category change.
controller.handleFileUpdate(new FileUpdateEvent(Collections.singleton(fileID), DrawableAttribute.CATEGORY));
controller.getGroupManager().handleFileUpdate(new FileUpdateEvent(Collections.singleton(fileID), DrawableAttribute.CATEGORY));
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Error categorizing result", ex);

View File

@ -55,7 +55,7 @@ import org.sleuthkit.datamodel.TskCoreException;
/**
* @TODO: There is something I don't understand or have done wrong about
* implementing this class, as it is unreadable by
* {@link ReadContentInputStream}. As a work around I kept a reference to the
* {@link ReadContentInputStream}. As a work around we keep a reference to the
* original {@link AbstractFile} to use when reading the image. -jm
*/
public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile {
@ -80,8 +80,6 @@ public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile
private String drawablePath;
abstract public boolean isVideo();
protected T file;
private final SimpleBooleanProperty analyzed;
@ -90,11 +88,6 @@ public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile
private Collection<String> hashHitSetNames;
public Collection<String> getHashHitSetNames() {
updateHashSets();
return hashHitSetNames;
}
private String make;
private String model;
@ -102,7 +95,7 @@ public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile
protected DrawableFile(T file, Boolean analyzed) {
/* @TODO: the two 'new Integer(0).shortValue()' values and null are
* placeholders because the super constructor expects values I can't get
* easily at the moment */
* easily at the moment. I assume this is related to why ReadContentInputStream can't read from DrawableFiles.*/
super(file.getSleuthkitCase(), file.getId(), file.getAttrType(), file.getAttrId(), file.getName(), file.getType(), file.getMetaAddr(), (int) file.getMetaSeq(), file.getDirType(), file.getMetaType(), null, new Integer(0).shortValue(), file.getSize(), file.getCtime(), file.getCrtime(), file.getAtime(), file.getMtime(), new Integer(0).shortValue(), file.getUid(), file.getGid(), file.getMd5Hash(), file.getKnown(), file.getParentPath());
this.analyzed = new SimpleBooleanProperty(analyzed);
@ -110,6 +103,13 @@ public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile
}
public abstract boolean isVideo();
public Collection<String> getHashHitSetNames() {
updateHashSets();
return hashHitSetNames;
}
@SuppressWarnings("unchecked")
private void updateHashSets() {
hashHitSetNames = (Collection<String>) getValuesOfBBAttribute(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME);
@ -178,7 +178,7 @@ public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile
}
@Deprecated
final protected List<? extends Object> getValuesOfBBAttribute(BlackboardArtifact.ARTIFACT_TYPE artType, BlackboardAttribute.ATTRIBUTE_TYPE attrType) {
protected final List<? extends Object> getValuesOfBBAttribute(BlackboardArtifact.ARTIFACT_TYPE artType, BlackboardAttribute.ATTRIBUTE_TYPE attrType) {
ArrayList<Object> vals = new ArrayList<>();
try {
//why doesn't file.getArtifacts() work?
@ -285,7 +285,7 @@ public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile
}
}
public abstract Image getIcon();
public abstract Image getThumbnail();
public void setAnalyzed(Boolean analyzed) {
this.analyzed.set(analyzed);
@ -309,7 +309,6 @@ public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile
} else {
try {
drawablePath = StringUtils.removeEnd(getUniquePath(), getName());
// drawablePath = StringUtils.replaceEachRepeatedly(drawablePath, DOUBLE_SLASH, SLASH);
return drawablePath;
} catch (TskCoreException ex) {
Logger.getLogger(DrawableFile.class.getName()).log(Level.WARNING, "failed to get drawablePath from {0}", getName());
@ -317,17 +316,4 @@ public abstract class DrawableFile<T extends AbstractFile> extends AbstractFile
}
}
}
private long getRootID() throws TskCoreException {
Content myParent = getParent();
long id = -1;
while (myParent != null) {
id = myParent.getId();
myParent = myParent.getParent();
}
return id;
}
}

View File

@ -26,7 +26,7 @@ import javafx.embed.swing.SwingFXUtils;
import javafx.scene.image.Image;
import javax.imageio.ImageIO;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.imageanalyzer.IconCache;
import org.sleuthkit.autopsy.imageanalyzer.ThumbnailCache;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.ReadContentInputStream;
@ -47,8 +47,8 @@ public class ImageFile<T extends AbstractFile> extends DrawableFile<T> {
}
@Override
public Image getIcon() {
return IconCache.getDefault().get(this);
public Image getThumbnail() {
return ThumbnailCache.getDefault().get(this);
}

View File

@ -38,7 +38,7 @@ public class VideoFile<T extends AbstractFile> extends DrawableFile<T> {
}
@Override
public Image getIcon() {
public Image getThumbnail() {
//TODO: implement video thumbnailing here?
return VIDEO_ICON;
}

View File

@ -29,16 +29,16 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Represents a set of files in a group. The UI listens to changes to the group
* and updates itself accordingly.
*
* This class is named Grouping and not Group to avoid confusion with
* {@link javafx.scene.Group} and others.
* Represents a set of image/video files in a group. The UI listens to changes
* to the group membership and updates itself accordingly.
*/
public class Grouping {
public class DrawableGroup {
private static final Logger LOGGER = Logger.getLogger(Grouping.class.getName());
private static final Logger LOGGER = Logger.getLogger(DrawableGroup.class.getName());
/**
* the string to use when the groupkey is 'empty'
*/
public static final String UNKNOWN = "unknown";
private final ObservableList<Long> fileIDs = FXCollections.observableArrayList();
@ -52,7 +52,7 @@ public class Grouping {
final public GroupKey<?> groupKey;
public Grouping(GroupKey<?> groupKey, List<Long> filesInGroup) {
DrawableGroup(GroupKey<?> groupKey, List<Long> filesInGroup) {
this.groupKey = groupKey;
fileIDs.setAll(filesInGroup);
}
@ -66,7 +66,7 @@ public class Grouping {
}
synchronized public int getFilesWithHashSetHitsCount() {
//TODO: use the drawable db for this ? -jm
if (filesWithHashSetHitsCount < 0) {
filesWithHashSetHitsCount = 0;
for (Long fileID : fileIds()) {
@ -105,7 +105,7 @@ public class Grouping {
return false;
}
return Objects.equals(this.groupKey,
((Grouping) obj).groupKey);
((DrawableGroup) obj).groupKey);
}
synchronized public void addFile(Long f) {

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013 Basis Technology Corp.
* Copyright 2013-4 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -22,6 +22,7 @@ import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
@ -29,13 +30,18 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import java.util.stream.Collectors;
import javafx.application.Platform;
import javafx.beans.property.ReadOnlyDoubleProperty;
import javafx.beans.property.ReadOnlyDoubleWrapper;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javax.swing.SortOrder;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.progress.ProgressHandleFactory;
import org.openide.util.Exceptions;
@ -44,8 +50,10 @@ import org.sleuthkit.autopsy.coreutils.LoggedTask;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.ThreadConfined;
import org.sleuthkit.autopsy.coreutils.ThreadConfined.ThreadType;
import org.sleuthkit.autopsy.imageanalyzer.FileUpdateEvent;
import org.sleuthkit.autopsy.imageanalyzer.ImageAnalyzerController;
import org.sleuthkit.autopsy.imageanalyzer.ImageAnalyzerModule;
import org.sleuthkit.autopsy.imageanalyzer.TagUtils;
import org.sleuthkit.autopsy.imageanalyzer.datamodel.Category;
import org.sleuthkit.autopsy.imageanalyzer.datamodel.DrawableAttribute;
import org.sleuthkit.autopsy.imageanalyzer.datamodel.DrawableDB;
@ -61,9 +69,9 @@ import org.sleuthkit.datamodel.TskCoreException;
/**
* Provides an abstraction layer on top of {@link DrawableDB} ( and to some
* extent {@link SleuthkitCase} ) to facilitate creation, retrieval, updating,
* and sorting of {@link Grouping}s.
* and sorting of {@link DrawableGroup}s.
*/
public class GroupManager {
public class GroupManager implements FileUpdateEvent.FileUpdateListener {
private static final Logger LOGGER = Logger.getLogger(GroupManager.class.getName());
@ -72,27 +80,27 @@ public class GroupManager {
private final ImageAnalyzerController controller;
/**
* map from {@link GroupKey}s to {@link Grouping}s. All groups (even not
* map from {@link GroupKey}s to {@link DrawableGroup}s. All groups (even
* not
* fully analyzed or not visible groups) could be in this map
*/
private final Map<GroupKey<?>, Grouping> groupMap = new HashMap<>();
private final Map<GroupKey<?>, DrawableGroup> groupMap = new HashMap<>();
/**
* list of all analyzed groups
*/
@ThreadConfined(type = ThreadType.JFX)
private final ObservableList<Grouping> analyzedGroups = FXCollections.observableArrayList();
private final ObservableList<DrawableGroup> analyzedGroups = FXCollections.observableArrayList();
private final ObservableList<Grouping> publicAnalyzedGroupsWrapper = FXCollections.unmodifiableObservableList(analyzedGroups);
private final ObservableList<DrawableGroup> publicAnalyzedGroupsWrapper = FXCollections.unmodifiableObservableList(analyzedGroups);
/**
* list of unseen groups
*/
@ThreadConfined(type = ThreadType.JFX)
private final ObservableList<Grouping> unSeenGroups = FXCollections.observableArrayList();
private final ObservableList<DrawableGroup> unSeenGroups = FXCollections.observableArrayList();
// private final SortedList<Grouping> sortedUnSeenGroups = new SortedList<>(unSeenGroups);
private final ObservableList<Grouping> publicSortedUnseenGroupsWrapper = FXCollections.unmodifiableObservableList(unSeenGroups);
private final ObservableList<DrawableGroup> publicSortedUnseenGroupsWrapper = FXCollections.unmodifiableObservableList(unSeenGroups);
private ReGroupTask<?> groupByTask;
@ -102,18 +110,20 @@ public class GroupManager {
private volatile DrawableAttribute<?> groupBy = DrawableAttribute.PATH;
private volatile SortOrder sortOrder = SortOrder.ASCENDING;
private ReadOnlyDoubleWrapper regroupProgress = new ReadOnlyDoubleWrapper();
public void setDB(DrawableDB db) {
this.db = db;
db.addUpdatedFileListener(this);
regroup(groupBy, sortBy, sortOrder, Boolean.TRUE);
}
public ObservableList<Grouping> getAnalyzedGroups() {
public ObservableList<DrawableGroup> getAnalyzedGroups() {
return publicAnalyzedGroupsWrapper;
}
@ThreadConfined(type = ThreadType.JFX)
public ObservableList<Grouping> getUnSeenGroups() {
public ObservableList<DrawableGroup> getUnSeenGroups() {
return publicSortedUnseenGroupsWrapper;
}
@ -125,6 +135,7 @@ public class GroupManager {
*/
public GroupManager(ImageAnalyzerController controller) {
this.controller = controller;
}
/**
@ -158,7 +169,7 @@ public class GroupManager {
*
*
* @return a set of {@link GroupKey}s representing the group(s) the given
* file is a part of
* file is a part of
*/
synchronized public Set<GroupKey<?>> getGroupKeysForFileID(Long fileID) {
try {
@ -173,10 +184,11 @@ public class GroupManager {
/**
* @param groupKey
*
* @return return the Grouping (if it exists) for the given GroupKey, or
* null if no group exists for that key.
* @return return the DrawableGroup (if it exists) for the given GroupKey,
* or
* null if no group exists for that key.
*/
public Grouping getGroupForKey(GroupKey<?> groupKey) {
public DrawableGroup getGroupForKey(GroupKey<?> groupKey) {
synchronized (groupMap) {
return groupMap.get(groupKey);
}
@ -222,28 +234,34 @@ public class GroupManager {
}
/**
* make and return a group with the given key and files. If a group already
* existed for that key, it will be replaced.
* make and return a new group with the given key and files. If a group
* already existed for that key, it will be replaced.
*
* @param groupKey
* @param files
* NOTE: this is the only API for making a new group.
*
* @return
* @param groupKey the groupKey that uniquely identifies this group
* @param files a list of fileids that are members of this group
*
* TODO: check if a group already exists for that key and ... (do what?add
* files to it?) -jm
* @return the new DrawableGroup for the given key
*/
public Grouping makeGroup(GroupKey<?> groupKey, List<Long> files) {
public DrawableGroup makeGroup(GroupKey<?> groupKey, List<Long> files) {
List<Long> newFiles = files == null ? new ArrayList<>() : files;
Grouping g = new Grouping(groupKey, newFiles);
DrawableGroup g = new DrawableGroup(groupKey, newFiles);
synchronized (groupMap) {
groupMap.put(groupKey, g);
}
return g;
}
public void markGroupSeen(Grouping group) {
/**
* 'mark' the given group as seen. This removes it from the queue of groups
* to review, and is persisted in the drawable db.
*
*
* @param group the {@link DrawableGroup} to mark as seen
*/
public void markGroupSeen(DrawableGroup group) {
synchronized (unSeenGroups) {
unSeenGroups.remove(group);
}
@ -256,11 +274,11 @@ public class GroupManager {
* no-op
*
* @param groupKey the value of groupKey
* @param fileID the value of file
* @param fileID the value of file
*/
public synchronized void removeFromGroup(GroupKey<?> groupKey, final Long fileID) {
//get grouping this file would be in
final Grouping group = getGroupForKey(groupKey);
final DrawableGroup group = getGroupForKey(groupKey);
if (group != null) {
group.removeFile(fileID);
if (group.fileIds().isEmpty()) {
@ -298,7 +316,7 @@ public class GroupManager {
* user picked a different group by attribute, while the current task
* was still running) */
if (task == null || (task.isCancelled() == false)) {
Grouping g = makeGroup(groupKey, filesInGroup);
DrawableGroup g = makeGroup(groupKey, filesInGroup);
final boolean groupSeen = db.isGroupSeen(groupKey);
Platform.runLater(() -> {
@ -321,7 +339,7 @@ public class GroupManager {
* @param groupKey
*
* @return null if this group is not analyzed or a list of file ids in this
* group if they are all analyzed
* group if they are all analyzed
*/
public List<Long> checkAnalyzed(final GroupKey<?> groupKey) {
try {
@ -551,7 +569,7 @@ public class GroupManager {
* @param groupBy
* @param sortBy
* @param sortOrder
* @param force true to force a full db query regroup
* @param force true to force a full db query regroup
*/
public <A extends Comparable<A>> void regroup(final DrawableAttribute<A> groupBy, final GroupSortBy sortBy, final SortOrder sortOrder, Boolean force) {
//only re-query the db if the group by attribute changed or it is forced
@ -567,7 +585,10 @@ public class GroupManager {
});
groupByTask = new ReGroupTask<A>(groupBy, sortBy, sortOrder);
controller.submitBGTask(groupByTask);
Platform.runLater(() -> {
regroupProgress.bind(groupByTask.progressProperty());
});
regroupExecutor.submit(groupByTask);
} else {
// just resort the list of groups
setSortBy(sortBy);
@ -578,6 +599,83 @@ public class GroupManager {
}
}
/**
* an executor to submit async ui related background tasks to.
*/
final ExecutorService regroupExecutor = Executors.newSingleThreadExecutor(new BasicThreadFactory.Builder().namingPattern("ui task -%d").build());
public ReadOnlyDoubleProperty regroupProgress() {
return regroupProgress.getReadOnlyProperty();
}
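A headless sketch of the read-only progress pattern used here: the owner keeps a writable ReadOnlyDoubleWrapper and hands out only the read-only view, which a status bar can bind to. A plain SimpleDoubleProperty stands in for a ProgressBar's progressProperty(), so no JavaFX toolkit startup is needed.

import javafx.beans.property.ReadOnlyDoubleProperty;
import javafx.beans.property.ReadOnlyDoubleWrapper;
import javafx.beans.property.SimpleDoubleProperty;

public class ProgressPropertySketch {

    // the writable side, owned by the GroupManager-style class
    private final ReadOnlyDoubleWrapper regroupProgress = new ReadOnlyDoubleWrapper(0);

    // what callers (e.g. a status bar) get to see
    public ReadOnlyDoubleProperty regroupProgress() {
        return regroupProgress.getReadOnlyProperty();
    }

    public static void main(String[] args) {
        ProgressPropertySketch manager = new ProgressPropertySketch();

        // stand-in for a ProgressBar's progressProperty()
        SimpleDoubleProperty barProgress = new SimpleDoubleProperty();
        barProgress.bind(manager.regroupProgress());

        // when a task updates the wrapper, the bound property follows
        manager.regroupProgress.set(0.5);
        System.out.println(barProgress.get()); // 0.5
    }
}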
/**
* handle {@link FileUpdateEvent} sent from Db when files are
* inserted/updated
*
* @param evt
*/
@Override
synchronized public void handleFileUpdate(FileUpdateEvent evt) {
final Collection<Long> fileIDs = evt.getUpdatedFiles();
switch (evt.getUpdateType()) {
case FILE_REMOVED:
for (final long fileId : fileIDs) {
//get grouping(s) this file would be in
Set<GroupKey<?>> groupsForFile = getGroupKeysForFileID(fileId);
for (GroupKey<?> gk : groupsForFile) {
removeFromGroup(gk, fileId);
}
}
break;
case FILE_UPDATED:
/**
* TODO: is there a way to optimize this to avoid querying the db
* so much? The problem is that as new files are analyzed they
* might be in new groups (if we are grouping by, say, make or
* model).
*
* TODO: Should this be an InnerTask so it can be done by the
* WorkerThread? Is it already done by the worker thread, because
* handleFileUpdate is invoked through a call on the db in the
* UpdateTask inner task? -jm
*/
for (final long fileId : fileIDs) {
//get grouping(s) this file would be in
Set<GroupKey<?>> groupsForFile = getGroupKeysForFileID(fileId);
for (GroupKey<?> gk : groupsForFile) {
DrawableGroup g = getGroupForKey(gk);
if (g != null) {
//if there is already a group that was previously deemed fully analyzed, then add this newly analyzed file to it.
g.addFile(fileId);
} else {
//if there wasn't already a group, check if there should be one now
//TODO: use method in groupmanager?
List<Long> checkAnalyzed = checkAnalyzed(gk);
if (checkAnalyzed != null) { // => the group is analyzed, so add it to the ui
populateAnalyzedGroup(gk, checkAnalyzed);
}
}
}
}
Category.fireChange(fileIDs);
if (evt.getChangedAttribute() == DrawableAttribute.TAGS) {
TagUtils.fireChange(fileIDs);
}
break;
}
}
/**
* Task to query database for files in sorted groups and build
* {@link Groupings} for them

View File

@ -43,15 +43,15 @@ public enum GroupSortBy implements ComparatorProvider {
*/
FILE_COUNT("Group Size", true, "folder-open-image.png") {
@Override
public Comparator<Grouping> getGrpComparator(final SortOrder sortOrder) {
return applySortOrder(sortOrder, Comparator.comparingInt(Grouping::getSize));
public Comparator<DrawableGroup> getGrpComparator(final SortOrder sortOrder) {
return applySortOrder(sortOrder, Comparator.comparingInt(DrawableGroup::getSize));
}
@Override
public <A extends Comparable<A>> Comparator<A> getValueComparator(final DrawableAttribute<A> attr, final SortOrder sortOrder) {
return (A v1, A v2) -> {
Grouping g1 = ImageAnalyzerController.getDefault().getGroupManager().getGroupForKey(new GroupKey<A>(attr, v1));
Grouping g2 = ImageAnalyzerController.getDefault().getGroupManager().getGroupForKey(new GroupKey<A>(attr, v2));
DrawableGroup g1 = ImageAnalyzerController.getDefault().getGroupManager().getGroupForKey(new GroupKey<A>(attr, v1));
DrawableGroup g2 = ImageAnalyzerController.getDefault().getGroupManager().getGroupForKey(new GroupKey<A>(attr, v2));
return getGrpComparator(sortOrder).compare(g1, g2);
};
}
@ -62,7 +62,7 @@ public enum GroupSortBy implements ComparatorProvider {
*/
GROUP_BY_VALUE("Group Name", true, "folder-rename.png") {
@Override
public Comparator<Grouping> getGrpComparator(final SortOrder sortOrder) {
public Comparator<DrawableGroup> getGrpComparator(final SortOrder sortOrder) {
return applySortOrder(sortOrder, Comparator.comparing(t -> t.groupKey.getValueDisplayName()));
}
@ -81,7 +81,7 @@ public enum GroupSortBy implements ComparatorProvider {
*/
NONE("None", false, "prohibition.png") {
@Override
public Comparator<Grouping> getGrpComparator(SortOrder sortOrder) {
public Comparator<DrawableGroup> getGrpComparator(SortOrder sortOrder) {
return new NoOpComparator<>();
}
@ -95,15 +95,15 @@ public enum GroupSortBy implements ComparatorProvider {
*/
PRIORITY("Priority", false, "hashset_hits.png") {
@Override
public Comparator<Grouping> getGrpComparator(SortOrder sortOrder) {
return Comparator.nullsLast(Comparator.comparingDouble(Grouping::getHashHitDensity).thenComparingInt(Grouping::getSize).reversed());
public Comparator<DrawableGroup> getGrpComparator(SortOrder sortOrder) {
return Comparator.nullsLast(Comparator.comparingDouble(DrawableGroup::getHashHitDensity).thenComparingInt(DrawableGroup::getSize).reversed());
}
@Override
public <A extends Comparable<A>> Comparator<A> getValueComparator(DrawableAttribute<A> attr, SortOrder sortOrder) {
return (A v1, A v2) -> {
Grouping g1 = ImageAnalyzerController.getDefault().getGroupManager().getGroupForKey(new GroupKey<A>(attr, v1));
Grouping g2 = ImageAnalyzerController.getDefault().getGroupManager().getGroupForKey(new GroupKey<A>(attr, v2));
DrawableGroup g1 = ImageAnalyzerController.getDefault().getGroupManager().getGroupForKey(new GroupKey<A>(attr, v1));
DrawableGroup g2 = ImageAnalyzerController.getDefault().getGroupManager().getGroupForKey(new GroupKey<A>(attr, v2));
return getGrpComparator(sortOrder).compare(g1, g2);
};
@ -181,5 +181,5 @@ interface ComparatorProvider {
<A extends Comparable<A>> Comparator<A> getValueComparator(DrawableAttribute<A> attr, SortOrder sortOrder);
Comparator<Grouping> getGrpComparator(SortOrder sortOrder);
Comparator<DrawableGroup> getGrpComparator(SortOrder sortOrder);
}
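applySortOrder is not shown in this hunk; assuming it simply reverses the comparator for descending order, here is a sketch of how these group comparators would be built and applied. The helper below is a hypothetical reconstruction, not the project's code.

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import javax.swing.SortOrder;

public class SortOrderSketch {

    /** Hypothetical reconstruction of applySortOrder: reverse for DESCENDING, pass through otherwise. */
    static <T> Comparator<T> applySortOrder(SortOrder sortOrder, Comparator<T> cmp) {
        return sortOrder == SortOrder.DESCENDING ? cmp.reversed() : cmp;
    }

    public static void main(String[] args) {
        List<String> groups = new ArrayList<>(List.of("bb", "a", "cccc"));
        // sort by "group size" descending, analogous to FILE_COUNT above
        groups.sort(applySortOrder(SortOrder.DESCENDING, Comparator.comparingInt(String::length)));
        System.out.println(groups); // [cccc, bb, a]
    }
}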

View File

@ -26,13 +26,13 @@ import java.util.Optional;
*/
public class GroupViewState {
private final Grouping group;
private final DrawableGroup group;
private final GroupViewMode mode;
private final Optional<Long> slideShowfileID;
public Grouping getGroup() {
public DrawableGroup getGroup() {
return group;
}
@ -44,17 +44,17 @@ public class GroupViewState {
return slideShowfileID;
}
private GroupViewState(Grouping g, GroupViewMode mode, Long slideShowfileID) {
private GroupViewState(DrawableGroup g, GroupViewMode mode, Long slideShowfileID) {
this.group = g;
this.mode = mode;
this.slideShowfileID = Optional.ofNullable(slideShowfileID);
}
public static GroupViewState tile(Grouping g) {
public static GroupViewState tile(DrawableGroup g) {
return new GroupViewState(g, GroupViewMode.TILE, null);
}
public static GroupViewState slideShow(Grouping g, Long fileID) {
public static GroupViewState slideShow(DrawableGroup g, Long fileID) {
return new GroupViewState(g, GroupViewMode.SLIDE_SHOW, fileID);
}
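A compact standalone re-creation of the factory-method-plus-Optional pattern above, showing how the slide-show file ID stays absent in tile mode. ViewStateSketch and its String group field are illustrative stand-ins only.

import java.util.Optional;

public class ViewStateSketch {

    enum Mode { TILE, SLIDE_SHOW }

    private final String group;              // stands in for DrawableGroup
    private final Mode mode;
    private final Optional<Long> slideShowFileID;

    private ViewStateSketch(String group, Mode mode, Long slideShowFileID) {
        this.group = group;
        this.mode = mode;
        this.slideShowFileID = Optional.ofNullable(slideShowFileID);
    }

    static ViewStateSketch tile(String group) {
        return new ViewStateSketch(group, Mode.TILE, null);
    }

    static ViewStateSketch slideShow(String group, Long fileID) {
        return new ViewStateSketch(group, Mode.SLIDE_SHOW, fileID);
    }

    @Override
    public String toString() {
        return mode + " view of " + group + ", slide-show file: " + slideShowFileID;
    }

    public static void main(String[] args) {
        System.out.println(tile("make: Canon"));            // TILE view, slide-show file: Optional.empty
        System.out.println(slideShow("make: Canon", 42L));  // SLIDE_SHOW view, slide-show file: Optional[42]
    }
}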

View File

@ -106,7 +106,7 @@ public class DrawableTile extends SingleDrawableViewBase implements Category.Cat
@Override
protected Runnable getContentUpdateRunnable() {
Image image = file.getIcon();
Image image = file.getThumbnail();
return () -> {
imageView.setImage(image);

View File

@ -114,12 +114,12 @@ import org.sleuthkit.autopsy.imageanalyzer.datamodel.Category;
import org.sleuthkit.autopsy.imageanalyzer.datamodel.DrawableAttribute;
import org.sleuthkit.autopsy.imageanalyzer.grouping.GroupViewMode;
import org.sleuthkit.autopsy.imageanalyzer.grouping.GroupViewState;
import org.sleuthkit.autopsy.imageanalyzer.grouping.Grouping;
import org.sleuthkit.autopsy.imageanalyzer.grouping.DrawableGroup;
import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskCoreException;
/**
* A GroupPane displays the contents of a {@link Grouping}. It supports both a
* A GroupPane displays the contents of a {@link DrawableGroup}. It supports both a
* {@link TilePane} based view and a {@link SlideShowView} view by swapping out
* its internal components.
*
@ -204,7 +204,7 @@ public class GroupPane extends BorderPane implements GroupView {
/**
* the grouping this pane is currently the view for
*/
private final ReadOnlyObjectWrapper<Grouping> grouping = new ReadOnlyObjectWrapper<>();
private final ReadOnlyObjectWrapper<DrawableGroup> grouping = new ReadOnlyObjectWrapper<>();
/**
* map from fileIDs to their assigned cells in the tile view. This is used
@ -252,7 +252,7 @@ public class GroupPane extends BorderPane implements GroupView {
this.scrollToFileID(globalSelectionModel.lastSelectedProperty().get());
}
public Grouping getGrouping() {
public DrawableGroup getGrouping() {
return grouping.get();
}
@ -268,7 +268,7 @@ public class GroupPane extends BorderPane implements GroupView {
} else {
groupName = grouping.get().groupKey.getValue().toString();
}
return StringUtils.defaultIfBlank(groupName, Grouping.UNKNOWN) + " -- " + hashHitCount + " hash set hits / " + size + " files";
return StringUtils.defaultIfBlank(groupName, DrawableGroup.UNKNOWN) + " -- " + hashHitCount + " hash set hits / " + size + " files";
}
private MenuItem createGrpCatMenuItem(final Category cat) {
@ -322,7 +322,7 @@ public class GroupPane extends BorderPane implements GroupView {
} else {
groupName = grouping.get().groupKey.getValue().toString();
}
final String headerString = StringUtils.defaultIfBlank(groupName, Grouping.UNKNOWN) + " -- " + hashHitCount + " hash set hits / " + size + " files";
final String headerString = StringUtils.defaultIfBlank(groupName, DrawableGroup.UNKNOWN) + " -- " + hashHitCount + " hash set hits / " + size + " files";
Platform.runLater(() -> {
groupLabel.setText(headerString);
});
@ -333,7 +333,7 @@ public class GroupPane extends BorderPane implements GroupView {
return contextMenu;
}
ReadOnlyObjectProperty<Grouping> grouping() {
ReadOnlyObjectProperty<DrawableGroup> grouping() {
return grouping.getReadOnlyProperty();
}

View File

@ -220,7 +220,7 @@ public class MetaDataPane extends AnchorPane implements Category.CategoryListene
}
public void updateUI() {
final Image icon = getFile().getIcon();
final Image icon = getFile().getThumbnail();
final ObservableList<Pair<DrawableAttribute<?>, ? extends Object>> attributesList = getFile().getAttributesList();
Platform.runLater(() -> {

View File

@ -280,7 +280,7 @@ public abstract class SingleDrawableViewBase extends AnchorPane implements Drawa
}
}
controller.handleFileUpdate(new FileUpdateEvent(Collections.singleton(fileID), DrawableAttribute.TAGS));
controller.getGroupManager().handleFileUpdate(new FileUpdateEvent(Collections.singleton(fileID), DrawableAttribute.TAGS));
} catch (TskCoreException ex) {
Exceptions.printStackTrace(ex);
}

View File

@ -270,7 +270,7 @@ public class SlideShowView extends SingleDrawableViewBase implements TagUtils.Ta
if (fileID != null) {
int index = groupPane.getGrouping().fileIds().indexOf(fileID);
final int size = groupPane.getGrouping().fileIds().size();
index += d;
index = (index + d) % size;
if (index < 0) {
index += size;
}
@ -283,7 +283,7 @@ public class SlideShowView extends SingleDrawableViewBase implements TagUtils.Ta
/**
* @return supplemental text to include in the label, specifically: "image x
* of y"
* of y"
*/
private String getSupplementalText() {
return " ( " + (groupPane.getGrouping().fileIds().indexOf(fileID) + 1) + " of " + groupPane.getGrouping().fileIds().size() + " in group )";

View File

@ -15,13 +15,22 @@
<StackPane maxHeight="-Infinity" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-Infinity" prefHeight="-1.0" prefWidth="-1.0" HBox.hgrow="NEVER">
<children>
<ProgressBar id="progBar" fx:id="fileTaskProgresBar" focusTraversable="false" maxHeight="-1.0" maxWidth="1.7976931348623157E308" minHeight="-Infinity" minWidth="-1.0" prefHeight="24.0" prefWidth="-1.0" progress="0.0" visible="true" />
<Label id="fileUpdateLabel" fx:id="fileUpdateTaskLabel" alignment="CENTER" contentDisplay="CENTER" graphicTextGap="0.0" labelFor="$fileTaskProgresBar" maxHeight="-Infinity" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-Infinity" prefWidth="-1.0" text="0 File Update Tasks" StackPane.alignment="CENTER" />
<Label id="fileUpdateLabel" fx:id="fileUpdateTaskLabel" alignment="CENTER" contentDisplay="CENTER" graphicTextGap="0.0" labelFor="$fileTaskProgresBar" maxHeight="-Infinity" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-Infinity" prefWidth="-1.0" text="0 File Update Tasks" StackPane.alignment="CENTER">
<StackPane.margin>
<Insets left="3.0" right="3.0" />
</StackPane.margin></Label>
</children>
<HBox.margin>
<Insets />
</HBox.margin>
</StackPane>
<StackPane maxHeight="-Infinity" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-Infinity" prefHeight="-1.0" prefWidth="-1.0" HBox.hgrow="NEVER">
<children>
<ProgressBar fx:id="bgTaskProgressBar" maxHeight="-1.0" maxWidth="-1.0" minHeight="-Infinity" minWidth="-1.0" prefHeight="24.0" prefWidth="-1.0" progress="0.0" StackPane.alignment="CENTER" />
<Label fx:id="bgTaskLabel" alignment="CENTER" cache="false" contentDisplay="CENTER" disable="false" focusTraversable="false" labelFor="$uiTaskProgressBar" maxHeight="-Infinity" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-Infinity" text="0 BG Tasks" StackPane.alignment="CENTER" />
<Label fx:id="bgTaskLabel" alignment="CENTER" cache="false" contentDisplay="CENTER" disable="false" focusTraversable="false" labelFor="$uiTaskProgressBar" maxHeight="-Infinity" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-Infinity" text="Regrouping" StackPane.alignment="CENTER">
<StackPane.margin>
<Insets left="3.0" right="3.0" />
</StackPane.margin></Label>
</children>
<HBox.margin>
<Insets right="5.0" />

View File

@ -69,19 +69,16 @@ public class StatusBar extends AnchorPane {
assert bgTaskLabel != null : "fx:id=\"uiTaskLabel\" was not injected: check your FXML file 'StatusBar.fxml'.";
assert bgTaskProgressBar != null : "fx:id=\"uiTaskProgressBar\" was not injected: check your FXML file 'StatusBar.fxml'.";
controller.getFileUpdateQueueSizeProperty().addListener((ov, oldSize, newSize) -> {
Platform.runLater(() -> {
fileUpdateTaskLabel.setText(newSize.toString() + " File Update Tasks");
fileTaskProgresBar.setProgress((double) (newSize.intValue() > 0 ? -1 : 0));
});
});
fileUpdateTaskLabel.textProperty().bind(controller.getFileUpdateQueueSizeProperty().asString().concat(" File Update Tasks"));//;setText(newSize.toString() + " File Update Tasks");
fileTaskProgresBar.progressProperty().bind(controller.getFileUpdateQueueSizeProperty().negate());
// controller.getFileUpdateQueueSizeProperty().addListener((ov, oldSize, newSize) -> {
// Platform.runLater(() -> {
//
//
// });
// });
controller.bgTaskQueueSizeProperty().addListener((ov, oldSize, newSize) -> {
Platform.runLater(() -> {
bgTaskLabel.setText(newSize.toString() + " BG Tasks");
bgTaskProgressBar.setProgress((double) (newSize.intValue() > 0 ? -1 : 0));
});
});
bgTaskProgressBar.progressProperty().bind(controller.regroupProgress());
Platform.runLater(() -> {
staleLabel.setTooltip(new Tooltip("Some data may be out of date. Enable listening to ingest in Tools | Options | Image/Video Analyzer, after ingest is complete, to update."));
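The listener-based updates above are replaced by property bindings. Here is a headless sketch of the same idiom with plain JavaFX properties standing in for the label text and progress bar; it assumes, as the asString()/negate() calls suggest, that getFileUpdateQueueSizeProperty() is an integer property.

import javafx.beans.property.SimpleDoubleProperty;
import javafx.beans.property.SimpleIntegerProperty;
import javafx.beans.property.SimpleStringProperty;

public class StatusBindingSketch {

    public static void main(String[] args) {
        SimpleIntegerProperty queueSize = new SimpleIntegerProperty(0);

        // stand-ins for fileUpdateTaskLabel.textProperty() and fileTaskProgresBar.progressProperty()
        SimpleStringProperty labelText = new SimpleStringProperty();
        SimpleDoubleProperty progress = new SimpleDoubleProperty();

        labelText.bind(queueSize.asString().concat(" File Update Tasks"));
        // any negative progress value renders as indeterminate; 0 shows an empty bar
        progress.bind(queueSize.negate());

        queueSize.set(3);
        System.out.println(labelText.get()); // "3 File Update Tasks"
        System.out.println(progress.get());  // -3.0 (indeterminate in a ProgressBar)
    }
}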

View File

@ -44,7 +44,7 @@ import javax.swing.SortOrder;
import org.openide.util.Exceptions;
import org.sleuthkit.autopsy.imageanalyzer.FXMLConstructor;
import org.sleuthkit.autopsy.imageanalyzer.FileIDSelectionModel;
import org.sleuthkit.autopsy.imageanalyzer.IconCache;
import org.sleuthkit.autopsy.imageanalyzer.ThumbnailCache;
import org.sleuthkit.autopsy.imageanalyzer.ImageAnalyzerController;
import org.sleuthkit.autopsy.imageanalyzer.TagUtils;
import org.sleuthkit.autopsy.imageanalyzer.datamodel.Category;
@ -214,7 +214,7 @@ public class Toolbar extends ToolBar {
orderGroup.selectedToggleProperty().addListener(queryInvalidationListener);
IconCache.getDefault().iconSize.bind(sizeSlider.valueProperty());
ThumbnailCache.getDefault().iconSize.bind(sizeSlider.valueProperty());
}

View File

@ -10,7 +10,7 @@ import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import org.apache.commons.lang3.StringUtils;
import org.sleuthkit.autopsy.imageanalyzer.datamodel.DrawableAttribute;
import org.sleuthkit.autopsy.imageanalyzer.grouping.Grouping;
import org.sleuthkit.autopsy.imageanalyzer.grouping.DrawableGroup;
/**
* A {@link Node} in the tree that listens to its associated group. Manages
@ -44,7 +44,7 @@ class GroupTreeCell extends TreeCell<TreeNode> {
setGraphic(null);
});
} else {
final String name = StringUtils.defaultIfBlank(tNode.getPath(), Grouping.UNKNOWN);
final String name = StringUtils.defaultIfBlank(tNode.getPath(), DrawableGroup.UNKNOWN);
Platform.runLater(() -> {
setTooltip(new Tooltip(name));
});

View File

@ -27,7 +27,7 @@ import javafx.application.Platform;
import javafx.collections.FXCollections;
import javafx.scene.control.TreeItem;
import org.apache.commons.lang3.StringUtils;
import org.sleuthkit.autopsy.imageanalyzer.grouping.Grouping;
import org.sleuthkit.autopsy.imageanalyzer.grouping.DrawableGroup;
/**
* A node in the nav/hash tree. Manages inserts and removals. Has parents and
@ -46,7 +46,7 @@ class GroupTreeItem extends TreeItem<TreeNode> implements Comparable<GroupTreeIt
*/
private TreeNodeComparators comp;
public GroupTreeItem(String t, Grouping g, TreeNodeComparators comp) {
public GroupTreeItem(String t, DrawableGroup g, TreeNodeComparators comp) {
super(new TreeNode(t, g));
this.comp = comp;
}
@ -71,7 +71,7 @@ class GroupTreeItem extends TreeItem<TreeNode> implements Comparable<GroupTreeIt
* @param g Group to add
* @param tree True if it is part of a tree (versus a list)
*/
void insert(String path, Grouping g, Boolean tree) {
void insert(String path, DrawableGroup g, Boolean tree) {
if (tree) {
String cleanPath = StringUtils.stripStart(path, "/");
@ -132,7 +132,7 @@ class GroupTreeItem extends TreeItem<TreeNode> implements Comparable<GroupTreeIt
* @param g Group to add
* @param tree True if it is part of a tree (versus a list)
*/
void insert(List<String> path, Grouping g, Boolean tree) {
void insert(List<String> path, DrawableGroup g, Boolean tree) {
if (tree) {
// Are we at the end of the recursion?
if (path.isEmpty()) {
@ -189,7 +189,7 @@ class GroupTreeItem extends TreeItem<TreeNode> implements Comparable<GroupTreeIt
return comp.compare(this, o);
}
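The insert(...) overloads above walk a "/"-separated path, creating intermediate tree nodes as they recurse. A minimal standalone sketch of that descent; Node and its String group field are illustrative stand-ins for GroupTreeItem/DrawableGroup.

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class PathTreeSketch {

    static class Node {
        final Map<String, Node> children = new LinkedHashMap<>();
        String group;   // null for purely structural path components
    }

    // Insert a group under root, creating one child node per path component,
    // in the spirit of GroupTreeItem.insert(List<String>, ..., true).
    static void insert(Node root, List<String> path, String group) {
        if (path.isEmpty()) {            // end of the recursion: this node represents the group
            root.group = group;
            return;
        }
        Node child = root.children.computeIfAbsent(path.get(0), k -> new Node());
        insert(child, path.subList(1, path.size()), group);
    }

    public static void main(String[] args) {
        Node root = new Node();
        insert(root, Arrays.asList("img"), "group-1");
        insert(root, Arrays.asList("img", "vacation"), "group-2");
        System.out.println(root.children.get("img").group);                              // group-1
        System.out.println(root.children.get("img").children.get("vacation").group);     // group-2
    }
}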
static GroupTreeItem getTreeItemForGroup(GroupTreeItem root, Grouping grouping) {
static GroupTreeItem getTreeItemForGroup(GroupTreeItem root, DrawableGroup grouping) {
if (Objects.equals(root.getValue().getGroup(), grouping)) {
return root;
} else {

View File

@ -46,10 +46,10 @@ import org.sleuthkit.autopsy.imageanalyzer.FXMLConstructor;
import org.sleuthkit.autopsy.imageanalyzer.ImageAnalyzerController;
import org.sleuthkit.autopsy.imageanalyzer.datamodel.DrawableAttribute;
import org.sleuthkit.autopsy.imageanalyzer.datamodel.DrawableFile;
import org.sleuthkit.autopsy.imageanalyzer.grouping.DrawableGroup;
import org.sleuthkit.autopsy.imageanalyzer.grouping.GroupKey;
import org.sleuthkit.autopsy.imageanalyzer.grouping.GroupSortBy;
import org.sleuthkit.autopsy.imageanalyzer.grouping.GroupViewState;
import org.sleuthkit.autopsy.imageanalyzer.grouping.Grouping;
import org.sleuthkit.datamodel.TskCoreException;
/**
@ -155,22 +155,22 @@ public class NavPanel extends TabPane {
initHashTree();
initNavTree();
controller.getGroupManager().getAnalyzedGroups().addListener((ListChangeListener.Change<? extends Grouping> change) -> {
controller.getGroupManager().getAnalyzedGroups().addListener((ListChangeListener.Change<? extends DrawableGroup> change) -> {
while (change.next()) {
for (Grouping g : change.getAddedSubList()) {
for (DrawableGroup g : change.getAddedSubList()) {
insertIntoNavTree(g);
if (g.getFilesWithHashSetHitsCount() > 0) {
insertIntoHashTree(g);
}
}
for (Grouping g : change.getRemoved()) {
for (DrawableGroup g : change.getRemoved()) {
removeFromNavTree(g);
removeFromHashTree(g);
}
}
});
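A headless sketch of the ListChangeListener idiom registered above: iterate change.next() and handle the added and removed sub-lists separately (strings stand in for DrawableGroup).

import javafx.collections.FXCollections;
import javafx.collections.ListChangeListener;
import javafx.collections.ObservableList;

public class GroupListListenerSketch {

    public static void main(String[] args) {
        ObservableList<String> analyzedGroups = FXCollections.observableArrayList();

        analyzedGroups.addListener((ListChangeListener.Change<? extends String> change) -> {
            while (change.next()) {
                for (String g : change.getAddedSubList()) {
                    System.out.println("insert into nav tree: " + g);
                }
                for (String g : change.getRemoved()) {
                    System.out.println("remove from nav tree: " + g);
                }
            }
        });

        analyzedGroups.add("path: /images");     // triggers the "insert" branch
        analyzedGroups.remove("path: /images");  // triggers the "remove" branch
    }
}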
for (Grouping g : controller.getGroupManager().getAnalyzedGroups()) {
for (DrawableGroup g : controller.getGroupManager().getAnalyzedGroups()) {
insertIntoNavTree(g);
if (g.getFilesWithHashSetHitsCount() > 0) {
insertIntoHashTree(g);
@ -195,7 +195,7 @@ public class NavPanel extends TabPane {
hashTreeRoot.resortChildren(sortByBox.getSelectionModel().getSelectedItem());
}
private void insertIntoHashTree(Grouping g) {
private void insertIntoHashTree(DrawableGroup g) {
initHashTree();
hashTreeRoot.insert(g.groupKey.getValueDisplayName(), g, false);
}
@ -205,7 +205,7 @@ public class NavPanel extends TabPane {
*
* @param grouping
*/
public void setFocusedGroup(Grouping grouping) {
public void setFocusedGroup(DrawableGroup grouping) {
List<String> path = groupingToPath(grouping);
@ -228,7 +228,7 @@ public class NavPanel extends TabPane {
}
@SuppressWarnings("fallthrough")
private static List<String> groupingToPath(Grouping g) {
private static List<String> groupingToPath(DrawableGroup g) {
if (g.groupKey.getAttribute() == DrawableAttribute.PATH) {
String path = g.groupKey.getValueDisplayName();
@ -242,14 +242,14 @@ public class NavPanel extends TabPane {
}
}
private void insertIntoNavTree(Grouping g) {
private void insertIntoNavTree(DrawableGroup g) {
initNavTree();
List<String> path = groupingToPath(g);
navTreeRoot.insert(path, g, true);
}
private void removeFromNavTree(Grouping g) {
private void removeFromNavTree(DrawableGroup g) {
initNavTree();
final GroupTreeItem treeItemForGroup = GroupTreeItem.getTreeItemForGroup(navTreeRoot, g);
if (treeItemForGroup != null) {
@ -257,7 +257,7 @@ public class NavPanel extends TabPane {
}
}
private void removeFromHashTree(Grouping g) {
private void removeFromHashTree(DrawableGroup g) {
initHashTree();
final GroupTreeItem treeItemForGroup = GroupTreeItem.getTreeItemForGroup(hashTreeRoot, g);
if (treeItemForGroup != null) {
@ -293,9 +293,9 @@ public class NavPanel extends TabPane {
private void rebuildNavTree() {
navTreeRoot = new GroupTreeItem("", null, sortByBox.getSelectionModel().selectedItemProperty().get());
ObservableList<Grouping> groups = controller.getGroupManager().getAnalyzedGroups();
ObservableList<DrawableGroup> groups = controller.getGroupManager().getAnalyzedGroups();
for (Grouping g : groups) {
for (DrawableGroup g : groups) {
insertIntoNavTree(g);
}
@ -320,7 +320,7 @@ public class NavPanel extends TabPane {
Collection<GroupKey<?>> groupKeysForFile = controller.getGroupManager().getGroupKeysForFile(file);
for (GroupKey<?> k : groupKeysForFile) {
final Grouping groupForKey = controller.getGroupManager().getGroupForKey(k);
final DrawableGroup groupForKey = controller.getGroupManager().getGroupForKey(k);
if (groupForKey != null) {
insertIntoHashTree(groupForKey);
}

View File

@ -18,7 +18,7 @@
*/
package org.sleuthkit.autopsy.imageanalyzer.gui.navpanel;
import org.sleuthkit.autopsy.imageanalyzer.grouping.Grouping;
import org.sleuthkit.autopsy.imageanalyzer.grouping.DrawableGroup;
/**
*
@ -26,22 +26,22 @@ import org.sleuthkit.autopsy.imageanalyzer.grouping.Grouping;
class TreeNode {
private String path;
private Grouping group;
private DrawableGroup group;
public String getPath() {
return path;
}
public Grouping getGroup() {
public DrawableGroup getGroup() {
return group;
}
public TreeNode(String path, Grouping group) {
public TreeNode(String path, DrawableGroup group) {
this.path = path;
this.group = group;
}
void setGroup(Grouping g) {
void setGroup(DrawableGroup g) {
group = g;
}
}