mirror of https://github.com/overcuriousity/autopsy-flatpak.git
synced 2025-07-12 07:56:16 +00:00

Merge pull request #4159 from sleuthkit/ig_tag_query

Cache IG data, change order of loading

This commit is contained in: commit a75e3244a9
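The commit pre-loads tag, hash-set, and EXIF lookups into in-memory sets in DrawableDB so that per-file inserts do not have to query the case database repeatedly, and it keeps those sets current as new artifacts arrive. A minimal, self-contained sketch of that reference-counted cache pattern (class and method names here are illustrative stand-ins, not the committed API):

    import java.util.HashSet;
    import java.util.Set;

    // Illustrative sketch of a reference-counted metadata cache, modeled on the
    // buildFileMetaDataCache()/freeFileMetaDataCache() methods added in this commit.
    class RefCountedIdCache {
        private final Object lock = new Object();
        private final Set<Long> taggedIds = new HashSet<>();
        private boolean loaded = false;
        private int buildCount = 0;          // number of tasks currently using the cache

        /** Load the cache once; later callers just bump the reference count. */
        void build(Set<Long> idsFromDb) {
            synchronized (lock) {
                buildCount++;
                if (loaded) {
                    return;
                }
                taggedIds.addAll(idsFromDb); // in the real code this comes from a case DB query
                loaded = true;
            }
        }

        /** Record a newly tagged file while the cache is live (ignored otherwise). */
        void add(long objId) {
            synchronized (lock) {
                if (buildCount == 0) {
                    return;                  // no task is maintaining the cache
                }
                taggedIds.add(objId);
            }
        }

        /** True only while the cache holds valid data. */
        boolean contains(long objId) {
            synchronized (lock) {
                return loaded && taggedIds.contains(objId);
            }
        }

        /** Drop the data only when the last task is done with it. */
        void free() {
            synchronized (lock) {
                if (--buildCount > 0) {
                    return;
                }
                loaded = false;
                taggedIds.clear();
            }
        }
    }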
ImageGalleryController.java:

@@ -50,10 +50,12 @@ import javax.annotation.Nonnull;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import org.netbeans.api.progress.ProgressHandle;
import org.openide.util.Cancellable;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.Case.CaseType;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.TagsManager;
import org.sleuthkit.autopsy.coreutils.History;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;

@@ -70,6 +72,7 @@ import org.sleuthkit.autopsy.imagegallery.datamodel.grouping.GroupViewState;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;

@@ -602,6 +605,7 @@ public final class ImageGalleryController {

DRAWABLE_QUERY
= DATASOURCE_CLAUSE
+ " AND ( meta_type = " + TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG.getValue() + ")"
+ " AND ( "
+ //grab files with supported extension
FILE_EXTENSION_CLAUSE

@@ -635,7 +639,7 @@ public final class ImageGalleryController {
public void run() {
progressHandle = getInitialProgressHandle();
progressHandle.start();
updateMessage(Bundle.CopyAnalyzedFiles_populatingDb_status());
updateMessage(Bundle.CopyAnalyzedFiles_populatingDb_status() + " (Data Source " + dataSourceObjId + ")" );

DrawableDB.DrawableTransaction drawableDbTransaction = null;
CaseDbTransaction caseDbTransaction = null;

@@ -650,6 +654,7 @@ public final class ImageGalleryController {
taskCompletionStatus = true;
int workDone = 0;

// Cycle through all of the files returned and call processFile on each
//do in transaction
drawableDbTransaction = taskDB.beginTransaction();
caseDbTransaction = tskCase.beginTransaction();

@@ -663,7 +668,7 @@ public final class ImageGalleryController {
}

processFile(f, drawableDbTransaction, caseDbTransaction);

workDone++;
progressHandle.progress(f.getName(), workDone);
updateProgress(workDone - 1 / (double) files.size());

@@ -672,11 +677,12 @@ public final class ImageGalleryController {

progressHandle.finish();
progressHandle = ProgressHandle.createHandle(Bundle.BulkTask_committingDb_status());
updateMessage(Bundle.BulkTask_committingDb_status());
updateMessage(Bundle.BulkTask_committingDb_status() + " (Data Source " + dataSourceObjId + ")" );
updateProgress(1.0);

progressHandle.start();
caseDbTransaction.commit();
// pass true so that groupmanager is notified of the changes
taskDB.commitTransaction(drawableDbTransaction, true);

} catch (TskCoreException ex) {

@@ -728,10 +734,12 @@ public final class ImageGalleryController {

CopyAnalyzedFiles(long dataSourceObjId, ImageGalleryController controller) {
super(dataSourceObjId, controller);
taskDB.buildFileMetaDataCache();
}

@Override
protected void cleanup(boolean success) {
taskDB.freeFileMetaDataCache();
// at the end of the task, set the stale status based on the
// cumulative status of all data sources
controller.setStale(controller.isDataSourcesTableStale());

@@ -744,20 +752,19 @@ public final class ImageGalleryController {
if (known) {
taskDB.removeFile(f.getId(), tr); //remove known files
} else {

try {
// if mimetype of the file hasn't been ascertained, ingest might not have completed yet.
if (null == f.getMIMEType()) {
// set to false to force the DB to be marked as stale
this.setTaskCompletionStatus(false);
}
//supported mimetype => analyzed
if (null != f.getMIMEType() && FileTypeUtils.hasDrawableMIMEType(f)) {
else if (FileTypeUtils.hasDrawableMIMEType(f)) {
taskDB.updateFile(DrawableFile.create(f, true, false), tr, caseDbTransaction);
} else {
// if mimetype of the file hasn't been ascertained, ingest might not have completed yet.
if (null == f.getMIMEType()) {
// set to false to force the DB to be marked as stale
this.setTaskCompletionStatus(false);
} else {
//unsupported mimtype => analyzed but shouldn't include
taskDB.removeFile(f.getId(), tr);
}
}
//unsupported mimtype => analyzed but shouldn't include
else {
taskDB.removeFile(f.getId(), tr);
}
} catch (FileTypeDetector.FileTypeDetectorInitException ex) {
throw new TskCoreException("Failed to initialize FileTypeDetector.", ex);
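Condensed, the per-file decision in processFile() after this change appears to be the following; a rough sketch with stand-in types (DrawableStore and FileInfo are not the real DrawableDB/AbstractFile classes), not the committed code:

    // Stand-in for the drawable database operations used by processFile().
    interface DrawableStore {
        void update(long objId);
        void remove(long objId);
    }

    // Stand-in for the fields of AbstractFile that the decision depends on.
    class FileInfo {
        final long objId;
        final String mimeType;      // null if ingest has not typed the file yet
        final boolean drawableMime; // true if the MIME type is a supported image/video type

        FileInfo(long objId, String mimeType, boolean drawableMime) {
            this.objId = objId;
            this.mimeType = mimeType;
            this.drawableMime = drawableMime;
        }
    }

    class ProcessFileSketch {
        private boolean taskCompletionStatus = true;

        void processFile(FileInfo f, boolean knownFile, DrawableStore db) {
            if (knownFile) {
                db.remove(f.objId);            // known (e.g. NSRL) files are dropped
            } else if (f.mimeType == null) {
                taskCompletionStatus = false;  // ingest may not be done; DB gets marked stale
            } else if (f.drawableMime) {
                db.update(f.objId);            // supported mimetype => analyzed, keep it
            } else {
                db.remove(f.objId);            // unsupported mimetype => analyzed but excluded
            }
        }
    }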
ImageGalleryModule.java:

@@ -27,6 +27,7 @@ import javafx.application.Platform;
import javax.swing.JOptionPane;
import javax.swing.SwingUtilities;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;

@@ -39,9 +40,13 @@ import org.sleuthkit.autopsy.events.AutopsyEvent;
import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableDB;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.IngestManager.IngestJobEvent;
import static org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.DATA_ADDED;
import static org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.FILE_DONE;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;

@@ -153,45 +158,72 @@ public class ImageGalleryModule {
IngestManager.getInstance().removeIngestModuleEventListener(this);
return;
}

if (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName()) != FILE_DONE) {
return;
}
// getOldValue has fileID getNewValue has Abstractfile
AbstractFile file = (AbstractFile) evt.getNewValue();
if (false == file.isFile()) {
return;
}

/* only process individual files in realtime on the node that is
* running the ingest. on a remote node, image files are processed
* enblock when ingest is complete */
if (((AutopsyEvent) evt).getSourceType() != AutopsyEvent.SourceType.LOCAL) {
return;
}

// Bail out if the case is closed
try {
ImageGalleryController con = getController();
if (con.isListeningEnabled()) {
try {
if (isDrawableAndNotKnown(file)) {
//this file should be included and we don't already know about it from hash sets (NSRL)
con.queueDBTask(new ImageGalleryController.UpdateFileTask(file, controller.getDatabase()));
} else if (FileTypeUtils.getAllSupportedExtensions().contains(file.getNameExtension())) {
/* Doing this check results in fewer tasks queued
* up, and faster completion of db update. This file
* would have gotten scooped up in initial grab, but
* actually we don't need it */
con.queueDBTask(new ImageGalleryController.RemoveFileTask(file, controller.getDatabase()));
}

} catch (FileTypeDetector.FileTypeDetectorInitException ex) {
logger.log(Level.SEVERE, "Unable to determine if file is drawable and not known. Not making any changes to DB", ex); //NON-NLS
MessageNotifyUtil.Notify.error("Image Gallery Error",
"Unable to determine if file is drawable and not known. Not making any changes to DB. See the logs for details.");
}
if (controller == null || Case.getCurrentCaseThrows() == null) {
return;
}
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Attempted to access ImageGallery with no case open.", ex); //NON-NLS
return;
}

if (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName()) == FILE_DONE) {

// getOldValue has fileID getNewValue has Abstractfile
AbstractFile file = (AbstractFile) evt.getNewValue();
if (false == file.isFile()) {
return;
}

try {
ImageGalleryController con = getController();
if (con.isListeningEnabled()) {
try {
// Update the entry if it is a picture and not in NSRL
if (isDrawableAndNotKnown(file)) {
con.queueDBTask(new ImageGalleryController.UpdateFileTask(file, controller.getDatabase()));
}
// Remove it from the DB if it is no longer relevant, but had the correct extension
else if (FileTypeUtils.getAllSupportedExtensions().contains(file.getNameExtension())) {
/* Doing this check results in fewer tasks queued
* up, and faster completion of db update. This file
* would have gotten scooped up in initial grab, but
* actually we don't need it */
con.queueDBTask(new ImageGalleryController.RemoveFileTask(file, controller.getDatabase()));
}
} catch (FileTypeDetector.FileTypeDetectorInitException ex) {
logger.log(Level.SEVERE, "Unable to determine if file is drawable and not known. Not making any changes to DB", ex); //NON-NLS
MessageNotifyUtil.Notify.error("Image Gallery Error",
"Unable to determine if file is drawable and not known. Not making any changes to DB. See the logs for details.");
}
}
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Attempted to access ImageGallery with no case open.", ex); //NON-NLS
}
}
else if (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName()) == DATA_ADDED) {
ModuleDataEvent mde = (ModuleDataEvent)evt.getOldValue();

if (mde.getBlackboardArtifactType().getTypeID() == ARTIFACT_TYPE.TSK_METADATA_EXIF.getTypeID()) {
DrawableDB drawableDB = controller.getDatabase();
for (BlackboardArtifact art : mde.getArtifacts()) {
drawableDB.addExifCache(art.getObjectID());
}
}
else if (mde.getBlackboardArtifactType().getTypeID() == ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID()) {
DrawableDB drawableDB = controller.getDatabase();
for (BlackboardArtifact art : mde.getArtifacts()) {
drawableDB.addHashSetCache(art.getObjectID());
}
}
}
}
}

@@ -251,7 +283,14 @@ public class ImageGalleryModule {
break;
case CONTENT_TAG_ADDED:
final ContentTagAddedEvent tagAddedEvent = (ContentTagAddedEvent) evt;
if (con.getDatabase().isInDB(tagAddedEvent.getAddedTag().getContent().getId())) {

long objId = tagAddedEvent.getAddedTag().getContent().getId();

// update the cache
DrawableDB drawableDB = controller.getDatabase();
drawableDB.addTagCache(objId);

if (con.getDatabase().isInDB(objId)) {
con.getTagsManager().fireTagAddedEvent(tagAddedEvent);
}
break;
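The new DATA_ADDED branch keeps DrawableDB's EXIF and hash-set caches current as ingest modules post artifacts, so later bulk inserts can consult memory instead of re-querying the case database. A compact sketch of that dispatch (types and type IDs below are stand-ins, not the Sleuth Kit constants):

    import java.util.List;

    // Stand-in for the DrawableDB cache-priming calls added in this commit.
    interface MetaDataCache {
        void addExifCache(long objId);
        void addHashSetCache(long objId);
    }

    class Artifact {
        final int typeId;
        final long objId;
        Artifact(int typeId, long objId) { this.typeId = typeId; this.objId = objId; }
    }

    class DataAddedSketch {
        static final int TSK_METADATA_EXIF = 1;   // placeholder IDs, not the real artifact type IDs
        static final int TSK_HASHSET_HIT = 2;

        static void onDataAdded(int artifactTypeId, List<Artifact> artifacts, MetaDataCache cache) {
            if (artifactTypeId == TSK_METADATA_EXIF) {
                for (Artifact art : artifacts) {
                    cache.addExifCache(art.objId);     // remember which files carry EXIF make/model
                }
            } else if (artifactTypeId == TSK_HASHSET_HIT) {
                for (Artifact art : artifacts) {
                    cache.addHashSetCache(art.objId);  // remember which files have hash set hits
                }
            }
        }
    }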
ImageGalleryTopComponent.java:

@@ -118,6 +118,8 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl

private Node infoOverlay;
private final Region infoOverLayBackground = new TranslucentRegion();

/**
* Returns whether the ImageGallery window is open or not.

@@ -142,6 +144,11 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl
return WindowManager.getDefault().findTopComponent(PREFERRED_ID);
}

/**
* NOTE: This usually gets called on the EDT
*
* @throws NoCurrentCaseException
*/
@Messages({
"ImageGalleryTopComponent.openTopCommponent.chooseDataSourceDialog.headerText=Choose a data source to view.",
"ImageGalleryTopComponent.openTopCommponent.chooseDataSourceDialog.contentText=Data source:",

@@ -149,24 +156,35 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl
"ImageGalleryTopComponent.openTopCommponent.chooseDataSourceDialog.titleText=Image Gallery",})
public static void openTopComponent() throws NoCurrentCaseException {

// This creates the top component and adds the UI widgets if it has not yet been opened
final TopComponent topComponent = WindowManager.getDefault().findTopComponent(PREFERRED_ID);
if (topComponent == null) {
return;
}
topComponentInitialized = true;

if (topComponent.isOpened()) {
showTopComponent(topComponent);
return;
}

// Wait until the FX UI has been created. This way, we can always
// show the gray progress screen
// TODO: do this in a more elegant way.
while (topComponentInitialized == false) {}

List<DataSource> dataSources = Collections.emptyList();
ImageGalleryController controller = ImageGalleryModule.getController();
((ImageGalleryTopComponent) topComponent).setController(controller);

// Display the UI so taht they can see the progress screen
showTopComponent(topComponent);

List<DataSource> dataSources = Collections.emptyList();
try {
dataSources = controller.getSleuthKitCase().getDataSources();
} catch (TskCoreException tskCoreException) {
logger.log(Level.SEVERE, "Unable to get data sourcecs.", tskCoreException);
}

GroupManager groupManager = controller.getGroupManager();
synchronized (groupManager) {
if (dataSources.size() <= 1

@@ -175,15 +193,13 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl
* set to something other than path , don't both to ask for
* datasource */
groupManager.regroup(null, groupManager.getGroupBy(), groupManager.getSortBy(), groupManager.getSortOrder(), true);

showTopComponent(topComponent);
return;
}
}

Map<String, DataSource> dataSourceNames = new HashMap<>();
dataSourceNames.put("All", null);
dataSources.forEach(dataSource -> dataSourceNames.put(dataSource.getName(), dataSource));
dataSources.forEach(dataSource -> dataSourceNames.put(dataSource.getName() + " (ID: " + dataSource.getId() + ")", dataSource));

Platform.runLater(() -> {
ChoiceDialog<String> datasourceDialog = new ChoiceDialog<>(null, dataSourceNames.keySet());

@@ -198,7 +214,6 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl
synchronized (groupManager) {
groupManager.regroup(dataSource, groupManager.getGroupBy(), groupManager.getSortBy(), groupManager.getSortOrder(), true);
}
SwingUtilities.invokeLater(() -> showTopComponent(topComponent));
});
}

@@ -266,6 +281,9 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl
controller.regroupDisabledProperty().addListener((Observable observable) -> checkForGroups());
controller.getGroupManager().getAnalyzedGroups().addListener((Observable observable) -> Platform.runLater(() -> checkForGroups()));

topComponentInitialized = true;

// This will cause the UI to show the progress dialog
Platform.runLater(() -> checkForGroups());
}
});

@@ -329,6 +347,8 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl
* Check if there are any fully analyzed groups available from the
* GroupManager and remove blocking progress spinners if there are. If there
* aren't, add a blocking progress spinner with appropriate message.
*
* This gets called when any group becomes analyzed and when started.
*/
@ThreadConfined(type = ThreadConfined.ThreadType.JFX)
@NbBundle.Messages({

@@ -345,11 +365,14 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl
private void checkForGroups() {
GroupManager groupManager = controller.getGroupManager();

// if there are groups to display, then display them
// @@@ Need to check timing on this and make sure we have only groups for the selected DS. Seems like rebuild can cause groups to be created for a DS that is not later selected...
if (isNotEmpty(groupManager.getAnalyzedGroups())) {
clearNotification();
return;
}

// display a message based on if ingest is running and/or listening
if (IngestManager.getInstance().isIngestRunning()) {
if (controller.isListeningEnabled()) {
replaceNotification(centralStack,

@@ -361,12 +384,17 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl
}
return;
}

// display a message about stuff still being in the queue
if (controller.getDBTasksQueueSizeProperty().get() > 0) {
replaceNotification(fullUIStack,
new NoGroupsDialog(Bundle.ImageGalleryController_noGroupsDlg_msg3(),
new ProgressIndicator()));
return;
}

// are there are files in the DB?
try {
if (controller.getDatabase().countAllFiles() <= 0) {
// there are no files in db
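The data source chooser now keys its map on "name (ID: n)" rather than the bare name, which appears intended to keep two data sources with the same display name from colliding in the HashMap. A minimal illustration of the difference (the Ds record and names below are hypothetical):

    import java.util.HashMap;
    import java.util.Map;

    class DataSourceKeySketch {
        record Ds(long id, String name) {}

        public static void main(String[] args) {
            Ds first = new Ds(1, "image.E01");
            Ds second = new Ds(2, "image.E01");     // same display name, different data source

            Map<String, Ds> byName = new HashMap<>();
            byName.put(first.name(), first);
            byName.put(second.name(), second);      // silently overwrites the first entry
            System.out.println(byName.size());      // 1 -- one data source can no longer be chosen

            Map<String, Ds> byNameAndId = new HashMap<>();
            byNameAndId.put(first.name() + " (ID: " + first.id() + ")", first);
            byNameAndId.put(second.name() + " (ID: " + second.id() + ")", second);
            System.out.println(byNameAndId.size()); // 2 -- both remain selectable in the dialog
        }
    }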
DrawableDB.java:

@@ -150,8 +150,16 @@ public final class DrawableDB {

private final Lock DBLock = rwLock.writeLock(); //using exclusing lock for all db ops for now

// caches to make inserts / updates faster
private Cache<String, Boolean> groupCache = CacheBuilder.newBuilder().expireAfterWrite(5, TimeUnit.MINUTES).build();

private final Object cacheLock = new Object(); // protects access to the below cache-related objects
private boolean areCachesLoaded = false; // if true, the below caches contain valid data
private Set<Long> hasTagCache = new HashSet<>(); // contains obj id of files with tags
private Set<Long> hasHashCache = new HashSet<>(); // obj id of files with hash set hits
private Set<Long> hasExifCache = new HashSet<>(); // obj id of files with EXIF (make/model)
private int cacheBuildCount = 0; // number of tasks taht requested the caches be built

static {//make sure sqlite driver is loaded // possibly redundant
try {
Class.forName("org.sqlite.JDBC");

@@ -771,6 +779,123 @@ public final class DrawableDB {
public void updateFile(DrawableFile f, DrawableTransaction tr, CaseDbTransaction caseDbTransaction) {
insertOrUpdateFile(f, tr, updateFileStmt, caseDbTransaction);
}

/**
* Populate caches based on current state of Case DB
*/
public void buildFileMetaDataCache() {

synchronized (cacheLock) {
cacheBuildCount++;
if (areCachesLoaded == true)
return;

try {
// get tags
try (SleuthkitCase.CaseDbQuery dbQuery = tskCase.executeQuery("SELECT obj_id FROM content_tags")) {
ResultSet rs = dbQuery.getResultSet();
while (rs.next()) {
long id = rs.getLong("obj_id");
hasTagCache.add(id);
}
} catch (SQLException ex) {
logger.log(Level.SEVERE, "Error getting tags from DB", ex); //NON-NLS
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error executing query to get tags", ex); //NON-NLS
}

try {
// hash sets
try (SleuthkitCase.CaseDbQuery dbQuery = tskCase.executeQuery("SELECT obj_id FROM blackboard_artifacts WHERE artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID())) {
ResultSet rs = dbQuery.getResultSet();
while (rs.next()) {
long id = rs.getLong("obj_id");
hasHashCache.add(id);
}

} catch (SQLException ex) {
logger.log(Level.SEVERE, "Error getting hashsets from DB", ex); //NON-NLS
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error executing query to get hashsets", ex); //NON-NLS
}

try {
// EXIF
try (SleuthkitCase.CaseDbQuery dbQuery = tskCase.executeQuery("SELECT obj_id FROM blackboard_artifacts WHERE artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF.getTypeID())) {
ResultSet rs = dbQuery.getResultSet();
while (rs.next()) {
long id = rs.getLong("obj_id");
hasExifCache.add(id);
}

} catch (SQLException ex) {
logger.log(Level.SEVERE, "Error getting EXIF from DB", ex); //NON-NLS
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error executing query to get EXIF", ex); //NON-NLS
}

areCachesLoaded = true;
}
}

/**
* Add a file to cache of files that have EXIF data
* @param objectID ObjId of file with EXIF
*/
public void addExifCache(long objectID) {
synchronized (cacheLock) {
// bail out if we are not maintaining caches
if (cacheBuildCount == 0)
return;
hasExifCache.add(objectID);
}
}

/**
* Add a file to cache of files that have hash set hits
* @param objectID ObjId of file with hash set
*/
public void addHashSetCache(long objectID) {
synchronized (cacheLock) {
// bail out if we are not maintaining caches
if (cacheBuildCount == 0)
return;
hasHashCache.add(objectID);
}
}

/**
* Add a file to cache of files that have tags
* @param objectID ObjId of file with tags
*/
public void addTagCache(long objectID) {
synchronized (cacheLock) {
// bail out if we are not maintaining caches
if (cacheBuildCount == 0)
return;
hasTagCache.add(objectID);
}
}

/**
* Free the cached case DB data
*/
public void freeFileMetaDataCache() {
synchronized (cacheLock) {
// dont' free these if there is another task still using them
if (--cacheBuildCount > 0)
return;

areCachesLoaded = false;
hasTagCache.clear();
hasHashCache.clear();
hasExifCache.clear();
}
}

/**
* Update (or insert) a file in(to) the drawable db. Weather this is an
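In the controller changes earlier in this commit, CopyAnalyzedFiles builds the cache in its constructor and frees it in cleanup(). A bulk task using these methods boils down to the following bracketing pattern (DrawableDbLike is a stand-in interface, and the try/finally is how one might guard it, not necessarily how the committed task does):

    // Minimal stand-in so the sketch compiles on its own.
    interface DrawableDbLike {
        void buildFileMetaDataCache();
        void freeFileMetaDataCache();
    }

    class BulkTaskSketch {
        void runBulkUpdate(DrawableDbLike drawableDb, Iterable<Long> fileIds) {
            drawableDb.buildFileMetaDataCache();    // done in the task constructor in the real code
            try {
                for (long id : fileIds) {
                    // per-file insert/update consults the in-memory caches instead of the case DB
                }
            } finally {
                drawableDb.freeFileMetaDataCache(); // done in cleanup() in the real code
            }
        }
    }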
@@ -783,69 +908,99 @@ public final class DrawableDB {
*
* @param f The file to insert.
* @param tr a transaction to use, must not be null
* @param stmt the statement that does the actull inserting
* @param stmt the statement that does the actual inserting
*/
private void insertOrUpdateFile(DrawableFile f, @Nonnull DrawableTransaction tr, @Nonnull PreparedStatement stmt, @Nonnull CaseDbTransaction caseDbTransaction) {

if (tr.isClosed()) {
throw new IllegalArgumentException("can't update database with closed transaction");
}

// get data from caches. Default to true and force the DB lookup if we don't have caches
boolean hasExif = true;
boolean hasHashSet = true;
boolean hasTag = true;
synchronized (cacheLock) {
if (areCachesLoaded) {
hasExif = hasExifCache.contains(f.getId());
hasHashSet = hasHashCache.contains(f.getId());
hasTag = hasTagCache.contains(f.getId());
}
}

dbWriteLock();
try {
// "INSERT OR IGNORE/ INTO drawable_files (obj_id, data_source_obj_id, path, name, created_time, modified_time, make, model, analyzed)"
stmt.setLong(1, f.getId());
stmt.setLong(2, f.getAbstractFile().getDataSource().getId());
stmt.setLong(2, f.getAbstractFile().getDataSourceObjectId());
stmt.setString(3, f.getDrawablePath());
stmt.setString(4, f.getName());
stmt.setLong(5, f.getCrtime());
stmt.setLong(6, f.getMtime());
stmt.setString(7, f.getMake());
stmt.setString(8, f.getModel());
if (hasExif) {
stmt.setString(7, f.getMake());
stmt.setString(8, f.getModel());
} else {
stmt.setString(7, "");
stmt.setString(8, "");
}
stmt.setBoolean(9, f.isAnalyzed());
stmt.executeUpdate();

// Update the list of file IDs in memory
addImageFileToList(f.getId());

try {
for (String name : f.getHashSetNames()) {
// Update the hash set tables
if (hasHashSet) {
try {
for (String name : f.getHashSetNames()) {

// "insert or ignore into hash_sets (hash_set_name) values (?)"
insertHashSetStmt.setString(1, name);
insertHashSetStmt.executeUpdate();
// "insert or ignore into hash_sets (hash_set_name) values (?)"
insertHashSetStmt.setString(1, name);
insertHashSetStmt.executeUpdate();

//TODO: use nested select to get hash_set_id rather than seperate statement/query
//"select hash_set_id from hash_sets where hash_set_name = ?"
selectHashSetStmt.setString(1, name);
try (ResultSet rs = selectHashSetStmt.executeQuery()) {
while (rs.next()) {
int hashsetID = rs.getInt("hash_set_id"); //NON-NLS
//"insert or ignore into hash_set_hits (hash_set_id, obj_id) values (?,?)";
insertHashHitStmt.setInt(1, hashsetID);
insertHashHitStmt.setLong(2, f.getId());
insertHashHitStmt.executeUpdate();
break;
//TODO: use nested select to get hash_set_id rather than seperate statement/query
//"select hash_set_id from hash_sets where hash_set_name = ?"
selectHashSetStmt.setString(1, name);
try (ResultSet rs = selectHashSetStmt.executeQuery()) {
while (rs.next()) {
int hashsetID = rs.getInt("hash_set_id"); //NON-NLS
//"insert or ignore into hash_set_hits (hash_set_id, obj_id) values (?,?)";
insertHashHitStmt.setInt(1, hashsetID);
insertHashHitStmt.setLong(2, f.getId());
insertHashHitStmt.executeUpdate();
break;
}
}
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "failed to insert/update hash hits for file" + f.getContentPathSafe(), ex); //NON-NLS
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "failed to insert/update hash hits for file" + f.getContentPathSafe(), ex); //NON-NLS
}

//and update all groups this file is in
for (DrawableAttribute<?> attr : DrawableAttribute.getGroupableAttrs()) {
// skip attributes that we do not have data for
if ((attr == DrawableAttribute.TAGS) && (hasTag == false)) {
continue;
}
else if ((attr == DrawableAttribute.MAKE || attr == DrawableAttribute.MODEL) && (hasExif == false)) {
continue;
}
Collection<? extends Comparable<?>> vals = attr.getValue(f);
for (Comparable<?> val : vals) {
if (null != val) {
if (attr == DrawableAttribute.PATH) {
insertGroup(f.getAbstractFile().getDataSource().getId(), val.toString(), attr, caseDbTransaction);
} else {
}
else {
insertGroup(val.toString(), attr, caseDbTransaction);
}
}
}
}

// @@@ Consider storing more than ID so that we do not need to requery each file during commit
tr.addUpdatedFile(f.getId());

} catch (SQLException | NullPointerException | TskCoreException ex) {

@@ -931,11 +1086,16 @@ public final class DrawableDB {
return new DrawableTransaction();
}

public void commitTransaction(DrawableTransaction tr, Boolean notify) {
/**
*
* @param tr
* @param notifyGM If true, notify GroupManager about the changes.
*/
public void commitTransaction(DrawableTransaction tr, Boolean notifyGM) {
if (tr.isClosed()) {
throw new IllegalArgumentException("can't close already closed transaction");
}
tr.commit(notify);
tr.commit(notifyGM);
}

public void rollbackTransaction(DrawableTransaction tr) {

@@ -1076,7 +1236,7 @@ public final class DrawableDB {
* @param sortOrder Sort ascending or descending.
* @param dataSource
*
* @return
* @return Map of data source (or null of group by attribute ignores data sources) to list of unique group values
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/

@@ -1524,14 +1684,19 @@ public final class DrawableDB {
}
}

synchronized private void commit(Boolean notify) {
/**
* Commit changes that happened during this transaction
*
* @param notifyGM If true, notify GroupManager about the changes.
*/
synchronized private void commit(Boolean notifyGM) {
if (!closed) {
try {
con.commit();
// make sure we close before we update, bc they'll need locks
close();

if (notify) {
if (notifyGM) {
if (groupManager != null) {
groupManager.handleFileUpdate(updatedFiles);
groupManager.handleFileRemoved(removedFiles);
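The renamed notifyGM flag ties the two databases together at commit time: the controller's bulk task commits the case-DB transaction, then commits the drawable-DB transaction with notifyGM=true so the GroupManager hears about the updated and removed files. A condensed sketch of that pairing (the interfaces are stand-ins for SleuthkitCase.CaseDbTransaction and DrawableDB, and the try/catch shape is illustrative):

    interface CaseTx {
        void commit() throws Exception;
        void rollback() throws Exception;
    }

    interface DrawableTx { }

    interface DrawableStore {
        DrawableTx beginTransaction();
        void commitTransaction(DrawableTx tr, Boolean notifyGM);
        void rollbackTransaction(DrawableTx tr);
    }

    class BulkCommitSketch {
        void run(CaseTx caseTx, DrawableStore db) throws Exception {
            DrawableTx drawableTx = db.beginTransaction();
            try {
                // ... processFile(f, drawableTx, caseTx) for each file ...
                caseTx.commit();
                db.commitTransaction(drawableTx, true); // true => GroupManager is notified of the changes
            } catch (Exception ex) {
                caseTx.rollback();
                db.rollbackTransaction(drawableTx);
                throw ex;
            }
        }
    }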
DrawableFile.java:

@@ -63,8 +63,8 @@ public abstract class DrawableFile {

private static final Logger LOGGER = Logger.getLogger(DrawableFile.class.getName());

public static DrawableFile create(AbstractFile abstractFileById, boolean analyzed) {
return create(abstractFileById, analyzed, FileTypeUtils.hasVideoMIMEType(abstractFileById));
public static DrawableFile create(AbstractFile abstractFile, boolean analyzed) {
return create(abstractFile, analyzed, FileTypeUtils.hasVideoMIMEType(abstractFile));
}

/**
HashSetManager.java:

@@ -93,7 +93,7 @@ public class HashSetManager {
*
* @param fileID the fileID to invalidate in the cache
*/
public void invalidateHashSetsForFile(long fileID) {
public void invalidateHashSetsCacheForFile(long fileID) {
hashSetCache.invalidate(fileID);
}
}
GroupManager.java:

@@ -172,12 +172,15 @@ public class GroupManager {
* a part of.
*/
@SuppressWarnings({"rawtypes", "unchecked"})
synchronized public Set<GroupKey<?>> getGroupKeysForFile(DrawableFile file) throws TskCoreException, TskDataException {
synchronized public Set<GroupKey<?>> getGroupKeysForCurrentGroupBy(DrawableFile file) throws TskCoreException, TskDataException {
Set<GroupKey<?>> resultSet = new HashSet<>();
for (Comparable<?> val : getGroupBy().getValue(file)) {

if (getGroupBy() == DrawableAttribute.PATH) {
resultSet.add(new GroupKey(getGroupBy(), val, file.getDataSource()));
// verify this file is in a data source being displayed
if ((getDataSource() == null) || (file.getDataSource().equals(getDataSource()))) {
resultSet.add(new GroupKey(getGroupBy(), val, file.getDataSource()));
}
} else if (getGroupBy() == DrawableAttribute.TAGS) {
//don't show groups for the categories when grouped by tags.
if (CategoryManager.isNotCategoryTagName((TagName) val)) {

@@ -199,10 +202,10 @@ public class GroupManager {
* @return A set of GroupKeys representing the group(s) the given file is a
* part of
*/
synchronized public Set<GroupKey<?>> getGroupKeysForFileID(Long fileID) {
synchronized public Set<GroupKey<?>> getGroupKeysForCurrentGroupBy(Long fileID) {
try {
DrawableFile file = getDrawableDB().getFileFromID(fileID);
return getGroupKeysForFile(file);
return getGroupKeysForCurrentGroupBy(file);

} catch (TskCoreException | TskDataException ex) {
logger.log(Level.SEVERE, "Failed to get group keys for file with ID " +fileID, ex); //NON-NLS

@@ -434,10 +437,18 @@ public class GroupManager {
return sortOrderProp.getReadOnlyProperty();
}

/**
*
* @return null if all data sources are being displayed
*/
public synchronized DataSource getDataSource() {
return dataSourceProp.get();
}

/**
*
* @param dataSource Data source to display or null to display all of them
*/
synchronized void setDataSource(DataSource dataSource) {
dataSourceProp.set(dataSource);
}

@@ -505,16 +516,28 @@ public class GroupManager {
}
}

/**
* Adds an analyzed file to a group and marks the group as analyzed if the entire group is
* now analyzed.
*
* @param group Group being added to (will be null if a group has not yet been created)
* @param groupKey Group type/value
* @param fileID
*/
@SuppressWarnings("AssignmentToMethodParameter")
synchronized private void addFileToGroup(DrawableGroup group, final GroupKey<?> groupKey, final long fileID) {

// NOTE: We assume that it has already been determined that GroupKey can be displayed based on Data Source filters
if (group == null) {
//if there wasn't already a group check if there should be one now
// path group, for example, only gets created when all files are analyzed
group = popuplateIfAnalyzed(groupKey, null);
}
if (group != null) {
else {
//if there is aleady a group that was previously deemed fully analyzed, then add this newly analyzed file to it.
group.addFile(fileID);
}
// reset the seen status for the group
markGroupSeen(group, false);
}

@@ -543,7 +566,7 @@ public class GroupManager {

for (final long fileId : removedFileIDs) {
//get grouping(s) this file would be in
Set<GroupKey<?>> groupsForFile = getGroupKeysForFileID(fileId);
Set<GroupKey<?>> groupsForFile = getGroupKeysForCurrentGroupBy(fileId);

for (GroupKey<?> gk : groupsForFile) {
removeFromGroup(gk, fileId);

@@ -563,13 +586,14 @@ public class GroupManager {
* the problem is that as a new files are analyzed they might be in new
* groups( if we are grouping by say make or model) -jm
*/
for (long fileId : updatedFileIDs) {
for (long fileId : updatedFileIDs) {
// reset the hash cache
controller.getHashSetManager().invalidateHashSetsCacheForFile(fileId);

controller.getHashSetManager().invalidateHashSetsForFile(fileId);

//get grouping(s) this file would be in
Set<GroupKey<?>> groupsForFile = getGroupKeysForFileID(fileId);
// Update the current groups (if it is visible)
Set<GroupKey<?>> groupsForFile = getGroupKeysForCurrentGroupBy(fileId);
for (GroupKey<?> gk : groupsForFile) {
// see if a group has been created yet for the key
DrawableGroup g = getGroupForKey(gk);
addFileToGroup(g, gk, fileId);
}

@@ -579,6 +603,10 @@ public class GroupManager {
controller.getCategoryManager().fireChange(updatedFileIDs, null);
}

/**
* If the group is analyzed (or other criteria based on grouping) and should be shown to the user,
* then add it to the appropriate data structures so that it can be viewed.
*/
synchronized private DrawableGroup popuplateIfAnalyzed(GroupKey<?> groupKey, ReGroupTask<?> task) {
/*
* If this method call is part of a ReGroupTask and that task is

@@ -588,44 +616,45 @@ public class GroupManager {
* user picked a different group by attribute, while the current task
* was still running)
*/
if (isNull(task) || task.isCancelled() == false) {
if (isNull(task) == false && task.isCancelled() == true) {
return null;
}

/*
* For attributes other than path we can't be sure a group is fully
* analyzed because we don't know all the files that will be a part
* of that group. just show them no matter what.
*/
if (groupKey.getAttribute() != DrawableAttribute.PATH
|| getDrawableDB().isGroupAnalyzed(groupKey)) {
try {
Set<Long> fileIDs = getFileIDsInGroup(groupKey);
if (Objects.nonNull(fileIDs)) {

/*
* For attributes other than path we can't be sure a group is fully
* analyzed because we don't know all the files that will be a part
* of that group. just show them no matter what.
*/
if (groupKey.getAttribute() != DrawableAttribute.PATH
|| getDrawableDB().isGroupAnalyzed(groupKey)) {
try {
Set<Long> fileIDs = getFileIDsInGroup(groupKey);
if (Objects.nonNull(fileIDs)) {
long examinerID = collaborativeModeProp.get() ? -1 : controller.getSleuthKitCase().getCurrentExaminer().getId();
final boolean groupSeen = getDrawableDB().isGroupSeenByExaminer(groupKey, examinerID);
DrawableGroup group;

long examinerID = collaborativeModeProp.get() ? -1 : controller.getSleuthKitCase().getCurrentExaminer().getId();
final boolean groupSeen = getDrawableDB().isGroupSeenByExaminer(groupKey, examinerID);
DrawableGroup group;

if (groupMap.containsKey(groupKey)) {
group = groupMap.get(groupKey);
group.setFiles(fileIDs);
group.setSeen(groupSeen);
} else {
group = new DrawableGroup(groupKey, fileIDs, groupSeen);
controller.getCategoryManager().registerListener(group);
groupMap.put(groupKey, group);
}

if (analyzedGroups.contains(group) == false) {
analyzedGroups.add(group);
sortAnalyzedGroups();
}
updateUnSeenGroups(group);

return group;
if (groupMap.containsKey(groupKey)) {
group = groupMap.get(groupKey);
group.setFiles(fileIDs);
group.setSeen(groupSeen);
} else {
group = new DrawableGroup(groupKey, fileIDs, groupSeen);
controller.getCategoryManager().registerListener(group);
groupMap.put(groupKey, group);
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "failed to get files for group: " + groupKey.getAttribute().attrName.toString() + " = " + groupKey.getValue(), ex); //NON-NLS

if (analyzedGroups.contains(group) == false) {
analyzedGroups.add(group);
sortAnalyzedGroups();
}
updateUnSeenGroups(group);

return group;
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "failed to get files for group: " + groupKey.getAttribute().attrName.toString() + " = " + groupKey.getValue(), ex); //NON-NLS
}
}

@@ -810,7 +839,7 @@ public class GroupManager {
*
* @param groupBy
*
* @return
* @return map of data source (or null if group by attribute ignores data sources) to list of unique group values
*/
public Multimap<DataSource, AttrValType> findValuesForAttribute() {
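The popuplateIfAnalyzed() change above reads as a guard-clause refactor: instead of nesting the whole body inside "if (isNull(task) || task.isCancelled() == false)", the cancelled case now returns null early and the body stays flat. Schematically (a sketch, not the committed method):

    class GuardClauseSketch {
        Object before(Runnable body, boolean taskCancelled) {
            if (!taskCancelled) {   // old shape: body nested under the positive check
                body.run();
                return new Object();
            }
            return null;
        }

        Object after(Runnable body, boolean taskCancelled) {
            if (taskCancelled) {    // new shape: bail out first, keep the body un-indented
                return null;
            }
            body.run();
            return new Object();
        }
    }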