From 703c780f27d0032dff295bf3625bd6ce128cc50c Mon Sep 17 00:00:00 2001 From: jmillman Date: Fri, 31 Jul 2015 15:37:48 -0400 Subject: [PATCH 1/7] show tag icons on detail view event groups, and tag counts in tooltip; cleanup; use NbBundle.messages to generate bundle strings --- .../timeline/events/AggregateEvent.java | 81 +++++---- .../timeline/events/TimeLineEvent.java | 6 +- .../timeline/events/db/Bundle.properties | 6 - .../autopsy/timeline/events/db/EventDB.java | 163 +++++++++++------- .../timeline/events/db/EventsRepository.java | 80 +++++---- .../ui/detailview/AggregateEventNode.java | 89 +++++++--- 6 files changed, 270 insertions(+), 155 deletions(-) delete mode 100644 Core/src/org/sleuthkit/autopsy/timeline/events/db/Bundle.properties diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java index bf04a26188..2e4c5ac45d 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java @@ -27,37 +27,51 @@ import org.sleuthkit.autopsy.timeline.events.type.EventType; import org.sleuthkit.autopsy.timeline.utils.IntervalUtils; import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD; -/** An event that represent a set of other events aggregated together. All the - * sub events should have the same type and matching descriptions at the +/** Represents a set of other (TimeLineEvent) events aggregated together. All + * the sub events should have the same type and matching descriptions at the * designated 'zoom level'. 
*/ @Immutable public class AggregateEvent { + /** the smallest time interval containing all the aggregated events */ final private Interval span; + /** the type of all the aggregted events */ final private EventType type; - final private Set eventIDs; - + /** the common description of all the aggregated events */ final private String description; + /** the description level of detail that the events were aggregated at. */ private final DescriptionLOD lod; + /** the set of ids of the aggregated events */ + final private Set eventIDs; + + /** + * the ids of the subset of aggregated events that have at least one tag + * applied to them + */ + private final Set tagged; + + /** + * the ids of the subset of aggregated events that have at least one hash + * set hit + */ private final Set hashHits; - public AggregateEvent(Interval spanningInterval, EventType type, Set eventIDs, Set hashHits, String description, DescriptionLOD lod) { + public AggregateEvent(Interval spanningInterval, EventType type, Set eventIDs, Set hashHits, Set tagged, String description, DescriptionLOD lod) { this.span = spanningInterval; this.type = type; this.hashHits = hashHits; + this.tagged = tagged; this.description = description; - this.eventIDs = eventIDs; this.lod = lod; } - /** @return the actual interval from the first event to the last event */ public Interval getSpan() { return span; } @@ -70,6 +84,10 @@ public class AggregateEvent { return Collections.unmodifiableSet(hashHits); } + public Set getEventIDsWithTags() { + return Collections.unmodifiableSet(tagged); + } + public String getDescription() { return description; } @@ -78,30 +96,33 @@ public class AggregateEvent { return type; } - /** - * merge two aggregate events into one new aggregate event. 
- * - * @param ag1 - * @param ag2 - * - * @return - */ - public static AggregateEvent merge(AggregateEvent ag1, AggregateEvent ag2) { - - if (ag1.getType() != ag2.getType()) { - throw new IllegalArgumentException("aggregate events are not compatible they have different types"); - } - - if (!ag1.getDescription().equals(ag2.getDescription())) { - throw new IllegalArgumentException("aggregate events are not compatible they have different descriptions"); - } - Sets.SetView idsUnion = Sets.union(ag1.getEventIDs(), ag2.getEventIDs()); - Sets.SetView hashHitsUnion = Sets.union(ag1.getEventIDsWithHashHits(), ag2.getEventIDsWithHashHits()); - - return new AggregateEvent(IntervalUtils.span(ag1.span, ag2.span), ag1.getType(), idsUnion, hashHitsUnion, ag1.getDescription(), ag1.lod); - } - public DescriptionLOD getLOD() { return lod; } + + /** + * merge two aggregate events into one new aggregate event. + * + * @param aggEvent1 + * @param aggEVent2 + * + * @return a new aggregate event that is the result of merging the given + * events + */ + public static AggregateEvent merge(AggregateEvent aggEvent1, AggregateEvent ag2) { + + if (aggEvent1.getType() != ag2.getType()) { + throw new IllegalArgumentException("aggregate events are not compatible they have different types"); + } + + if (!aggEvent1.getDescription().equals(ag2.getDescription())) { + throw new IllegalArgumentException("aggregate events are not compatible they have different descriptions"); + } + Sets.SetView idsUnion = Sets.union(aggEvent1.getEventIDs(), ag2.getEventIDs()); + Sets.SetView hashHitsUnion = Sets.union(aggEvent1.getEventIDsWithHashHits(), ag2.getEventIDsWithHashHits()); + Sets.SetView taggedUnion = Sets.union(aggEvent1.getEventIDsWithTags(), ag2.getEventIDsWithTags()); + + return new AggregateEvent(IntervalUtils.span(aggEvent1.span, ag2.span), aggEvent1.getType(), idsUnion, hashHitsUnion, taggedUnion, aggEvent1.getDescription(), aggEvent1.lod); + } + } diff --git 
a/Core/src/org/sleuthkit/autopsy/timeline/events/TimeLineEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/events/TimeLineEvent.java index be7e04e631..1d199ad840 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/TimeLineEvent.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/TimeLineEvent.java @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.timeline.events; +import javax.annotation.Nullable; import org.sleuthkit.autopsy.timeline.events.type.EventType; import org.sleuthkit.datamodel.TskData; @@ -29,7 +30,7 @@ public class TimeLineEvent { private final Long eventID; private final Long fileID; - + private final Long time; private final Long artifactID; @@ -42,7 +43,7 @@ public class TimeLineEvent { private final boolean hashHit; - public TimeLineEvent(Long eventID, Long objID, Long artifactID, Long time, EventType type, String fullDescription, String medDescription, String shortDescription, TskData.FileKnown known, boolean hashHit) { + public TimeLineEvent(Long eventID, Long objID, @Nullable Long artifactID, Long time, EventType type, String fullDescription, String medDescription, String shortDescription, TskData.FileKnown known, boolean hashHit) { this.eventID = eventID; this.fileID = objID; this.artifactID = artifactID; @@ -60,6 +61,7 @@ public class TimeLineEvent { return hashHit; } + @Nullable public Long getArtifactID() { return artifactID; } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/Bundle.properties b/Core/src/org/sleuthkit/autopsy/timeline/events/db/Bundle.properties deleted file mode 100644 index b09aa42a5b..0000000000 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/Bundle.properties +++ /dev/null @@ -1,6 +0,0 @@ -EventsRepository.progressWindow.msg.reinit_db=(re)initializing events database -EventsRepository.progressWindow.msg.populateMacEventsFiles=populating mac events for files\: -EventsRepository.progressWindow.msg.populateMacEventsFiles2=populating mac events for files\: 
-EventsRepository.progressWindow.msg.commitingDb=committing events db -EventsRepository.msgdlg.problem.text=There was a problem populating the timeline. Not all events may be present or accurate. See the log for details. -EventsRepository.progressWindow.populatingXevents=populating {0} events \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java index f5bbf5e3e7..a379a8630b 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java @@ -88,9 +88,10 @@ import org.sqlite.SQLiteJDBCLoader; */ public class EventDB { - private PreparedStatement insertHashSetStmt; - private PreparedStatement insertHashHitStmt; - private PreparedStatement selectHashSetStmt; + private PreparedStatement dropEventsTableStmt; + private PreparedStatement dropHashSetHitsTableStmt; + private PreparedStatement dropHashSetsTableStmt; + private PreparedStatement dropDBInfoTableStmt; /** enum to represent columns in the events table */ enum EventTableColumn { @@ -105,8 +106,9 @@ public class EventDB { FULL_DESCRIPTION("full_description"), // NON-NLS MED_DESCRIPTION("med_description"), // NON-NLS SHORT_DESCRIPTION("short_description"), // NON-NLS - TIME("time"), - HASH_HIT("hash_hit"); // NON-NLS + TIME("time"), // NON-NLS + HASH_HIT("hash_hit"), // NON-NLS + TAGGED("tagged"); // NON-NLS private final String columnName; @@ -183,12 +185,14 @@ public class EventDB { private PreparedStatement getDataSourceIDsStmt; private PreparedStatement insertRowStmt; private PreparedStatement recordDBInfoStmt; + private PreparedStatement insertHashSetStmt; + private PreparedStatement insertHashHitStmt; + private PreparedStatement selectHashSetStmt; + private PreparedStatement countAllEventsStmt; private final Set preparedStatements = new HashSet<>(); - private final ReentrantReadWriteLock rwLock = new 
ReentrantReadWriteLock(true); //use fairness policy - - private final Lock DBLock = rwLock.writeLock(); //using exclusive lock for all db ops for now + private final Lock DBLock = new ReentrantReadWriteLock(true).writeLock(); //using exclusive lock for all db ops for now private EventDB(Case autoCase) throws SQLException, Exception { //should this go into module output (or even cache, we should be able to rebuild it)? @@ -205,30 +209,6 @@ public class EventDB { } } - public Interval getSpanningInterval(Collection eventIDs) { - - Interval span = null; - DBLock.lock(); - try (Statement stmt = con.createStatement(); - //You can't inject multiple values into one ? paramater in prepared statement, - //so we make new statement each time... - ResultSet rs = stmt.executeQuery("select Min(time), Max(time) from events where event_id in (" + StringUtils.join(eventIDs, ", ") + ")");) { // NON-NLS - while (rs.next()) { - span = new Interval(rs.getLong("Min(time)"), rs.getLong("Max(time)") + 1, DateTimeZone.UTC); // NON-NLS - - } - } catch (SQLException ex) { - LOGGER.log(Level.SEVERE, "Error executing get spanning interval query.", ex); // NON-NLS - } finally { - DBLock.unlock(); - } - return span; - } - - EventTransaction beginTransaction() { - return new EventTransaction(); - } - void closeDBCon() { if (con != null) { try { @@ -241,6 +221,27 @@ public class EventDB { con = null; } + public Interval getSpanningInterval(Collection eventIDs) { + DBLock.lock(); + try (Statement stmt = con.createStatement(); + //You can't inject multiple values into one ? paramater in prepared statement, + //so we make new statement each time... 
+ ResultSet rs = stmt.executeQuery("select Min(time), Max(time) from events where event_id in (" + StringUtils.join(eventIDs, ", ") + ")");) { // NON-NLS + while (rs.next()) { + return new Interval(rs.getLong("Min(time)"), rs.getLong("Max(time)") + 1, DateTimeZone.UTC); // NON-NLS + } + } catch (SQLException ex) { + LOGGER.log(Level.SEVERE, "Error executing get spanning interval query.", ex); // NON-NLS + } finally { + DBLock.unlock(); + } + return null; + } + + EventTransaction beginTransaction() { + return new EventTransaction(); + } + void commitTransaction(EventTransaction tr, Boolean notify) { if (tr.isClosed()) { throw new IllegalArgumentException("can't close already closed transaction"); // NON-NLS @@ -248,24 +249,34 @@ public class EventDB { tr.commit(notify); } + /** + * @return the total number of events in the database or, + * -1 if there is an error. + */ int countAllEvents() { - int result = -1; DBLock.lock(); - //TODO convert this to prepared statement -jm - try (ResultSet rs = con.createStatement().executeQuery("select count(*) as count from events")) { // NON-NLS + try (ResultSet rs = countAllEventsStmt.executeQuery()) { // NON-NLS while (rs.next()) { - result = rs.getInt("count"); // NON-NLS - break; + return rs.getInt("count"); // NON-NLS } } catch (SQLException ex) { - Exceptions.printStackTrace(ex); + LOGGER.log(Level.SEVERE, "Error counting all events", ex); } finally { DBLock.unlock(); } - return result; + return -1; } - Map countEvents(ZoomParams params) { + /** + * get the count of all events that fit the given zoom params organized by + * the EvenType of the level spcified in the ZoomParams + * + * @param params the params that control what events to count and how to + * organize the returned map + * + * @return a map from event type( of the requested level) to event counts + */ + Map countEventsByType(ZoomParams params) { if (params.getTimeRange() != null) { return countEvents(params.getTimeRange().getStartMillis() / 1000, 
params.getTimeRange().getEndMillis() / 1000, @@ -275,22 +286,25 @@ public class EventDB { } } - void dropEventsTable() { - //TODO: use prepared statement - jm + /** + * drop the tables from this database and recreate them in order to start + * over. + */ + void reInitializeDB() { DBLock.lock(); - try (Statement createStatement = con.createStatement()) { - createStatement.execute("drop table if exists events"); // NON-NLS + try { + dropEventsTableStmt.executeUpdate(); + dropHashSetHitsTableStmt.executeUpdate(); + dropHashSetsTableStmt.executeUpdate(); + dropDBInfoTableStmt.executeUpdate(); + initializeDB();; } catch (SQLException ex) { - LOGGER.log(Level.SEVERE, "could not drop old events table", ex); // NON-NLS + LOGGER.log(Level.SEVERE, "could not drop old tables table", ex); // NON-NLS } finally { DBLock.unlock(); } } - List getAggregatedEvents(ZoomParams params) { - return getAggregatedEvents(params.getTimeRange(), params.getFilter(), params.getTypeZoomLevel(), params.getDescrLOD()); - } - Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter) { long start = timeRange.getStartMillis() / 1000; long end = timeRange.getEndMillis() / 1000; @@ -378,7 +392,7 @@ public class EventDB { boolean hasNewColumns() { /* this relies on the fact that no tskObj has ID 0 but 0 is the default * value for the datasource_id column in the events table. 
*/ - return hasHashHitColumn() && hasDataSourceIDColumn() + return hasHashHitColumn() && hasDataSourceIDColumn() && hasTaggedColumn() && (getDataSourceIDs().isEmpty() == false); } @@ -485,7 +499,7 @@ public class EventDB { + "PRIMARY KEY (key))"; // NON-NLS stmt.execute(sql); } catch (SQLException ex) { - LOGGER.log(Level.SEVERE, "problem creating db_info table", ex); // NON-NLS + LOGGER.log(Level.SEVERE, "problem creating db_info table", ex); // NON-NLS } try (Statement stmt = con.createStatement()) { @@ -516,6 +530,15 @@ public class EventDB { LOGGER.log(Level.SEVERE, "problem upgrading events table", ex); // NON-NLS } } + if (hasTaggedColumn() == false) { + try (Statement stmt = con.createStatement()) { + String sql = "ALTER TABLE events ADD COLUMN tagged INTEGER"; // NON-NLS + stmt.execute(sql); + } catch (SQLException ex) { + + LOGGER.log(Level.SEVERE, "problem upgrading events table", ex); // NON-NLS + } + } if (hasHashHitColumn() == false) { try (Statement stmt = con.createStatement()) { @@ -553,8 +576,8 @@ public class EventDB { try { insertRowStmt = prepareStatement( - "INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hash_hit) " // NON-NLS - + "VALUES (?,?,?,?,?,?,?,?,?,?,?)"); // NON-NLS + "INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hash_hit, tagged) " // NON-NLS + + "VALUES (?,?,?,?,?,?,?,?,?,?,?,?)"); // NON-NLS getDataSourceIDsStmt = prepareStatement("select distinct datasource_id from events"); // NON-NLS getMaxTimeStmt = prepareStatement("select Max(time) as max from events"); // NON-NLS @@ -565,6 +588,11 @@ public class EventDB { insertHashSetStmt = prepareStatement("insert or ignore into hash_sets (hash_set_name) values (?)"); selectHashSetStmt = prepareStatement("select hash_set_id from hash_sets where hash_set_name = ?"); insertHashHitStmt = 
prepareStatement("insert or ignore into hash_set_hits (hash_set_id, event_id) values (?,?)"); + countAllEventsStmt = prepareStatement("select count(*) as count from events"); + dropEventsTableStmt = prepareStatement("drop table if exists events"); + dropHashSetHitsTableStmt = prepareStatement("drop table if exists hash_set_hits"); + dropHashSetsTableStmt = prepareStatement("drop table if exists hash_sets"); + dropDBInfoTableStmt = prepareStatement("drop table if exists db_info"); } catch (SQLException sQLException) { LOGGER.log(Level.SEVERE, "failed to prepareStatment", sQLException); // NON-NLS } @@ -624,17 +652,21 @@ public class EventDB { return hasDBColumn(EventTableColumn.DATA_SOURCE_ID); } + private boolean hasTaggedColumn() { + return hasDBColumn(EventTableColumn.TAGGED); + } + private boolean hasHashHitColumn() { return hasDBColumn(EventTableColumn.HASH_HIT); } void insertEvent(long time, EventType type, long datasourceID, Long objID, Long artifactID, String fullDescription, String medDescription, - String shortDescription, TskData.FileKnown known, Set hashSets) { + String shortDescription, TskData.FileKnown known, Set hashSets, boolean tagged) { - EventTransaction trans = beginTransaction(); - insertEvent(time, type, datasourceID, objID, artifactID, fullDescription, medDescription, shortDescription, known, hashSets, trans); - commitTransaction(trans, true); + EventTransaction transaction = beginTransaction(); + insertEvent(time, type, datasourceID, objID, artifactID, fullDescription, medDescription, shortDescription, known, hashSets, tagged, transaction); + commitTransaction(transaction, true); } /** @@ -646,6 +678,7 @@ public class EventDB { void insertEvent(long time, EventType type, long datasourceID, Long objID, Long artifactID, String fullDescription, String medDescription, String shortDescription, TskData.FileKnown known, Set hashSetNames, + boolean tagged, EventTransaction transaction) { if (transaction.isClosed()) { @@ -660,7 +693,7 @@ public class
EventDB { DBLock.lock(); try { - //"INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hashHit) " + //"INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hashHit, tagged) " insertRowStmt.clearParameters(); insertRowStmt.setLong(1, datasourceID); if (objID != null) { @@ -689,6 +722,7 @@ public class EventDB { insertRowStmt.setByte(10, known == null ? TskData.FileKnown.UNKNOWN.getFileKnownValue() : known.getFileKnownValue()); insertRowStmt.setInt(11, hashSetNames.isEmpty() ? 0 : 1); + insertRowStmt.setInt(12, tagged ? 1 : 0); insertRowStmt.executeUpdate(); @@ -866,6 +900,10 @@ public class EventDB { return typeMap; } + List getAggregatedEvents(ZoomParams params) { + return getAggregatedEvents(params.getTimeRange(), params.getFilter(), params.getTypeZoomLevel(), params.getDescrLOD()); + } + /** * //TODO: update javadoc //TODO: split this into helper methods * @@ -938,6 +976,14 @@ public class EventDB { hashHits.add(executeQuery.getLong(EventTableColumn.EVENT_ID.toString())); } } + HashSet tagged = new HashSet<>(); + try (Statement st3 = con.createStatement();) { + + ResultSet executeQuery = st3.executeQuery("select event_id from events where event_id in (" + eventIDS + ") and tagged = 1"); + while (executeQuery.next()) { + tagged.add(executeQuery.getLong(EventTableColumn.EVENT_ID.toString())); + } + } EventType type = useSubTypes ? 
RootEventType.allTypes.get(rs.getInt(EventTableColumn.SUB_TYPE.toString())) : BaseTypes.values()[rs.getInt(EventTableColumn.BASE_TYPE.toString())]; @@ -946,6 +992,7 @@ public class EventDB { type, Stream.of(eventIDS.split(",")).map(Long::valueOf).collect(Collectors.toSet()), // NON-NLS hashHits, + tagged, rs.getString(descriptionColumn), lod); //put events in map from type/descrition -> event diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java index c1b40dc918..a09851269d 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java @@ -44,12 +44,18 @@ import org.apache.commons.lang3.StringUtils; import org.joda.time.Interval; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.services.TagsManager; import org.sleuthkit.autopsy.coreutils.HashHitUtils; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.timeline.ProgressWindow; import org.sleuthkit.autopsy.timeline.events.AggregateEvent; import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.events.TimeLineEvent; +import static org.sleuthkit.autopsy.timeline.events.db.Bundle.msgdlg_problem_text; +import static org.sleuthkit.autopsy.timeline.events.db.Bundle.progressWindow_msg_commitingDb; +import static org.sleuthkit.autopsy.timeline.events.db.Bundle.progressWindow_msg_populateMacEventsFiles; +import static org.sleuthkit.autopsy.timeline.events.db.Bundle.progressWindow_msg_reinit_db; +import static org.sleuthkit.autopsy.timeline.events.db.Bundle.progressWindow_populatingXevents; import org.sleuthkit.autopsy.timeline.events.type.ArtifactEventType; import org.sleuthkit.autopsy.timeline.events.type.EventType; import org.sleuthkit.autopsy.timeline.events.type.FileSystemTypes; 
@@ -131,14 +137,13 @@ public class EventsRepository { }).build(CacheLoader.from(eventDB::getEventById)); eventCountsCache = CacheBuilder.newBuilder().maximumSize(1000L).expireAfterAccess(10, TimeUnit.MINUTES).removalListener((RemovalNotification> rn) -> { //LOGGER.log(Level.INFO, "evicting counts: {0}", rn.toString()); - }).build(CacheLoader.from(eventDB::countEvents)); + }).build(CacheLoader.from(eventDB::countEventsByType)); aggregateEventsCache = CacheBuilder.newBuilder().maximumSize(1000L).expireAfterAccess(10, TimeUnit.MINUTES).removalListener((RemovalNotification> rn) -> { //LOGGER.log(Level.INFO, "evicting aggregated events: {0}", rn.toString()); }).build(CacheLoader.from(eventDB::getAggregatedEvents)); maxCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMaxTime)); minCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMinTime)); this.modelInstance = new FilteredEventsModel(this, currentStateProperty); - } /** @return min time (in seconds from unix epoch) */ @@ -231,30 +236,35 @@ public class EventsRepository { //TODO: can we avoid this with a state listener? does it amount to the same thing? 
//post population operation to execute - private final Runnable r; + private final Runnable postPopulationOperation; + private final SleuthkitCase skCase; + private final TagsManager tagsManager; - public DBPopulationWorker(Runnable r) { + public DBPopulationWorker(Runnable postPopulationOperation) { progressDialog = new ProgressWindow(null, true, this); progressDialog.setVisible(true); - this.r = r; + + skCase = autoCase.getSleuthkitCase(); + tagsManager = autoCase.getServices().getTagsManager(); + + this.postPopulationOperation = postPopulationOperation; } @Override + @NbBundle.Messages({"progressWindow.msg.populateMacEventsFiles=populating mac events for files:", + "progressWindow.msg.reinit_db=(re)initializing events database", + "progressWindow.msg.commitingDb=committing events db"}) protected Void doInBackground() throws Exception { - process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, NbBundle.getMessage(this.getClass(), - "EventsRepository.progressWindow.msg.reinit_db"), ""))); + process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, progressWindow_msg_reinit_db(), ""))); //reset database //TODO: can we do more incremental updates? -jm - eventDB.dropEventsTable(); - eventDB.initializeDB(); + eventDB.reInitializeDB(); //grab ids of all files - SleuthkitCase skCase = autoCase.getSleuthkitCase(); List files = skCase.findAllFileIdsWhere("name != '.' 
AND name != '..'"); final int numFiles = files.size(); - process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, numFiles, NbBundle.getMessage(this.getClass(), - "EventsRepository.progressWindow.msg.populateMacEventsFiles"), ""))); + process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, numFiles, progressWindow_msg_populateMacEventsFiles(), ""))); //insert file events into db int i = 1; @@ -266,7 +276,9 @@ public class EventsRepository { try { AbstractFile f = skCase.getAbstractFileById(fID); - if (f != null) { + if (f == null) { + LOGGER.log(Level.WARNING, "Failed to get data for file : {0}", fID); // NON-NLS + } else { //TODO: This is broken for logical files? fix -jm //TODO: logical files don't necessarily have valid timestamps, so ... -jm final String uniquePath = f.getUniquePath(); @@ -279,26 +291,24 @@ public class EventsRepository { final TskData.FileKnown known = f.getKnown(); boolean hashHit = f.getArtifactsCount(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT) > 0; Set hashSets = hashHit ? 
HashHitUtils.getHashSetNamesForFile(skCase, f.getId()) : Collections.emptySet(); + boolean tagged = !tagsManager.getContentTagsByContent(f).isEmpty(); //insert it into the db if time is > 0 => time is legitimate (drops logical files) if (f.getAtime() > 0) { - eventDB.insertEvent(f.getAtime(), FileSystemTypes.FILE_ACCESSED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, trans); + eventDB.insertEvent(f.getAtime(), FileSystemTypes.FILE_ACCESSED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans); } if (f.getMtime() > 0) { - eventDB.insertEvent(f.getMtime(), FileSystemTypes.FILE_MODIFIED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, trans); + eventDB.insertEvent(f.getMtime(), FileSystemTypes.FILE_MODIFIED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans); } if (f.getCtime() > 0) { - eventDB.insertEvent(f.getCtime(), FileSystemTypes.FILE_CHANGED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, trans); + eventDB.insertEvent(f.getCtime(), FileSystemTypes.FILE_CHANGED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans); } if (f.getCrtime() > 0) { - eventDB.insertEvent(f.getCrtime(), FileSystemTypes.FILE_CREATED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, trans); + eventDB.insertEvent(f.getCrtime(), FileSystemTypes.FILE_CREATED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans); } process(Arrays.asList(new ProgressWindow.ProgressUpdate(i, numFiles, - NbBundle.getMessage(this.getClass(), - "EventsRepository.progressWindow.msg.populateMacEventsFiles2"), f.getName()))); - } else { - LOGGER.log(Level.WARNING, "failed to look up data for file : {0}", fID); // NON-NLS + progressWindow_msg_populateMacEventsFiles(), f.getName()))); } } catch (TskCoreException tskCoreException) { LOGGER.log(Level.WARNING, "failed to insert mac event for file : 
" + fID, tskCoreException); // NON-NLS @@ -315,12 +325,11 @@ public class EventsRepository { } //skip file_system events, they are already handled above. if (type instanceof ArtifactEventType) { - populateEventType((ArtifactEventType) type, trans, skCase); + populateEventType((ArtifactEventType) type, trans); } } - process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, NbBundle.getMessage(this.getClass(), - "EventsRepository.progressWindow.msg.commitingDb"), ""))); + process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, progressWindow_msg_commitingDb(), ""))); if (isCancelled()) { eventDB.rollBackTransaction(trans); } else { @@ -346,6 +355,8 @@ public class EventsRepository { } @Override + @NbBundle.Messages("msgdlg.problem.text=There was a problem populating the timeline." + + " Not all events may be present or accurate. See the log for details.") protected void done() { super.done(); try { @@ -356,14 +367,12 @@ public class EventsRepository { LOGGER.log(Level.INFO, "Database population was cancelled by the user. Not all events may be present or accurate. 
See the log for details.", ex); // NON-NLS } catch (InterruptedException | ExecutionException ex) { LOGGER.log(Level.WARNING, "Exception while populating database.", ex); // NON-NLS - JOptionPane.showMessageDialog(null, NbBundle.getMessage(this.getClass(), - "EventsRepository.msgdlg.problem.text")); + JOptionPane.showMessageDialog(null, msgdlg_problem_text()); } catch (Exception ex) { LOGGER.log(Level.WARNING, "Unexpected exception while populating database.", ex); // NON-NLS - JOptionPane.showMessageDialog(null, NbBundle.getMessage(this.getClass(), - "EventsRepository.msgdlg.problem.text")); + JOptionPane.showMessageDialog(null, msgdlg_problem_text()); } - r.run(); //execute post db population operation + postPopulationOperation.run(); //execute post db population operation } /** @@ -373,16 +382,15 @@ public class EventsRepository { * @param trans the db transaction to use * @param skCase a reference to the sleuthkit case */ - private void populateEventType(final ArtifactEventType type, EventDB.EventTransaction trans, SleuthkitCase skCase) { + @NbBundle.Messages({"# {0} - event type ", "progressWindow.populatingXevents=populating {0} events"}) + private void populateEventType(final ArtifactEventType type, EventDB.EventTransaction trans) { try { //get all the blackboard artifacts corresponding to the given event sub_type final ArrayList blackboardArtifacts = skCase.getBlackboardArtifacts(type.getArtifactType()); final int numArtifacts = blackboardArtifacts.size(); process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, numArtifacts, - NbBundle.getMessage(this.getClass(), - "EventsRepository.progressWindow.populatingXevents", - type.toString()), ""))); + progressWindow_populatingXevents(type.toString()), ""))); int i = 0; for (final BlackboardArtifact bbart : blackboardArtifacts) { @@ -395,7 +403,11 @@ public class EventsRepository { AbstractFile f = skCase.getAbstractFileById(bbart.getObjectID()); boolean hashHit = 
f.getArtifactsCount(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT) > 0; Set hashSets = hashHit ? HashHitUtils.getHashSetNamesForFile(skCase, f.getId()) : Collections.emptySet(); - eventDB.insertEvent(eventDescription.getTime(), type, datasourceID, bbart.getObjectID(), bbart.getArtifactID(), eventDescription.getFullDescription(), eventDescription.getMedDescription(), eventDescription.getShortDescription(), null, hashSets, trans); + + boolean tagged = tagsManager.getContentTagsByContent(f).isEmpty() == false; + tagged |= tagsManager.getBlackboardArtifactTagsByArtifact(bbart).isEmpty() == false; + + eventDB.insertEvent(eventDescription.getTime(), type, datasourceID, bbart.getObjectID(), bbart.getArtifactID(), eventDescription.getFullDescription(), eventDescription.getMedDescription(), eventDescription.getShortDescription(), null, hashSets, tagged, trans); } i++; diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java index 766fd7a5fc..d260608c3d 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java @@ -22,7 +22,6 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.concurrent.ExecutionException; import java.util.logging.Level; import java.util.stream.Collectors; @@ -73,9 +72,13 @@ import org.sleuthkit.autopsy.timeline.filters.TextFilter; import org.sleuthkit.autopsy.timeline.filters.TypeFilter; import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD; import org.sleuthkit.autopsy.timeline.zooming.ZoomParams; +import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardArtifactTag; import org.sleuthkit.datamodel.BlackboardAttribute; +import 
org.sleuthkit.datamodel.ContentTag; import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TagName; import org.sleuthkit.datamodel.TskCoreException; /** Represents an {@link AggregateEvent} in a {@link EventDetailChart}. */ @@ -84,6 +87,7 @@ public class AggregateEventNode extends StackPane { private static final Image HASH_PIN = new Image(AggregateEventNode.class.getResourceAsStream("/org/sleuthkit/autopsy/images/hashset_hits.png")); private final static Image PLUS = new Image("/org/sleuthkit/autopsy/timeline/images/plus-button.png"); // NON-NLS private final static Image MINUS = new Image("/org/sleuthkit/autopsy/timeline/images/minus-button.png"); // NON-NLS + private final static Image TAG = new Image("/org/sleuthkit/autopsy/images/green-tag-icon-16.png"); // NON-NLS private static final CornerRadii CORNER_RADII = new CornerRadii(3); @@ -145,7 +149,7 @@ public class AggregateEventNode extends StackPane { private DescriptionVisibility descrVis; private final SleuthkitCase sleuthkitCase; private final FilteredEventsModel eventsModel; - private Map hashSetCounts = null; + private Tooltip tooltip; public AggregateEventNode(final AggregateEvent event, AggregateEventNode parentEventNode, EventDetailChart chart) { @@ -157,10 +161,14 @@ public class AggregateEventNode extends StackPane { eventsModel = chart.getController().getEventsModel(); final Region region = new Region(); HBox.setHgrow(region, Priority.ALWAYS); - ImageView imageView = new ImageView(HASH_PIN); - final HBox hBox = new HBox(descrLabel, countLabel, region, imageView, minusButton, plusButton); + ImageView hashIV = new ImageView(HASH_PIN); + ImageView tagIV = new ImageView(TAG); + final HBox hBox = new HBox(descrLabel, countLabel, region, hashIV, tagIV, minusButton, plusButton); if (event.getEventIDsWithHashHits().isEmpty()) { - hBox.getChildren().remove(imageView); + hBox.getChildren().remove(hashIV); + } + if (event.getEventIDsWithTags().isEmpty()) { + 
hBox.getChildren().remove(tagIV); } hBox.setPrefWidth(USE_COMPUTED_SIZE); hBox.setMinWidth(USE_PREF_SIZE); @@ -252,39 +260,70 @@ public class AggregateEventNode extends StackPane { } private void installTooltip() { - + //TODO: all this work should probably go on a background thread... if (tooltip == null) { - String collect = ""; + + HashMap hashSetCounts = new HashMap<>(); if (!event.getEventIDsWithHashHits().isEmpty()) { - if (Objects.isNull(hashSetCounts)) { - hashSetCounts = new HashMap<>(); - try { - for (TimeLineEvent tle : eventsModel.getEventsById(event.getEventIDsWithHashHits())) { - ArrayList blackboardArtifacts = sleuthkitCase.getBlackboardArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT, tle.getFileID()); - for (BlackboardArtifact artf : blackboardArtifacts) { - for (BlackboardAttribute attr : artf.getAttributes()) { - if (attr.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()) { - hashSetCounts.merge(attr.getValueString(), 1L, Long::sum); - }; + try { + for (TimeLineEvent tle : eventsModel.getEventsById(event.getEventIDsWithHashHits())) { + ArrayList blackboardArtifacts = sleuthkitCase.getBlackboardArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT, tle.getFileID()); + for (BlackboardArtifact artf : blackboardArtifacts) { + for (BlackboardAttribute attr : artf.getAttributes()) { + if (attr.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()) { + hashSetCounts.merge(attr.getValueString(), 1L, Long::sum); } } } - } catch (TskCoreException ex) { - Logger.getLogger(AggregateEventNode.class.getName()).log(Level.SEVERE, "Error getting hashset hit info for event.", ex); } + } catch (TskCoreException ex) { + Logger.getLogger(AggregateEventNode.class.getName()).log(Level.SEVERE, "Error getting hashset hit info for event.", ex); } - - collect = hashSetCounts.entrySet().stream() - .map((Map.Entry t) -> t.getKey() + " : " + t.getValue()) - .collect(Collectors.joining("\n")); - } + 
+ Map tags = new HashMap<>(); + if (!event.getEventIDsWithTags().isEmpty()) { + try { + for (TimeLineEvent tle : eventsModel.getEventsById(event.getEventIDsWithTags())) { + + AbstractFile abstractFileById = sleuthkitCase.getAbstractFileById(tle.getFileID()); + List contentTagsByContent = sleuthkitCase.getContentTagsByContent(abstractFileById); + for (ContentTag tag : contentTagsByContent) { + tags.putIfAbsent(tag.getId(), tag.getName()); + } + + Long artifactID = tle.getArtifactID(); + if (artifactID != 0) { + BlackboardArtifact blackboardArtifact = sleuthkitCase.getBlackboardArtifact(artifactID); + List blackboardArtifactTagsByArtifact = sleuthkitCase.getBlackboardArtifactTagsByArtifact(blackboardArtifact); + for (BlackboardArtifactTag tag : blackboardArtifactTagsByArtifact) { + tags.putIfAbsent(tag.getId(), tag.getName()); + } + } + } + } catch (TskCoreException ex) { + Logger.getLogger(AggregateEventNode.class.getName()).log(Level.SEVERE, "Error getting tag info for event.", ex); + } + } + + Map tagCounts = tags.values().stream() + .collect(Collectors.toMap(TagName::getDisplayName, anything -> 1L, Long::sum)); + + String hashSetCountsString = hashSetCounts.entrySet().stream() + .map((Map.Entry t) -> t.getKey() + " : " + t.getValue()) + .collect(Collectors.joining("\n")); + String tagCountsString = tagCounts.entrySet().stream() + .map((Map.Entry t) -> t.getKey() + " : " + t.getValue()) + .collect(Collectors.joining("\n")); + tooltip = new Tooltip( NbBundle.getMessage(this.getClass(), "AggregateEventNode.installTooltip.text", getEvent().getEventIDs().size(), getEvent().getType(), getEvent().getDescription(), getEvent().getSpan().getStart().toString(TimeLineController.getZonedFormatter()), getEvent().getSpan().getEnd().toString(TimeLineController.getZonedFormatter())) - + (collect.isEmpty() ? "" : "\n\nHash Set Hits\n" + collect)); + + (hashSetCountsString.isEmpty() ? "" : "\n\nHash Set Hits\n" + hashSetCountsString) + + (tagCountsString.isEmpty() ? 
"" : "\n\nTags\n" + tagCountsString) + ); Tooltip.install(AggregateEventNode.this, tooltip); } } From 10f7a45eb1779e2c935596f9f950cdb574437687 Mon Sep 17 00:00:00 2001 From: jmillman Date: Fri, 31 Jul 2015 16:58:29 -0400 Subject: [PATCH 2/7] WIP keep tagged column in sync with autopsy. --- .../autopsy/timeline/TimeLineController.java | 20 +++++++ .../autopsy/timeline/events/db/EventDB.java | 16 +++++ .../timeline/events/db/EventsRepository.java | 60 +++++++++++++++---- .../ui/detailview/DetailViewPane.java | 18 +++--- 4 files changed, 95 insertions(+), 19 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java index 5919531ab7..e7495c9c23 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java @@ -66,6 +66,10 @@ import static org.sleuthkit.autopsy.casemodule.Case.Events.DATA_SOURCE_ADDED; import org.sleuthkit.autopsy.coreutils.History; import org.sleuthkit.autopsy.coreutils.LoggedTask; import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.events.BlackBoardArtifactTagAddedEvent; +import org.sleuthkit.autopsy.events.BlackBoardArtifactTagDeletedEvent; +import org.sleuthkit.autopsy.events.ContentTagAddedEvent; +import org.sleuthkit.autopsy.events.ContentTagDeletedEvent; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.events.db.EventsRepository; @@ -786,6 +790,22 @@ public class TimeLineController { @Override public void propertyChange(PropertyChangeEvent evt) { switch (Case.Events.valueOf(evt.getPropertyName())) { + case BLACKBOARD_ARTIFACT_TAG_ADDED: + BlackBoardArtifactTagAddedEvent bTagAddedEvent = (BlackBoardArtifactTagAddedEvent) evt; + eventsRepository.handleTagAdded(bTagAddedEvent.getTag().getArtifact()); + break; + case 
BLACKBOARD_ARTIFACT_TAG_DELETED: + BlackBoardArtifactTagDeletedEvent bTagDeletedEvent = (BlackBoardArtifactTagDeletedEvent) evt; + eventsRepository.handleTagDeleted(bTagDeletedEvent.getTag().getArtifact()); + break; + case CONTENT_TAG_ADDED: + ContentTagAddedEvent cTagAddedEvent = (ContentTagAddedEvent) evt; + eventsRepository.handleTagAdded(cTagAddedEvent.getTag().getContent()); + break; + case CONTENT_TAG_DELETED: + ContentTagDeletedEvent cTagDeletedEvent = (ContentTagDeletedEvent) evt; + eventsRepository.handleTagDeleted(cTagDeletedEvent.getTag().getContent()); + break; case DATA_SOURCE_ADDED: // Content content = (Content) evt.getNewValue(); //if we are doing incremental updates, drop this diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java index a379a8630b..3cc163ddca 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java @@ -760,6 +760,22 @@ public class EventDB { } } + boolean updateEvent(long objectID, Long artifactID, boolean tagged) { + + DBLock.lock(); + try { + //UPDATE events SET tagged = ? where file_id == ? AND artifact_id == ? + int executeUpdate = con.createStatement().executeUpdate("UPDATE events SET tagged =" + (tagged ? 1 : 0) + " WHERE file_id == " + objectID + + " AND artifact_id IS " + (Objects.isNull(artifactID) ? 
"NULL" : artifactID.toString())); + return executeUpdate > 0; + } catch (SQLException ex) { + LOGGER.log(Level.SEVERE, "failed to insert event", ex); // NON-NLS + } finally { + DBLock.unlock(); + } + return false; + } + void recordLastArtifactID(long lastArtfID) { recordDBInfo(DBInfoKey.LAST_ARTIFACT_ID, lastArtfID); } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java index a09851269d..e385f2ca65 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java @@ -51,11 +51,6 @@ import org.sleuthkit.autopsy.timeline.ProgressWindow; import org.sleuthkit.autopsy.timeline.events.AggregateEvent; import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.events.TimeLineEvent; -import static org.sleuthkit.autopsy.timeline.events.db.Bundle.msgdlg_problem_text; -import static org.sleuthkit.autopsy.timeline.events.db.Bundle.progressWindow_msg_commitingDb; -import static org.sleuthkit.autopsy.timeline.events.db.Bundle.progressWindow_msg_populateMacEventsFiles; -import static org.sleuthkit.autopsy.timeline.events.db.Bundle.progressWindow_msg_reinit_db; -import static org.sleuthkit.autopsy.timeline.events.db.Bundle.progressWindow_populatingXevents; import org.sleuthkit.autopsy.timeline.events.type.ArtifactEventType; import org.sleuthkit.autopsy.timeline.events.type.EventType; import org.sleuthkit.autopsy.timeline.events.type.FileSystemTypes; @@ -64,6 +59,7 @@ import org.sleuthkit.autopsy.timeline.filters.RootFilter; import org.sleuthkit.autopsy.timeline.zooming.ZoomParams; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import 
org.sleuthkit.datamodel.TskData; @@ -106,6 +102,7 @@ public class EventsRepository { private final ObservableMap datasourcesMap = FXCollections.observableHashMap(); private final ObservableMap hashSetMap = FXCollections.observableHashMap(); private final Case autoCase; + synchronized public ObservableMap getDatasourcesMap() { return datasourcesMap; @@ -230,6 +227,45 @@ public class EventsRepository { return eventDB.hasNewColumns(); } + public void handleTagAdded(BlackboardArtifact artifact) { + boolean updateEvent = eventDB.updateEvent(artifact.getObjectID(), artifact.getArtifactID(), true); + if (updateEvent) { + aggregateEventsCache.invalidateAll(); + } + } + + public void handleTagDeleted(BlackboardArtifact artifact) { + try { + boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false; + boolean updateEvent = eventDB.updateEvent(artifact.getObjectID(), artifact.getArtifactID(), tagged); + if (updateEvent) { + aggregateEventsCache.invalidateAll(); + } + } catch (TskCoreException ex) { + LOGGER.log(Level.SEVERE, "unable to determine tagged status of attribute.", ex); + } + } + + public void handleTagAdded(Content content) { + boolean updateEvent = eventDB.updateEvent(content.getId(), null, true); + if (updateEvent) { + aggregateEventsCache.invalidateAll(); + } + + } + + public void handleTagDeleted(Content content) { + try { + boolean tagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false; + boolean updateEvent = eventDB.updateEvent(content.getId(), null, tagged); + if (updateEvent) { + aggregateEventsCache.invalidateAll(); + } + } catch (TskCoreException ex) { + LOGGER.log(Level.SEVERE, "unable to determine tagged status of content.", ex); + } + } + private class DBPopulationWorker extends SwingWorker { private final ProgressWindow progressDialog; @@ -255,7 +291,7 @@ public class EventsRepository { "progressWindow.msg.reinit_db=(re)initializing events 
database", "progressWindow.msg.commitingDb=committing events db"}) protected Void doInBackground() throws Exception { - process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, progressWindow_msg_reinit_db(), ""))); + process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, Bundle.progressWindow_msg_reinit_db(), ""))); //reset database //TODO: can we do more incremental updates? -jm eventDB.reInitializeDB(); @@ -264,7 +300,7 @@ public class EventsRepository { List files = skCase.findAllFileIdsWhere("name != '.' AND name != '..'"); final int numFiles = files.size(); - process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, numFiles, progressWindow_msg_populateMacEventsFiles(), ""))); + process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, numFiles, Bundle.progressWindow_msg_populateMacEventsFiles(), ""))); //insert file events into db int i = 1; @@ -308,7 +344,7 @@ public class EventsRepository { } process(Arrays.asList(new ProgressWindow.ProgressUpdate(i, numFiles, - progressWindow_msg_populateMacEventsFiles(), f.getName()))); + Bundle.progressWindow_msg_populateMacEventsFiles(), f.getName()))); } } catch (TskCoreException tskCoreException) { LOGGER.log(Level.WARNING, "failed to insert mac event for file : " + fID, tskCoreException); // NON-NLS @@ -329,7 +365,7 @@ public class EventsRepository { } } - process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, progressWindow_msg_commitingDb(), ""))); + process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, Bundle.progressWindow_msg_commitingDb(), ""))); if (isCancelled()) { eventDB.rollBackTransaction(trans); } else { @@ -367,10 +403,10 @@ public class EventsRepository { LOGGER.log(Level.INFO, "Database population was cancelled by the user. Not all events may be present or accurate. 
See the log for details.", ex); // NON-NLS } catch (InterruptedException | ExecutionException ex) { LOGGER.log(Level.WARNING, "Exception while populating database.", ex); // NON-NLS - JOptionPane.showMessageDialog(null, msgdlg_problem_text()); + JOptionPane.showMessageDialog(null, Bundle.msgdlg_problem_text()); } catch (Exception ex) { LOGGER.log(Level.WARNING, "Unexpected exception while populating database.", ex); // NON-NLS - JOptionPane.showMessageDialog(null, msgdlg_problem_text()); + JOptionPane.showMessageDialog(null, Bundle.msgdlg_problem_text()); } postPopulationOperation.run(); //execute post db population operation } @@ -390,7 +426,7 @@ public class EventsRepository { final int numArtifacts = blackboardArtifacts.size(); process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, numArtifacts, - progressWindow_populatingXevents(type.toString()), ""))); + Bundle.progressWindow_populatingXevents(type.toString()), ""))); int i = 0; for (final BlackboardArtifact bbart : blackboardArtifacts) { diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailViewPane.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailViewPane.java index 644059ef90..2608598276 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailViewPane.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailViewPane.java @@ -37,7 +37,17 @@ import javafx.scene.Cursor; import javafx.scene.chart.Axis; import javafx.scene.chart.BarChart; import javafx.scene.chart.XYChart; -import javafx.scene.control.*; +import javafx.scene.control.CheckBox; +import javafx.scene.control.CustomMenuItem; +import javafx.scene.control.Label; +import javafx.scene.control.MenuButton; +import javafx.scene.control.MultipleSelectionModel; +import javafx.scene.control.RadioButton; +import javafx.scene.control.ScrollBar; +import javafx.scene.control.SeparatorMenuItem; +import javafx.scene.control.Slider; +import javafx.scene.control.ToggleGroup; +import 
javafx.scene.control.TreeItem; import javafx.scene.effect.Effect; import static javafx.scene.input.KeyCode.DOWN; import static javafx.scene.input.KeyCode.KP_DOWN; @@ -95,12 +105,6 @@ public class DetailViewPane extends AbstractVisualization> treeSelectionModel; - @FXML - protected ResourceBundle resources; - - @FXML - protected URL location; - //these three could be injected from fxml but it was causing npe's private final DateAxis dateAxis = new DateAxis(); From 4790f84a0e7fd26214d580bc8f7c19f76301a885 Mon Sep 17 00:00:00 2001 From: jmillman Date: Mon, 3 Aug 2015 13:29:36 -0400 Subject: [PATCH 3/7] have tags visualization update in response to tag creation/deletion make aggregateEvent mutable.... ( i am not sure I like this) remove overzealos usage of enums for db table columns wire new tag related events from timeline controller through filtered events eventbus to aggregateeventnode created new timeline internal tag events --- .../autopsy/timeline/TimeLineController.java | 18 +- .../timeline/events/AggregateEvent.java | 16 +- .../timeline/events/EventsTaggedEvent.java | 25 +++ .../timeline/events/EventsUnTaggedEvent.java | 25 +++ .../timeline/events/FilteredEventsModel.java | 67 ++++++- .../timeline/events/TimeLineEvent.java | 8 +- .../autopsy/timeline/events/db/EventDB.java | 181 ++++++++---------- .../timeline/events/db/EventsRepository.java | 59 ++---- .../autopsy/timeline/events/db/SQLHelper.java | 29 ++- .../ui/detailview/AggregateEventNode.java | 38 +++- 10 files changed, 280 insertions(+), 186 deletions(-) create mode 100644 Core/src/org/sleuthkit/autopsy/timeline/events/EventsTaggedEvent.java create mode 100644 Core/src/org/sleuthkit/autopsy/timeline/events/EventsUnTaggedEvent.java diff --git a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java index e7495c9c23..33fe52c85f 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java +++ 
b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java @@ -165,10 +165,8 @@ public class TimeLineController { @GuardedBy("this") private boolean listeningToAutopsy = false; - private final PropertyChangeListener caseListener; - + private final PropertyChangeListener caseListener = new AutopsyCaseListener(); private final PropertyChangeListener ingestJobListener = new AutopsyIngestJobListener(); - private final PropertyChangeListener ingestModuleListener = new AutopsyIngestModuleListener(); @GuardedBy("this") @@ -242,8 +240,6 @@ public class TimeLineController { DescriptionLOD.SHORT); historyManager.advance(InitialZoomState); - //persistent listener instances - caseListener = new AutopsyCaseListener(); } /** @return a shared events model */ @@ -791,20 +787,16 @@ public class TimeLineController { public void propertyChange(PropertyChangeEvent evt) { switch (Case.Events.valueOf(evt.getPropertyName())) { case BLACKBOARD_ARTIFACT_TAG_ADDED: - BlackBoardArtifactTagAddedEvent bTagAddedEvent = (BlackBoardArtifactTagAddedEvent) evt; - eventsRepository.handleTagAdded(bTagAddedEvent.getTag().getArtifact()); + filteredEvents.handleTagAdded((BlackBoardArtifactTagAddedEvent) evt); break; case BLACKBOARD_ARTIFACT_TAG_DELETED: - BlackBoardArtifactTagDeletedEvent bTagDeletedEvent = (BlackBoardArtifactTagDeletedEvent) evt; - eventsRepository.handleTagDeleted(bTagDeletedEvent.getTag().getArtifact()); + filteredEvents.handleTagDeleted((BlackBoardArtifactTagDeletedEvent) evt); break; case CONTENT_TAG_ADDED: - ContentTagAddedEvent cTagAddedEvent = (ContentTagAddedEvent) evt; - eventsRepository.handleTagAdded(cTagAddedEvent.getTag().getContent()); + filteredEvents.handleTagAdded((ContentTagAddedEvent) evt); break; case CONTENT_TAG_DELETED: - ContentTagDeletedEvent cTagDeletedEvent = (ContentTagDeletedEvent) evt; - eventsRepository.handleTagDeleted(cTagDeletedEvent.getTag().getContent()); + filteredEvents.handleTagDeleted((ContentTagDeletedEvent) evt); break; case 
DATA_SOURCE_ADDED: // Content content = (Content) evt.getNewValue(); diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java index 2e4c5ac45d..6550a230e3 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java @@ -21,17 +21,17 @@ package org.sleuthkit.autopsy.timeline.events; import com.google.common.collect.Sets; import java.util.Collections; import java.util.Set; -import javax.annotation.concurrent.Immutable; +import java.util.stream.Collectors; import org.joda.time.Interval; import org.sleuthkit.autopsy.timeline.events.type.EventType; import org.sleuthkit.autopsy.timeline.utils.IntervalUtils; import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD; -/** Represents a set of other (TimeLineEvent) events aggregated together. All +/** + * Represents a set of other (TimeLineEvent) events aggregated together. All * the sub events should have the same type and matching descriptions at the * designated 'zoom level'. 
*/ -@Immutable public class AggregateEvent { /** the smallest time interval containing all the aggregated events */ @@ -125,4 +125,14 @@ public class AggregateEvent { return new AggregateEvent(IntervalUtils.span(aggEvent1.span, ag2.span), aggEvent1.getType(), idsUnion, hashHitsUnion, taggedUnion, aggEvent1.getDescription(), aggEvent1.lod); } + public boolean removeTags(Set eventIDs) { + return tagged.removeAll(eventIDs); + } + + public boolean addTags(Set collect) { + return tagged.addAll(collect.stream() + .filter(eventIDs::contains) + .collect(Collectors.toSet())); + } + } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/EventsTaggedEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/events/EventsTaggedEvent.java new file mode 100644 index 0000000000..b6f7205443 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/EventsTaggedEvent.java @@ -0,0 +1,25 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package org.sleuthkit.autopsy.timeline.events; + +import java.util.Set; + +/** + * + */ +public class EventsTaggedEvent { + + private final Set eventIDs; + + public EventsTaggedEvent(Set eventIDs) { + this.eventIDs = eventIDs; + } + + public Set getEventIDs() { + return eventIDs; + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/EventsUnTaggedEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/events/EventsUnTaggedEvent.java new file mode 100644 index 0000000000..230de58fb6 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/EventsUnTaggedEvent.java @@ -0,0 +1,25 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. 
+ */ +package org.sleuthkit.autopsy.timeline.events; + +import java.util.Set; + +/** + * + */ +public class EventsUnTaggedEvent { + + private final Set eventIDs; + + public Set getEventIDs() { + return eventIDs; + } + + public EventsUnTaggedEvent(Set eventIDs) { + this.eventIDs = eventIDs; + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java b/Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java index 1aefce8a8f..8dbeb6d301 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java @@ -18,10 +18,13 @@ */ package org.sleuthkit.autopsy.timeline.events; +import com.google.common.eventbus.EventBus; +import static com.sun.xml.internal.ws.spi.db.BindingContextFactory.LOGGER; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.logging.Level; import javafx.beans.Observable; import javafx.beans.property.ReadOnlyObjectProperty; import javafx.beans.property.ReadOnlyObjectWrapper; @@ -29,6 +32,11 @@ import javafx.collections.MapChangeListener; import javax.annotation.concurrent.GuardedBy; import org.joda.time.DateTimeZone; import org.joda.time.Interval; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.events.BlackBoardArtifactTagAddedEvent; +import org.sleuthkit.autopsy.events.BlackBoardArtifactTagDeletedEvent; +import org.sleuthkit.autopsy.events.ContentTagAddedEvent; +import org.sleuthkit.autopsy.events.ContentTagDeletedEvent; import org.sleuthkit.autopsy.timeline.TimeLineView; import org.sleuthkit.autopsy.timeline.events.db.EventsRepository; import org.sleuthkit.autopsy.timeline.events.type.EventType; @@ -45,6 +53,9 @@ import org.sleuthkit.autopsy.timeline.filters.TypeFilter; import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD; import org.sleuthkit.autopsy.timeline.zooming.EventTypeZoomLevel; import 
org.sleuthkit.autopsy.timeline.zooming.ZoomParams; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.TskCoreException; /** * This class acts as the model for a {@link TimeLineView} @@ -91,6 +102,8 @@ public final class FilteredEventsModel { @GuardedBy("this") private final ReadOnlyObjectWrapper requestedZoomParamters = new ReadOnlyObjectWrapper<>(); + private final EventBus eventbus = new EventBus("Event_Repository_EventBus"); + /** * The underlying repo for events. Atomic access to repo is synchronized * internally, but compound access should be done with the intrinsic lock of @@ -98,10 +111,12 @@ public final class FilteredEventsModel { */ @GuardedBy("this") private final EventsRepository repo; + private final Case autoCase; /** @return the default filter used at startup */ public RootFilter getDefaultFilter() { DataSourcesFilter dataSourcesFilter = new DataSourcesFilter(); + repo.getDatasourcesMap().entrySet().stream().forEach((Map.Entry t) -> { DataSourceFilter dataSourceFilter = new DataSourceFilter(t.getValue(), t.getKey()); dataSourceFilter.setSelected(Boolean.TRUE); @@ -119,7 +134,7 @@ public final class FilteredEventsModel { public FilteredEventsModel(EventsRepository repo, ReadOnlyObjectProperty currentStateProperty) { this.repo = repo; - + this.autoCase = repo.getAutoCase(); repo.getDatasourcesMap().addListener((MapChangeListener.Change change) -> { DataSourceFilter dataSourceFilter = new DataSourceFilter(change.getValueAdded(), change.getKey()); RootFilter rootFilter = filter().get(); @@ -298,4 +313,54 @@ public final class FilteredEventsModel { return requestedLOD.get(); } + public void handleTagAdded(BlackBoardArtifactTagAddedEvent e) { + BlackboardArtifact artifact = e.getTag().getArtifact(); + Set updatedEventIDs = repo.markEventsTagged(artifact.getObjectID(), artifact.getArtifactID(), true); + if (!updatedEventIDs.isEmpty()) { + eventbus.post(new 
EventsTaggedEvent(updatedEventIDs)); + } + } + + public void handleTagDeleted(BlackBoardArtifactTagDeletedEvent e) { + BlackboardArtifact artifact = e.getTag().getArtifact(); + try { + boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false; + Set updatedEventIDs = repo.markEventsTagged(artifact.getObjectID(), artifact.getArtifactID(), tagged); + if (!updatedEventIDs.isEmpty()) { + eventbus.post(new EventsUnTaggedEvent(updatedEventIDs)); + } + } catch (TskCoreException ex) { + LOGGER.log(Level.SEVERE, "unable to determine tagged status of attribute.", ex); + } + } + + public void handleTagAdded(ContentTagAddedEvent e) { + Content content = e.getTag().getContent(); + Set updatedEventIDs = repo.markEventsTagged(content.getId(), null, true); + if (!updatedEventIDs.isEmpty()) { + eventbus.post(new EventsTaggedEvent(updatedEventIDs)); + } + } + + public void handleTagDeleted(ContentTagDeletedEvent e) { + Content content = e.getTag().getContent(); + try { + boolean tagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false; + + Set updatedEventIDs = repo.markEventsTagged(content.getId(), null, tagged); + if (!updatedEventIDs.isEmpty()) { + eventbus.post(new EventsUnTaggedEvent(updatedEventIDs)); + } + } catch (TskCoreException ex) { + LOGGER.log(Level.SEVERE, "unable to determine tagged status of content.", ex); + } + } + + public void register(Object o) { + eventbus.register(o); + } + + public void unRegister(Object o) { + eventbus.unregister(o); + } } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/TimeLineEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/events/TimeLineEvent.java index 1d199ad840..dfe2045293 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/TimeLineEvent.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/TimeLineEvent.java @@ -42,8 +42,9 @@ public class TimeLineEvent { private final TskData.FileKnown known; 
private final boolean hashHit; + private final boolean tagged; - public TimeLineEvent(Long eventID, Long objID, @Nullable Long artifactID, Long time, EventType type, String fullDescription, String medDescription, String shortDescription, TskData.FileKnown known, boolean hashHit) { + public TimeLineEvent(Long eventID, Long objID, @Nullable Long artifactID, Long time, EventType type, String fullDescription, String medDescription, String shortDescription, TskData.FileKnown known, boolean hashHit, boolean tagged) { this.eventID = eventID; this.fileID = objID; this.artifactID = artifactID; @@ -55,6 +56,11 @@ public class TimeLineEvent { this.shortDescription = shortDescription; this.known = known; this.hashHit = hashHit; + this.tagged = tagged; + } + + public boolean isTagged() { + return tagged; } public boolean isHashHit() { diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java index 3cc163ddca..5ef130a83d 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java @@ -46,6 +46,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.logging.Level; import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.annotation.Nonnull; import org.apache.commons.lang3.StringUtils; import org.joda.time.DateTimeZone; import org.joda.time.Interval; @@ -88,41 +89,6 @@ import org.sqlite.SQLiteJDBCLoader; */ public class EventDB { - private PreparedStatement dropEventsTableStmt; - private PreparedStatement dropHashSetHitsTableStmt; - private PreparedStatement dropHashSetsTableStmt; - private PreparedStatement dropDBInfoTableStmt; - - /** enum to represent columns in the events table */ - enum EventTableColumn { - - EVENT_ID("event_id"), // NON-NLS - FILE_ID("file_id"), // NON-NLS - ARTIFACT_ID("artifact_id"), // NON-NLS - BASE_TYPE("base_type"), // NON-NLS - 
SUB_TYPE("sub_type"), // NON-NLS - KNOWN("known_state"), // NON-NLS - DATA_SOURCE_ID("datasource_id"), // NON-NLS - FULL_DESCRIPTION("full_description"), // NON-NLS - MED_DESCRIPTION("med_description"), // NON-NLS - SHORT_DESCRIPTION("short_description"), // NON-NLS - TIME("time"), // NON-NLS - HASH_HIT("hash_hit"), // NON-NLS - TAGGED("tagged"); // NON-NLS - - private final String columnName; - - private EventTableColumn(String columnName) { - this.columnName = columnName; - } - - @Override - public String toString() { - return columnName; - } - - } - /** enum to represent keys stored in db_info table */ private enum DBInfoKey { @@ -189,6 +155,11 @@ public class EventDB { private PreparedStatement insertHashHitStmt; private PreparedStatement selectHashSetStmt; private PreparedStatement countAllEventsStmt; + private PreparedStatement dropEventsTableStmt; + private PreparedStatement dropHashSetHitsTableStmt; + private PreparedStatement dropHashSetsTableStmt; + private PreparedStatement dropDBInfoTableStmt; + private PreparedStatement selectEventsFromOBjectAndArtifactStmt; private final Set preparedStatements = new HashSet<>(); @@ -369,7 +340,7 @@ public class EventDB { ResultSet rs = stmt.executeQuery(query)) { while (rs.next()) { - resultIDs.add(rs.getLong(EventTableColumn.EVENT_ID.toString())); + resultIDs.add(rs.getLong("event_id")); } } catch (SQLException sqlEx) { @@ -401,7 +372,7 @@ public class EventDB { DBLock.lock(); try (ResultSet rs = getDataSourceIDsStmt.executeQuery()) { while (rs.next()) { - long datasourceID = rs.getLong(EventTableColumn.DATA_SOURCE_ID.toString()); + long datasourceID = rs.getLong("datasource_id"); //this relies on the fact that no tskObj has ID 0 but 0 is the default value for the datasource_id column in the events table. 
if (datasourceID != 0) { hashSet.add(datasourceID); @@ -568,31 +539,32 @@ public class EventDB { LOGGER.log(Level.SEVERE, "problem creating hash_set_hits table", ex); } - createEventsIndex(Arrays.asList(EventTableColumn.FILE_ID)); - createEventsIndex(Arrays.asList(EventTableColumn.ARTIFACT_ID)); - createEventsIndex(Arrays.asList(EventTableColumn.SUB_TYPE, EventTableColumn.TIME)); - createEventsIndex(Arrays.asList(EventTableColumn.BASE_TYPE, EventTableColumn.TIME)); - createEventsIndex(Arrays.asList(EventTableColumn.KNOWN)); + createIndex("events", Arrays.asList("file_id")); + createIndex("events", Arrays.asList("artifact_id")); + createIndex("events", Arrays.asList("sub_type", "time")); + createIndex("events", Arrays.asList("base_type", "time")); + createIndex("events", Arrays.asList("known_state")); try { insertRowStmt = prepareStatement( "INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hash_hit, tagged) " // NON-NLS + "VALUES (?,?,?,?,?,?,?,?,?,?,?,?)"); // NON-NLS - getDataSourceIDsStmt = prepareStatement("select distinct datasource_id from events"); // NON-NLS - getMaxTimeStmt = prepareStatement("select Max(time) as max from events"); // NON-NLS - getMinTimeStmt = prepareStatement("select Min(time) as min from events"); // NON-NLS - getEventByIDStmt = prepareStatement("select * from events where event_id = ?"); // NON-NLS - recordDBInfoStmt = prepareStatement("insert or replace into db_info (key, value) values (?, ?)"); // NON-NLS - getDBInfoStmt = prepareStatement("select value from db_info where key = ?"); // NON-NLS - insertHashSetStmt = prepareStatement("insert or ignore into hash_sets (hash_set_name) values (?)"); - selectHashSetStmt = prepareStatement("select hash_set_id from hash_sets where hash_set_name = ?"); - insertHashHitStmt = prepareStatement("insert or ignore into hash_set_hits (hash_set_id, event_id) values (?,?)"); - countAllEventsStmt = 
prepareStatement("select count(*) as count from events"); - dropEventsTableStmt = prepareStatement("drop table if exists events"); - dropHashSetHitsTableStmt = prepareStatement("drop table if exists hash_set_hits"); - dropHashSetsTableStmt = prepareStatement("drop table if exists hash_sets"); - dropDBInfoTableStmt = prepareStatement("drop table if exists db_ino"); + getDataSourceIDsStmt = prepareStatement("SELECT DISTINCT datasource_id FROM events"); // NON-NLS + getMaxTimeStmt = prepareStatement("SELECT Max(time) AS max FROM events"); // NON-NLS + getMinTimeStmt = prepareStatement("SELECT Min(time) AS min FROM events"); // NON-NLS + getEventByIDStmt = prepareStatement("SELECT * FROM events WHERE event_id = ?"); // NON-NLS + recordDBInfoStmt = prepareStatement("INSERT OR REPLACE INTO db_info (key, value) values (?, ?)"); // NON-NLS + getDBInfoStmt = prepareStatement("SELECT value FROM db_info WHERE key = ?"); // NON-NLS + insertHashSetStmt = prepareStatement("INSERT OR IGNORE INTO hash_sets (hash_set_name) values (?)"); + selectHashSetStmt = prepareStatement("SELECT hash_set_id FROM hash_sets WHERE hash_set_name = ?"); + insertHashHitStmt = prepareStatement("INSERT OR IGNORE INTO hash_set_hits (hash_set_id, event_id) values (?,?)"); + countAllEventsStmt = prepareStatement("SELECT count(*) AS count FROM events"); + dropEventsTableStmt = prepareStatement("DROP TABLE IF EXISTS events"); + dropHashSetHitsTableStmt = prepareStatement("DROP TABLE IF EXISTS hash_set_hits"); + dropHashSetsTableStmt = prepareStatement("DROP TABLE IF EXISTS hash_sets"); + dropDBInfoTableStmt = prepareStatement("DROP TABLE IF EXISTS db_ino"); + selectEventsFromOBjectAndArtifactStmt = prepareStatement("SELECT event_id FROM events WHERE file_id == ? 
AND artifact_id IS ?"); } catch (SQLException sQLException) { LOGGER.log(Level.SEVERE, "failed to prepareStatment", sQLException); // NON-NLS } @@ -602,15 +574,6 @@ public class EventDB { } } - /** - * @param tableName the value of tableName - * @param columnList the value of columnList - */ - private void createEventsIndex(final List columnList) { - createIndex("events", - columnList.stream().map(EventTableColumn::toString).collect(Collectors.toList())); - } - /** * * @param tableName the value of tableName @@ -633,12 +596,12 @@ public class EventDB { * * @return the boolean */ - private boolean hasDBColumn(final EventTableColumn dbColumn) { + private boolean hasDBColumn(@Nonnull final String dbColumn) { try (Statement stmt = con.createStatement()) { ResultSet executeQuery = stmt.executeQuery("PRAGMA table_info(events)"); while (executeQuery.next()) { - if (dbColumn.toString().equals(executeQuery.getString("name"))) { + if (dbColumn.equals(executeQuery.getString("name"))) { return true; } } @@ -649,15 +612,15 @@ public class EventDB { } private boolean hasDataSourceIDColumn() { - return hasDBColumn(EventTableColumn.DATA_SOURCE_ID); + return hasDBColumn("datasource_id"); } private boolean hasTaggedColumn() { - return hasDBColumn(EventTableColumn.TAGGED); + return hasDBColumn("tagged"); } private boolean hasHashHitColumn() { - return hasDBColumn(EventTableColumn.HASH_HIT); + return hasDBColumn("hash_hit"); } void insertEvent(long time, EventType type, long datasourceID, Long objID, @@ -760,20 +723,36 @@ public class EventDB { } } - boolean updateEvent(long objectID, Long artifactID, boolean tagged) { + Set markEventsTagged(long objectID, Long artifactID, boolean tagged) { + HashSet eventIDs = new HashSet<>(); DBLock.lock(); try { - //UPDATE events SET tagged = ? where file_id == ? AND artifact_id == ? - int executeUpdate = con.createStatement().executeUpdate("UPDATE events SET tagged =" + (tagged ? 
1 : 0) + " WHERE file_id == " + objectID - + " AND artifact_id IS " + (Objects.isNull(artifactID) ? "NULL" : artifactID.toString())); - return executeUpdate > 0; + selectEventsFromOBjectAndArtifactStmt.clearParameters(); + selectEventsFromOBjectAndArtifactStmt.setLong(1, objectID); + if (Objects.isNull(artifactID)) { + selectEventsFromOBjectAndArtifactStmt.setNull(2, Types.INTEGER); + } else { + selectEventsFromOBjectAndArtifactStmt.setLong(2, artifactID); + } + try (ResultSet eventsToUpdateRS = selectEventsFromOBjectAndArtifactStmt.executeQuery();) { + while (eventsToUpdateRS.next()) { + eventIDs.add(eventsToUpdateRS.getLong("event_id")); + } + try (Statement updateStatement = con.createStatement();) { + int updatedRowCount = updateStatement.executeUpdate("UPDATE events SET tagged = " + (tagged ? 1 : 0) + + " WHERE event_id IN (" + StringUtils.join(eventIDs, ",") + ")"); + if (updatedRowCount != eventIDs.size()) { + LOGGER.log(Level.SEVERE, "Updated row count did not match expectation when marking events as {0}", (tagged ? "" : "(un)") + tagged); // NON-NLS + } + } + } } catch (SQLException ex) { - LOGGER.log(Level.SEVERE, "failed to insert event", ex); // NON-NLS + LOGGER.log(Level.SEVERE, "failed to mark events as " + (tagged ? 
"" : "(un)") + tagged, ex); // NON-NLS } finally { DBLock.unlock(); } - return false; + return eventIDs; } void recordLastArtifactID(long lastArtfID) { @@ -841,15 +820,16 @@ public class EventDB { } private TimeLineEvent constructTimeLineEvent(ResultSet rs) throws SQLException { - return new TimeLineEvent(rs.getLong(EventTableColumn.EVENT_ID.toString()), - rs.getLong(EventTableColumn.FILE_ID.toString()), - rs.getLong(EventTableColumn.ARTIFACT_ID.toString()), - rs.getLong(EventTableColumn.TIME.toString()), RootEventType.allTypes.get(rs.getInt(EventTableColumn.SUB_TYPE.toString())), - rs.getString(EventTableColumn.FULL_DESCRIPTION.toString()), - rs.getString(EventTableColumn.MED_DESCRIPTION.toString()), - rs.getString(EventTableColumn.SHORT_DESCRIPTION.toString()), - TskData.FileKnown.valueOf(rs.getByte(EventTableColumn.KNOWN.toString())), - rs.getInt(EventTableColumn.HASH_HIT.toString()) != 0); + return new TimeLineEvent(rs.getLong("event_id"), + rs.getLong("file_id"), + rs.getLong("artifact_id"), + rs.getLong("time"), RootEventType.allTypes.get(rs.getInt("sub_type")), + rs.getString("full_description"), + rs.getString("med_description"), + rs.getString("short_description"), + TskData.FileKnown.valueOf(rs.getByte("known_state")), + rs.getInt("hash_hit") != 0, + rs.getInt("tagged") != 0); } /** @@ -897,8 +877,8 @@ public class EventDB { while (rs.next()) { EventType type = useSubTypes - ? RootEventType.allTypes.get(rs.getInt(EventTableColumn.SUB_TYPE.toString())) - : BaseTypes.values()[rs.getInt(EventTableColumn.BASE_TYPE.toString())]; + ? 
RootEventType.allTypes.get(rs.getInt("sub_type")) + : BaseTypes.values()[rs.getInt("base_type")]; typeMap.put(type, rs.getLong("count(*)")); // NON-NLS } @@ -972,14 +952,12 @@ public class EventDB { + " group by interval, " + useSubTypeHelper(useSubTypes) + " , " + descriptionColumn // NON-NLS + " order by Min(time)"; // NON-NLS System.out.println(query); - ResultSet rs = null; - try (Statement stmt = con.createStatement(); // scoop up requested events in groups organized by interval, type, and desription - ) { + // scoop up requested events in groups organized by interval, type, and desription + try (ResultSet rs = con.createStatement().executeQuery(query);) { Stopwatch stopwatch = new Stopwatch(); stopwatch.start(); - rs = stmt.executeQuery(query); stopwatch.stop(); System.out.println(stopwatch.elapsedMillis() / 1000.0 + " seconds"); while (rs.next()) { @@ -989,7 +967,7 @@ public class EventDB { ResultSet executeQuery = st2.executeQuery("select event_id from events where event_id in (" + eventIDS + ") and hash_hit = 1"); while (executeQuery.next()) { - hashHits.add(executeQuery.getLong(EventTableColumn.EVENT_ID.toString())); + hashHits.add(executeQuery.getLong("event_id")); } } HashSet tagged = new HashSet<>(); @@ -997,11 +975,11 @@ public class EventDB { ResultSet executeQuery = st3.executeQuery("select event_id from events where event_id in (" + eventIDS + ") and tagged = 1"); while (executeQuery.next()) { - tagged.add(executeQuery.getLong(EventTableColumn.EVENT_ID.toString())); + tagged.add(executeQuery.getLong("event_id")); } } - EventType type = useSubTypes ? RootEventType.allTypes.get(rs.getInt(EventTableColumn.SUB_TYPE.toString())) : BaseTypes.values()[rs.getInt(EventTableColumn.BASE_TYPE.toString())]; + EventType type = useSubTypes ? 
RootEventType.allTypes.get(rs.getInt("sub_type")) : BaseTypes.values()[rs.getInt("base_type")]; AggregateEvent aggregateEvent = new AggregateEvent( new Interval(rs.getLong("Min(time)") * 1000, rs.getLong("Max(time)") * 1000, TimeLineController.getJodaTimeZone()), // NON-NLS @@ -1023,11 +1001,6 @@ public class EventDB { } catch (SQLException ex) { Exceptions.printStackTrace(ex); } finally { - try { - rs.close(); - } catch (SQLException ex) { - Exceptions.printStackTrace(ex); - } DBLock.unlock(); } @@ -1075,7 +1048,7 @@ public class EventDB { } private static String useSubTypeHelper(final boolean useSubTypes) { - return useSubTypes ? EventTableColumn.SUB_TYPE.toString() : EventTableColumn.BASE_TYPE.toString(); + return useSubTypes ? "sub_type" : "base_type"; } private long getDBInfo(DBInfoKey key, long defaultValue) { @@ -1104,12 +1077,12 @@ public class EventDB { private String getDescriptionColumn(DescriptionLOD lod) { switch (lod) { case FULL: - return EventTableColumn.FULL_DESCRIPTION.toString(); + return "full_description"; case MEDIUM: - return EventTableColumn.MED_DESCRIPTION.toString(); + return "med_description"; case SHORT: default: - return EventTableColumn.SHORT_DESCRIPTION.toString(); + return "short_description"; } } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java index e385f2ca65..a3775d3ba2 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java @@ -59,7 +59,6 @@ import org.sleuthkit.autopsy.timeline.filters.RootFilter; import org.sleuthkit.autopsy.timeline.zooming.ZoomParams; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import 
org.sleuthkit.datamodel.TskData; @@ -102,7 +101,10 @@ public class EventsRepository { private final ObservableMap datasourcesMap = FXCollections.observableHashMap(); private final ObservableMap hashSetMap = FXCollections.observableHashMap(); private final Case autoCase; - + + public Case getAutoCase() { + return autoCase; + } synchronized public ObservableMap getDatasourcesMap() { return datasourcesMap; @@ -227,45 +229,6 @@ public class EventsRepository { return eventDB.hasNewColumns(); } - public void handleTagAdded(BlackboardArtifact artifact) { - boolean updateEvent = eventDB.updateEvent(artifact.getObjectID(), artifact.getArtifactID(), true); - if (updateEvent) { - aggregateEventsCache.invalidateAll(); - } - } - - public void handleTagDeleted(BlackboardArtifact artifact) { - try { - boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false; - boolean updateEvent = eventDB.updateEvent(artifact.getObjectID(), artifact.getArtifactID(), tagged); - if (updateEvent) { - aggregateEventsCache.invalidateAll(); - } - } catch (TskCoreException ex) { - LOGGER.log(Level.SEVERE, "unable to determine tagged status of attribute.", ex); - } - } - - public void handleTagAdded(Content content) { - boolean updateEvent = eventDB.updateEvent(content.getId(), null, true); - if (updateEvent) { - aggregateEventsCache.invalidateAll(); - } - - } - - public void handleTagDeleted(Content content) { - try { - boolean tagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false; - boolean updateEvent = eventDB.updateEvent(content.getId(), null, tagged); - if (updateEvent) { - aggregateEventsCache.invalidateAll(); - } - } catch (TskCoreException ex) { - LOGGER.log(Level.SEVERE, "unable to determine tagged status of content.", ex); - } - } - private class DBPopulationWorker extends SwingWorker { private final ProgressWindow progressDialog; @@ -398,7 +361,6 @@ public class EventsRepository { 
try { progressDialog.close(); get(); - } catch (CancellationException ex) { LOGGER.log(Level.INFO, "Database population was cancelled by the user. Not all events may be present or accurate. See the log for details.", ex); // NON-NLS } catch (InterruptedException | ExecutionException ex) { @@ -448,9 +410,7 @@ public class EventsRepository { i++; process(Arrays.asList(new ProgressWindow.ProgressUpdate(i, numArtifacts, - NbBundle.getMessage(this.getClass(), - "EventsRepository.progressWindow.populatingXevents", - type.toString()), ""))); + Bundle.progressWindow_populatingXevents(type), ""))); } } catch (TskCoreException ex) { LOGGER.log(Level.SEVERE, "There was a problem getting events with sub type = " + type.toString() + ".", ex); // NON-NLS @@ -481,4 +441,13 @@ public class EventsRepository { } } } + + public Set markEventsTagged(long objID, Long artifactID, boolean tagged) { + Set updatedEventIDs = eventDB.markEventsTagged(objID, artifactID, true); + if (!updatedEventIDs.isEmpty()) { + aggregateEventsCache.invalidateAll(); + idToEventCache.invalidateAll(); + } + return updatedEventIDs; + } } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/SQLHelper.java b/Core/src/org/sleuthkit/autopsy/timeline/events/db/SQLHelper.java index 58c45aba8c..1c63f9f2d9 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/SQLHelper.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/db/SQLHelper.java @@ -89,9 +89,7 @@ public class SQLHelper { static String getSQLWhere(HideKnownFilter filter) { if (filter.isSelected()) { - return "(" + EventDB.EventTableColumn.KNOWN.toString() - + " is not '" + TskData.FileKnown.KNOWN.getFileKnownValue() - + "')"; // NON-NLS + return "(known_state IS NOT '" + TskData.FileKnown.KNOWN.getFileKnownValue() + "')"; // NON-NLS } else { return "1"; } @@ -111,11 +109,11 @@ public class SQLHelper { } static String getSQLWhere(DataSourceFilter filter) { - return (filter.isSelected()) ? 
"(" + EventDB.EventTableColumn.DATA_SOURCE_ID.toString() + " = '" + filter.getDataSourceID() + "')" : "1"; + return (filter.isSelected()) ? "(datasource_id = '" + filter.getDataSourceID() + "')" : "1"; } static String getSQLWhere(DataSourcesFilter filter) { - return (filter.isSelected()) ? "(" + EventDB.EventTableColumn.DATA_SOURCE_ID.toString() + " in (" + return (filter.isSelected()) ? "(datasource_id in (" + filter.getSubFilters().stream() .filter(AbstractFilter::isSelected) .map((dataSourceFilter) -> String.valueOf(dataSourceFilter.getDataSourceID())) @@ -127,10 +125,10 @@ public class SQLHelper { if (StringUtils.isBlank(filter.getText())) { return "1"; } - String strip = StringUtils.strip(filter.getText()); - return "((" + EventDB.EventTableColumn.MED_DESCRIPTION.toString() + " like '%" + strip + "%') or (" // NON-NLS - + EventDB.EventTableColumn.FULL_DESCRIPTION.toString() + " like '%" + strip + "%') or (" // NON-NLS - + EventDB.EventTableColumn.SHORT_DESCRIPTION.toString() + " like '%" + strip + "%'))"; + String strippedFilterText = StringUtils.strip(filter.getText()); + return "((med_description like '%" + strippedFilterText + "%')" + + " or (full_description like '%" + strippedFilterText + "%')" + + " or (short_description like '%" + strippedFilterText + "%'))"; } else { return "1"; } @@ -140,19 +138,20 @@ public class SQLHelper { * generate a sql where clause for the given type filter, while trying to be * as simple as possible to improve performance. 
* - * @param filter + * @param typeFilter * * @return */ - static String getSQLWhere(TypeFilter filter) { - if (filter.isSelected() == false) { + static String getSQLWhere(TypeFilter typeFilter) { + if (typeFilter.isSelected() == false) { return "0"; - } else if (filter.getEventType() instanceof RootEventType) { - if (filter.getSubFilters().stream().allMatch((Filter f) -> f.isSelected() && ((TypeFilter) f).getSubFilters().stream().allMatch(Filter::isSelected))) { + } else if (typeFilter.getEventType() instanceof RootEventType) { + if (typeFilter.getSubFilters().stream() + .allMatch(subFilter -> subFilter.isSelected() && subFilter.getSubFilters().stream().allMatch(Filter::isSelected))) { return "1"; //then collapse clause to true } } - return "(" + EventDB.EventTableColumn.SUB_TYPE.toString() + " in (" + StringUtils.join(getActiveSubTypes(filter), ",") + "))"; + return "(sub_type IN (" + StringUtils.join(getActiveSubTypes(typeFilter), ",") + "))"; } } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java index d260608c3d..97ad654141 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.timeline.ui.detailview; +import com.google.common.eventbus.Subscribe; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -65,6 +66,8 @@ import org.sleuthkit.autopsy.coreutils.LoggedTask; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.events.AggregateEvent; +import org.sleuthkit.autopsy.timeline.events.EventsTaggedEvent; +import org.sleuthkit.autopsy.timeline.events.EventsUnTaggedEvent; import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel; import 
org.sleuthkit.autopsy.timeline.events.TimeLineEvent; import org.sleuthkit.autopsy.timeline.filters.RootFilter; @@ -151,6 +154,8 @@ public class AggregateEventNode extends StackPane { private final FilteredEventsModel eventsModel; private Tooltip tooltip; + private final ImageView hashIV = new ImageView(HASH_PIN); + private final ImageView tagIV = new ImageView(TAG); public AggregateEventNode(final AggregateEvent event, AggregateEventNode parentEventNode, EventDetailChart chart) { this.event = event; @@ -159,16 +164,18 @@ public class AggregateEventNode extends StackPane { this.chart = chart; sleuthkitCase = chart.getController().getAutopsyCase().getSleuthkitCase(); eventsModel = chart.getController().getEventsModel(); + eventsModel.register(this); final Region region = new Region(); HBox.setHgrow(region, Priority.ALWAYS); - ImageView hashIV = new ImageView(HASH_PIN); - ImageView tagIV = new ImageView(TAG); + final HBox hBox = new HBox(descrLabel, countLabel, region, hashIV, tagIV, minusButton, plusButton); if (event.getEventIDsWithHashHits().isEmpty()) { - hBox.getChildren().remove(hashIV); + hashIV.setManaged(false); + hashIV.setVisible(false); } if (event.getEventIDsWithTags().isEmpty()) { - hBox.getChildren().remove(tagIV); + tagIV.setManaged(false); + tagIV.setVisible(false); } hBox.setPrefWidth(USE_COMPUTED_SIZE); hBox.setMinWidth(USE_PREF_SIZE); @@ -524,4 +531,27 @@ public class AggregateEventNode extends StackPane { } } } + + @Subscribe + public void handleEventsUnTagged(EventsUnTaggedEvent tagEvent) { + if (event.removeTags(tagEvent.getEventIDs())) { + tooltip = null; + boolean hasTags = event.getEventIDsWithTags().isEmpty() == false; + Platform.runLater(() -> { + tagIV.setManaged(hasTags); + tagIV.setVisible(hasTags); + }); + } + } + + @Subscribe + public void handleEventsTagged(EventsTaggedEvent tagEvent) { + if (event.addTags(tagEvent.getEventIDs())) { + tooltip = null; + Platform.runLater(() -> { + tagIV.setManaged(true); + tagIV.setVisible(true); + }); 
+ } + } } From 85f04d842930d63f255a621d31c1be001d3f5f1d Mon Sep 17 00:00:00 2001 From: jmillman Date: Mon, 3 Aug 2015 16:08:49 -0400 Subject: [PATCH 4/7] use solution to updating tags visualization that maintains immutability of AggregateEvent increase synchronizatio, bug fixes, and cleanup --- .../autopsy/timeline/TimeLineController.java | 2 +- .../timeline/events/AggregateEvent.java | 44 +++++++++++++++---- .../timeline/events/FilteredEventsModel.java | 9 ++-- .../autopsy/timeline/events/db/EventDB.java | 7 +-- .../timeline/events/db/EventsRepository.java | 4 +- .../ui/detailview/AggregateEventNode.java | 34 +++++++------- 6 files changed, 62 insertions(+), 38 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java index 33fe52c85f..f3969a3e89 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java @@ -810,7 +810,7 @@ public class TimeLineController { }); break; case CURRENT_CASE: - OpenTimelineAction.invalidateController(); + OpenTimelineAction.invalidateController(); SwingUtilities.invokeLater(TimeLineController.this::closeTimeLine); break; } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java index 6550a230e3..d46761a3fb 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java @@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.timeline.events; import com.google.common.collect.Sets; import java.util.Collections; import java.util.Set; -import java.util.stream.Collectors; import org.joda.time.Interval; import org.sleuthkit.autopsy.timeline.events.type.EventType; import org.sleuthkit.autopsy.timeline.utils.IntervalUtils; @@ -125,14 +124,43 @@ public class AggregateEvent { return new 
AggregateEvent(IntervalUtils.span(aggEvent1.span, ag2.span), aggEvent1.getType(), idsUnion, hashHitsUnion, taggedUnion, aggEvent1.getDescription(), aggEvent1.lod); } - public boolean removeTags(Set eventIDs) { - return tagged.removeAll(eventIDs); + /** + * get an AggregateEvent the same as this one but with the given eventIDs + * removed from the list of tagged events + * + * @param unTaggedIDs + * + * @return a new Aggregate event that is the same as this one but with the + * given event Ids removed from the list of tagged ids, or, this + * AggregateEvent if no event ids would be removed + */ + public AggregateEvent withTagsRemoved(Set unTaggedIDs) { + Sets.SetView difference = Sets.difference(tagged, unTaggedIDs); + if (difference.size() < tagged.size()) { + return new AggregateEvent(span, type, unTaggedIDs, hashHits, difference.immutableCopy(), description, lod); + } + return this; //no change } - public boolean addTags(Set collect) { - return tagged.addAll(collect.stream() - .filter(eventIDs::contains) - .collect(Collectors.toSet())); - } + /** + * get an AggregateEvent the same as this one but with the given eventIDs + * added to the list of tagged events if there are part of this Aggregate + * + * @param taggedIDs + * + * @return a new Aggregate event that is the same as this one but with the + * given event Ids added to the list of tagged ids, or, this + * AggregateEvent if no event ids would be added + */ + public AggregateEvent withTagsAdded(Set taggedIDs) { + Sets.SetView taggedIdsInAgg = Sets.intersection(eventIDs, taggedIDs);//events that are in this aggregate and marked as tagged + if (taggedIdsInAgg.size() > 0) { + Sets.SetView notYetIncludedTagged = Sets.difference(taggedIdsInAgg,tagged); // events that are tagged, but not already marked as tagged in this Agg + if (notYetIncludedTagged.size() > 0) { + return new AggregateEvent(span, type, eventIDs, hashHits, Sets.union(tagged, taggedIdsInAgg).immutableCopy(), description, lod); + } + } + return this; 
//no change + } } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java b/Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java index 8dbeb6d301..07e681fd46 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java @@ -313,7 +313,7 @@ public final class FilteredEventsModel { return requestedLOD.get(); } - public void handleTagAdded(BlackBoardArtifactTagAddedEvent e) { + synchronized public void handleTagAdded(BlackBoardArtifactTagAddedEvent e) { BlackboardArtifact artifact = e.getTag().getArtifact(); Set updatedEventIDs = repo.markEventsTagged(artifact.getObjectID(), artifact.getArtifactID(), true); if (!updatedEventIDs.isEmpty()) { @@ -321,7 +321,7 @@ public final class FilteredEventsModel { } } - public void handleTagDeleted(BlackBoardArtifactTagDeletedEvent e) { + synchronized public void handleTagDeleted(BlackBoardArtifactTagDeletedEvent e) { BlackboardArtifact artifact = e.getTag().getArtifact(); try { boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false; @@ -334,7 +334,7 @@ public final class FilteredEventsModel { } } - public void handleTagAdded(ContentTagAddedEvent e) { + synchronized public void handleTagAdded(ContentTagAddedEvent e) { Content content = e.getTag().getContent(); Set updatedEventIDs = repo.markEventsTagged(content.getId(), null, true); if (!updatedEventIDs.isEmpty()) { @@ -342,11 +342,10 @@ public final class FilteredEventsModel { } } - public void handleTagDeleted(ContentTagDeletedEvent e) { + synchronized public void handleTagDeleted(ContentTagDeletedEvent e) { Content content = e.getTag().getContent(); try { boolean tagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false; - Set updatedEventIDs = repo.markEventsTagged(content.getId(), null, tagged); if 
(!updatedEventIDs.isEmpty()) { eventbus.post(new EventsUnTaggedEvent(updatedEventIDs)); diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java index 5ef130a83d..42b397ce17 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java @@ -723,7 +723,7 @@ public class EventDB { } } - Set markEventsTagged(long objectID, Long artifactID, boolean tagged) { + Set markEventsTagged(long objectID, Long artifactID, boolean tagged) { HashSet eventIDs = new HashSet<>(); DBLock.lock(); @@ -740,11 +740,8 @@ public class EventDB { eventIDs.add(eventsToUpdateRS.getLong("event_id")); } try (Statement updateStatement = con.createStatement();) { - int updatedRowCount = updateStatement.executeUpdate("UPDATE events SET tagged = " + (tagged ? 1 : 0) + updateStatement.executeUpdate("UPDATE events SET tagged = " + (tagged ? 1 : 0) + " WHERE event_id IN (" + StringUtils.join(eventIDs, ",") + ")"); - if (updatedRowCount != eventIDs.size()) { - LOGGER.log(Level.SEVERE, "Updated row count did not match expectation when marking events as {0}", (tagged ? 
"" : "(un)") + tagged); // NON-NLS - } } } } catch (SQLException ex) { diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java index a3775d3ba2..ca22cbeb20 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java @@ -93,9 +93,7 @@ public class EventsRepository { private final FilteredEventsModel modelInstance; private final LoadingCache idToEventCache; - private final LoadingCache> eventCountsCache; - private final LoadingCache> aggregateEventsCache; private final ObservableMap datasourcesMap = FXCollections.observableHashMap(); @@ -442,7 +440,7 @@ public class EventsRepository { } } - public Set markEventsTagged(long objID, Long artifactID, boolean tagged) { + synchronized public Set markEventsTagged(long objID, Long artifactID, boolean tagged) { Set updatedEventIDs = eventDB.markEventsTagged(objID, artifactID, true); if (!updatedEventIDs.isEmpty()) { aggregateEventsCache.invalidateAll(); diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java index 97ad654141..ff9a3177a8 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java @@ -98,7 +98,7 @@ public class AggregateEventNode extends StackPane { private static final Border selectionBorder = new Border(new BorderStroke(Color.BLACK, BorderStrokeStyle.SOLID, CORNER_RADII, new BorderWidths(2))); /** The event this AggregateEventNode represents visually */ - private final AggregateEvent event; + private AggregateEvent event; private final AggregateEventNode parentEventNode; @@ -233,7 +233,6 @@ public class AggregateEventNode extends StackPane { minusButton.setManaged(true); 
plusButton.setManaged(true); toFront(); - }); setOnMouseExited((MouseEvent e) -> { @@ -242,7 +241,6 @@ public class AggregateEventNode extends StackPane { plusButton.setVisible(false); minusButton.setManaged(false); plusButton.setManaged(false); - }); setOnMouseClicked(new EventMouseHandler()); @@ -266,7 +264,7 @@ public class AggregateEventNode extends StackPane { }); } - private void installTooltip() { + synchronized private void installTooltip() { //TODO: all this work should probably go on a background thread... if (tooltip == null) { @@ -339,7 +337,7 @@ public class AggregateEventNode extends StackPane { return subNodePane; } - public AggregateEvent getEvent() { + synchronized public AggregateEvent getEvent() { return event; } @@ -364,7 +362,7 @@ public class AggregateEventNode extends StackPane { } /** @param descrVis the level of description that should be displayed */ - final void setDescriptionVisibility(DescriptionVisibility descrVis) { + synchronized final void setDescriptionVisibility(DescriptionVisibility descrVis) { this.descrVis = descrVis; final int size = event.getEventIDs().size(); @@ -408,18 +406,18 @@ public class AggregateEventNode extends StackPane { * * @param applied true to apply the highlight 'effect', false to remove it */ - void applyHighlightEffect(boolean applied) { + synchronized void applyHighlightEffect(boolean applied) { if (applied) { descrLabel.setStyle("-fx-font-weight: bold;"); // NON-NLS - spanFill = new Background(new BackgroundFill(getEvent().getType().getColor().deriveColor(0, 1, 1, .3), CORNER_RADII, Insets.EMPTY)); + spanFill = new Background(new BackgroundFill(event.getType().getColor().deriveColor(0, 1, 1, .3), CORNER_RADII, Insets.EMPTY)); spanRegion.setBackground(spanFill); - setBackground(new Background(new BackgroundFill(getEvent().getType().getColor().deriveColor(0, 1, 1, .2), CORNER_RADII, Insets.EMPTY))); + setBackground(new Background(new BackgroundFill(event.getType().getColor().deriveColor(0, 1, 1, .2), 
CORNER_RADII, Insets.EMPTY))); } else { descrLabel.setStyle("-fx-font-weight: normal;"); // NON-NLS - spanFill = new Background(new BackgroundFill(getEvent().getType().getColor().deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY)); + spanFill = new Background(new BackgroundFill(event.getType().getColor().deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY)); spanRegion.setBackground(spanFill); - setBackground(new Background(new BackgroundFill(getEvent().getType().getColor().deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY))); + setBackground(new Background(new BackgroundFill(event.getType().getColor().deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY))); } } @@ -451,7 +449,7 @@ public class AggregateEventNode extends StackPane { * * @param newLOD */ - private void loadSubClusters(DescriptionLOD newLOD) { + synchronized private void loadSubClusters(DescriptionLOD newLOD) { getSubNodePane().getChildren().clear(); if (newLOD == event.getLOD()) { getSubNodePane().getChildren().clear(); @@ -533,8 +531,10 @@ public class AggregateEventNode extends StackPane { } @Subscribe - public void handleEventsUnTagged(EventsUnTaggedEvent tagEvent) { - if (event.removeTags(tagEvent.getEventIDs())) { + synchronized public void handleEventsUnTagged(EventsUnTaggedEvent tagEvent) { + AggregateEvent withTagsRemoved = event.withTagsRemoved(tagEvent.getEventIDs()); + if (withTagsRemoved != event) { + event = withTagsRemoved; tooltip = null; boolean hasTags = event.getEventIDsWithTags().isEmpty() == false; Platform.runLater(() -> { @@ -545,8 +545,10 @@ public class AggregateEventNode extends StackPane { } @Subscribe - public void handleEventsTagged(EventsTaggedEvent tagEvent) { - if (event.addTags(tagEvent.getEventIDs())) { + synchronized public void handleEventsTagged(EventsTaggedEvent tagEvent) { + AggregateEvent withTagsAdded = event.withTagsAdded(tagEvent.getEventIDs()); + if (withTagsAdded != event) { + event = withTagsAdded; tooltip = null; Platform.runLater(() -> { 
tagIV.setManaged(true); From 1e3a0ebaebec6ad5cb09034ae4cffa1505550825 Mon Sep 17 00:00:00 2001 From: jmillman Date: Tue, 4 Aug 2015 10:19:19 -0400 Subject: [PATCH 5/7] fix bug when expanding/collapsing aggregate event nodes and events were always marked as tagged --- .../timeline/events/AggregateEvent.java | 4 +- .../autopsy/timeline/events/db/EventDB.java | 67 ++++++++----------- .../timeline/events/db/EventsRepository.java | 24 +++---- .../ui/detailview/AggregateEventNode.java | 32 ++++----- .../timeline/zooming/DescriptionLOD.java | 6 +- .../autopsy/timeline/zooming/ZoomParams.java | 41 ++---------- 6 files changed, 68 insertions(+), 106 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java index d46761a3fb..c57e229095 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java @@ -21,6 +21,7 @@ package org.sleuthkit.autopsy.timeline.events; import com.google.common.collect.Sets; import java.util.Collections; import java.util.Set; +import javax.annotation.concurrent.Immutable; import org.joda.time.Interval; import org.sleuthkit.autopsy.timeline.events.type.EventType; import org.sleuthkit.autopsy.timeline.utils.IntervalUtils; import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD; @@ -31,6 +32,7 @@ import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD; * the sub events should have the same type and matching descriptions at the * designated 'zoom level'. 
*/ +@Immutable public class AggregateEvent { /** the smallest time interval containing all the aggregated events */ @@ -137,7 +139,7 @@ public class AggregateEvent { public AggregateEvent withTagsRemoved(Set unTaggedIDs) { Sets.SetView difference = Sets.difference(tagged, unTaggedIDs); if (difference.size() < tagged.size()) { - return new AggregateEvent(span, type, unTaggedIDs, hashHits, difference.immutableCopy(), description, lod); + return new AggregateEvent(span, type, eventIDs, hashHits, difference.immutableCopy(), description, lod); } return this; //no change } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java index 42b397ce17..603428d24c 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java @@ -623,7 +623,7 @@ public class EventDB { return hasDBColumn("hash_hit"); } - void insertEvent(long time, EventType type, long datasourceID, Long objID, + void insertEvent(long time, EventType type, long datasourceID, long objID, Long artifactID, String fullDescription, String medDescription, String shortDescription, TskData.FileKnown known, Set hashSets, boolean tagged) { @@ -638,7 +638,7 @@ public class EventDB { * @param f * @param transaction */ - void insertEvent(long time, EventType type, long datasourceID, Long objID, + void insertEvent(long time, EventType type, long datasourceID, long objID, Long artifactID, String fullDescription, String medDescription, String shortDescription, TskData.FileKnown known, Set hashSetNames, boolean tagged, @@ -659,15 +659,11 @@ public class EventDB { //"INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hashHit, tagged) " insertRowStmt.clearParameters(); insertRowStmt.setLong(1, datasourceID); - if (objID != null) { - insertRowStmt.setLong(2, objID); - } 
else { - insertRowStmt.setNull(2, Types.INTEGER); - } + insertRowStmt.setLong(2, objID); if (artifactID != null) { insertRowStmt.setLong(3, artifactID); } else { - insertRowStmt.setNull(3, Types.INTEGER); + insertRowStmt.setNull(3, Types.NULL); } insertRowStmt.setLong(4, time); @@ -723,21 +719,22 @@ public class EventDB { } } - Set markEventsTagged(long objectID, Long artifactID, boolean tagged) { + Set markEventsTagged(long objectID, Long artifactID, boolean tagged) { HashSet eventIDs = new HashSet<>(); DBLock.lock(); + try { selectEventsFromOBjectAndArtifactStmt.clearParameters(); selectEventsFromOBjectAndArtifactStmt.setLong(1, objectID); if (Objects.isNull(artifactID)) { - selectEventsFromOBjectAndArtifactStmt.setNull(2, Types.INTEGER); + selectEventsFromOBjectAndArtifactStmt.setNull(2, Types.NULL); } else { selectEventsFromOBjectAndArtifactStmt.setLong(2, artifactID); } - try (ResultSet eventsToUpdateRS = selectEventsFromOBjectAndArtifactStmt.executeQuery();) { - while (eventsToUpdateRS.next()) { - eventIDs.add(eventsToUpdateRS.getLong("event_id")); + try (ResultSet executeQuery = selectEventsFromOBjectAndArtifactStmt.executeQuery();) { + while (executeQuery.next()) { + eventIDs.add(executeQuery.getLong("event_id")); } try (Statement updateStatement = con.createStatement();) { updateStatement.executeUpdate("UPDATE events SET tagged = " + (tagged ? 
1 : 0) @@ -951,40 +948,34 @@ public class EventDB { System.out.println(query); // scoop up requested events in groups organized by interval, type, and desription try (ResultSet rs = con.createStatement().executeQuery(query);) { - - Stopwatch stopwatch = new Stopwatch(); - stopwatch.start(); - - stopwatch.stop(); - System.out.println(stopwatch.elapsedMillis() / 1000.0 + " seconds"); while (rs.next()) { + Interval interval = new Interval(rs.getLong("Min(time)") * 1000, rs.getLong("Max(time)") * 1000, TimeLineController.getJodaTimeZone()); String eventIDS = rs.getString("event_ids"); - HashSet hashHits = new HashSet<>(); - try (Statement st2 = con.createStatement();) { - - ResultSet executeQuery = st2.executeQuery("select event_id from events where event_id in (" + eventIDS + ") and hash_hit = 1"); - while (executeQuery.next()) { - hashHits.add(executeQuery.getLong("event_id")); - } - } - HashSet tagged = new HashSet<>(); - try (Statement st3 = con.createStatement();) { - - ResultSet executeQuery = st3.executeQuery("select event_id from events where event_id in (" + eventIDS + ") and tagged = 1"); - while (executeQuery.next()) { - tagged.add(executeQuery.getLong("event_id")); - } - } - EventType type = useSubTypes ? 
RootEventType.allTypes.get(rs.getInt("sub_type")) : BaseTypes.values()[rs.getInt("base_type")]; + HashSet hashHits = new HashSet<>(); + HashSet tagged = new HashSet<>(); + try (Statement st2 = con.createStatement(); + ResultSet hashQueryResults = st2.executeQuery("select event_id , tagged, hash_hit from events where event_id in (" + eventIDS + ")");) { + while (hashQueryResults.next()) { + long eventID = hashQueryResults.getLong("event_id"); + if (hashQueryResults.getInt("tagged") != 0) { + tagged.add(eventID); + } + if (hashQueryResults.getInt("hash_hit") != 0) { + hashHits.add(eventID); + } + } + } + AggregateEvent aggregateEvent = new AggregateEvent( - new Interval(rs.getLong("Min(time)") * 1000, rs.getLong("Max(time)") * 1000, TimeLineController.getJodaTimeZone()), // NON-NLS + interval, // NON-NLS type, Stream.of(eventIDS.split(",")).map(Long::valueOf).collect(Collectors.toSet()), // NON-NLS hashHits, tagged, - rs.getString(descriptionColumn), lod); + rs.getString(descriptionColumn), + lod); //put events in map from type/descrition -> event SetMultimap descrMap = typeMap.get(type); diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java index ca22cbeb20..8e5fd68982 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java @@ -25,7 +25,6 @@ import com.google.common.cache.RemovalNotification; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; @@ -130,13 +129,13 @@ public class EventsRepository { this.eventDB = EventDB.getEventDB(autoCase); populateFilterMaps(autoCase.getSleuthkitCase()); idToEventCache = CacheBuilder.newBuilder().maximumSize(5000L).expireAfterAccess(10, TimeUnit.MINUTES).removalListener((RemovalNotification rn) -> 
{ - //LOGGER.log(Level.INFO, "evicting event: {0}", rn.toString()); +// LOGGER.log(Level.INFO, "evicting event: {0}", rn.toString()); }).build(CacheLoader.from(eventDB::getEventById)); eventCountsCache = CacheBuilder.newBuilder().maximumSize(1000L).expireAfterAccess(10, TimeUnit.MINUTES).removalListener((RemovalNotification> rn) -> { //LOGGER.log(Level.INFO, "evicting counts: {0}", rn.toString()); }).build(CacheLoader.from(eventDB::countEventsByType)); aggregateEventsCache = CacheBuilder.newBuilder().maximumSize(1000L).expireAfterAccess(10, TimeUnit.MINUTES).removalListener((RemovalNotification> rn) -> { - //LOGGER.log(Level.INFO, "evicting aggregated events: {0}", rn.toString()); +// LOGGER.log(Level.INFO, "evicting aggregated events: {0}", rn.toString()); }).build(CacheLoader.from(eventDB::getAggregatedEvents)); maxCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMaxTime)); minCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMinTime)); @@ -183,19 +182,18 @@ public class EventsRepository { return idToEventCache.getUnchecked(eventID); } - public Set getEventsById(Collection eventIDs) { + synchronized public Set getEventsById(Collection eventIDs) { return eventIDs.stream() .map(idToEventCache::getUnchecked) .collect(Collectors.toSet()); } - public List getAggregatedEvents(ZoomParams params) { - + synchronized public List getAggregatedEvents(ZoomParams params) { return aggregateEventsCache.getUnchecked(params); } - public Map countEvents(ZoomParams params) { + synchronized public Map countEvents(ZoomParams params) { return eventCountsCache.getUnchecked(params); } @@ -204,6 +202,7 @@ public class EventsRepository { maxCache.invalidateAll(); eventCountsCache.invalidateAll(); aggregateEventsCache.invalidateAll(); + idToEventCache.invalidateAll(); } public Set getEventIDs(Interval timeRange, RootFilter filter) { @@ -286,8 +285,8 @@ public class EventsRepository { String shortDesc = datasourceName + "/" + 
StringUtils.defaultIfBlank(rootFolder, ""); String medD = datasourceName + parentPath; final TskData.FileKnown known = f.getKnown(); - boolean hashHit = f.getArtifactsCount(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT) > 0; - Set hashSets = hashHit ? HashHitUtils.getHashSetNamesForFile(skCase, f.getId()) : Collections.emptySet(); + + Set hashSets = HashHitUtils.getHashSetNamesForFile(skCase, f.getId()); boolean tagged = !tagsManager.getContentTagsByContent(f).isEmpty(); //insert it into the db if time is > 0 => time is legitimate (drops logical files) @@ -397,8 +396,7 @@ public class EventsRepository { long datasourceID = skCase.getContentById(bbart.getObjectID()).getDataSource().getId(); AbstractFile f = skCase.getAbstractFileById(bbart.getObjectID()); - boolean hashHit = f.getArtifactsCount(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT) > 0; - Set hashSets = hashHit ? HashHitUtils.getHashSetNamesForFile(skCase, f.getId()) : Collections.emptySet(); + Set hashSets = HashHitUtils.getHashSetNamesForFile(skCase, f.getId()) ; boolean tagged = tagsManager.getContentTagsByContent(f).isEmpty() == false; tagged |= tagsManager.getBlackboardArtifactTagsByArtifact(bbart).isEmpty() == false; @@ -441,10 +439,10 @@ public class EventsRepository { } synchronized public Set markEventsTagged(long objID, Long artifactID, boolean tagged) { - Set updatedEventIDs = eventDB.markEventsTagged(objID, artifactID, true); + Set updatedEventIDs = eventDB.markEventsTagged(objID, artifactID, tagged); if (!updatedEventIDs.isEmpty()) { aggregateEventsCache.invalidateAll(); - idToEventCache.invalidateAll(); + idToEventCache.invalidateAll(updatedEventIDs); } return updatedEventIDs; } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java index ff9a3177a8..d2afd5b0f2 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java +++ 
b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java @@ -59,7 +59,6 @@ import javafx.scene.paint.Color; import org.apache.commons.lang3.StringUtils; import org.joda.time.DateTime; import org.joda.time.Interval; -import org.openide.util.Exceptions; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.ColorUtilities; import org.sleuthkit.autopsy.coreutils.LoggedTask; @@ -87,7 +86,9 @@ import org.sleuthkit.datamodel.TskCoreException; /** Represents an {@link AggregateEvent} in a {@link EventDetailChart}. */ public class AggregateEventNode extends StackPane { - private static final Image HASH_PIN = new Image(AggregateEventNode.class.getResourceAsStream("/org/sleuthkit/autopsy/images/hashset_hits.png")); + private static final Logger LOGGER = Logger.getLogger(AggregateEventNode.class.getName()); + + private static final Image HASH_PIN = new Image("/org/sleuthkit/autopsy/images/hashset_hits.png"); private final static Image PLUS = new Image("/org/sleuthkit/autopsy/timeline/images/plus-button.png"); // NON-NLS private final static Image MINUS = new Image("/org/sleuthkit/autopsy/timeline/images/minus-button.png"); // NON-NLS private final static Image TAG = new Image("/org/sleuthkit/autopsy/images/green-tag-icon-16.png"); // NON-NLS @@ -282,7 +283,7 @@ public class AggregateEventNode extends StackPane { } } } catch (TskCoreException ex) { - Logger.getLogger(AggregateEventNode.class.getName()).log(Level.SEVERE, "Error getting hashset hit info for event.", ex); + LOGGER.log(Level.SEVERE, "Error getting hashset hit info for event.", ex); } } @@ -307,7 +308,7 @@ public class AggregateEventNode extends StackPane { } } } catch (TskCoreException ex) { - Logger.getLogger(AggregateEventNode.class.getName()).log(Level.SEVERE, "Error getting tag info for event.", ex); + LOGGER.log(Level.SEVERE, "Error getting tag info for event.", ex); } } @@ -447,16 +448,15 @@ public class AggregateEventNode extends StackPane { /** * loads sub-clusters 
at the given Description LOD * - * @param newLOD + * @param newDescriptionLOD */ - synchronized private void loadSubClusters(DescriptionLOD newLOD) { + synchronized private void loadSubClusters(DescriptionLOD newDescriptionLOD) { getSubNodePane().getChildren().clear(); - if (newLOD == event.getLOD()) { - getSubNodePane().getChildren().clear(); + if (newDescriptionLOD == event.getLOD()) { chart.setRequiresLayout(true); chart.requestChartLayout(); } else { - RootFilter combinedFilter = chart.getFilteredEvents().filter().get().copyOf(); + RootFilter combinedFilter = eventsModel.filter().get().copyOf(); //make a new filter intersecting the global filter with text(description) and type filters to restrict sub-clusters combinedFilter.getSubFilters().addAll(new TextFilter(event.getDescription()), new TypeFilter(event.getType())); @@ -471,14 +471,14 @@ public class AggregateEventNode extends StackPane { @Override protected List call() throws Exception { //query for the sub-clusters - List aggregatedEvents = chart.getFilteredEvents().getAggregatedEvents(new ZoomParams(span, - chart.getFilteredEvents().eventTypeZoom().get(), + List aggregatedEvents = eventsModel.getAggregatedEvents(new ZoomParams(span, + eventsModel.eventTypeZoom().get(), combinedFilter, - newLOD)); + newDescriptionLOD)); //for each sub cluster make an AggregateEventNode to visually represent it, and set x-position - return aggregatedEvents.stream().map((AggregateEvent t) -> { - AggregateEventNode subNode = new AggregateEventNode(t, AggregateEventNode.this, chart); - subNode.setLayoutX(chart.getXAxis().getDisplayPosition(new DateTime(t.getSpan().getStartMillis())) - getLayoutXCompensation()); + return aggregatedEvents.stream().map(aggEvent -> { + AggregateEventNode subNode = new AggregateEventNode(aggEvent, AggregateEventNode.this, chart); + subNode.setLayoutX(chart.getXAxis().getDisplayPosition(new DateTime(aggEvent.getSpan().getStartMillis())) - getLayoutXCompensation()); return subNode; 
}).collect(Collectors.toList()); // return list of AggregateEventNodes representing subclusters } @@ -494,7 +494,7 @@ public class AggregateEventNode extends StackPane { chart.requestChartLayout(); chart.setCursor(null); } catch (InterruptedException | ExecutionException ex) { - Exceptions.printStackTrace(ex); + LOGGER.log(Level.SEVERE, "Error loading subnodes", ex); } } }; diff --git a/Core/src/org/sleuthkit/autopsy/timeline/zooming/DescriptionLOD.java b/Core/src/org/sleuthkit/autopsy/timeline/zooming/DescriptionLOD.java index 54aa97e786..d1bdb4e067 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/zooming/DescriptionLOD.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/zooming/DescriptionLOD.java @@ -25,9 +25,9 @@ import org.openide.util.NbBundle; */ public enum DescriptionLOD { - SHORT(NbBundle.getMessage(DescriptionLOD.class, "DescriptionLOD.short")), MEDIUM( - NbBundle.getMessage(DescriptionLOD.class, "DescriptionLOD.medium")), FULL( - NbBundle.getMessage(DescriptionLOD.class, "DescriptionLOD.full")); + SHORT(NbBundle.getMessage(DescriptionLOD.class, "DescriptionLOD.short")), + MEDIUM(NbBundle.getMessage(DescriptionLOD.class, "DescriptionLOD.medium")), + FULL(NbBundle.getMessage(DescriptionLOD.class, "DescriptionLOD.full")); private final String displayName; diff --git a/Core/src/org/sleuthkit/autopsy/timeline/zooming/ZoomParams.java b/Core/src/org/sleuthkit/autopsy/timeline/zooming/ZoomParams.java index dc917d416c..c6de9f5d1e 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/zooming/ZoomParams.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/zooming/ZoomParams.java @@ -18,10 +18,7 @@ */ package org.sleuthkit.autopsy.timeline.zooming; -import java.util.Collections; -import java.util.EnumSet; import java.util.Objects; -import java.util.Set; import org.joda.time.Interval; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.timeline.filters.Filter; @@ -41,20 +38,6 @@ public class ZoomParams { private final DescriptionLOD descrLOD; - private 
final Set changedFields; - - public Set getChangedFields() { - return Collections.unmodifiableSet(changedFields); - } - - public enum Field { - - TIME, - EVENT_TYPE_ZOOM, - FILTER, - DESCRIPTION_LOD; - } - public Interval getTimeRange() { return timeRange; } @@ -76,35 +59,27 @@ public class ZoomParams { this.typeZoomLevel = zoomLevel; this.filter = filter; this.descrLOD = descrLOD; - changedFields = EnumSet.allOf(Field.class); - } - public ZoomParams(Interval timeRange, EventTypeZoomLevel zoomLevel, RootFilter filter, DescriptionLOD descrLOD, EnumSet changed) { - this.timeRange = timeRange; - this.typeZoomLevel = zoomLevel; - this.filter = filter; - this.descrLOD = descrLOD; - changedFields = changed; } public ZoomParams withTimeAndType(Interval timeRange, EventTypeZoomLevel zoomLevel) { - return new ZoomParams(timeRange, zoomLevel, filter, descrLOD, EnumSet.of(Field.TIME, Field.EVENT_TYPE_ZOOM)); + return new ZoomParams(timeRange, zoomLevel, filter, descrLOD); } public ZoomParams withTypeZoomLevel(EventTypeZoomLevel zoomLevel) { - return new ZoomParams(timeRange, zoomLevel, filter, descrLOD, EnumSet.of(Field.EVENT_TYPE_ZOOM)); + return new ZoomParams(timeRange, zoomLevel, filter, descrLOD); } public ZoomParams withTimeRange(Interval timeRange) { - return new ZoomParams(timeRange, typeZoomLevel, filter, descrLOD, EnumSet.of(Field.TIME)); + return new ZoomParams(timeRange, typeZoomLevel, filter, descrLOD); } public ZoomParams withDescrLOD(DescriptionLOD descrLOD) { - return new ZoomParams(timeRange, typeZoomLevel, filter, descrLOD, EnumSet.of(Field.DESCRIPTION_LOD)); + return new ZoomParams(timeRange, typeZoomLevel, filter, descrLOD); } public ZoomParams withFilter(RootFilter filter) { - return new ZoomParams(timeRange, typeZoomLevel, filter, descrLOD, EnumSet.of(Field.FILTER)); + return new ZoomParams(timeRange, typeZoomLevel, filter, descrLOD); } public boolean hasFilter(Filter filterSet) { @@ -153,11 +128,7 @@ public class ZoomParams { if 
(this.filter.equals(other.filter) == false) { return false; } - if (this.descrLOD != other.descrLOD) { - return false; - } - - return true; + return this.descrLOD == other.descrLOD; } @Override From 685934fa55d3155ef777687fdad0b8cd9daee89b Mon Sep 17 00:00:00 2001 From: jmillman Date: Wed, 5 Aug 2015 11:43:59 -0400 Subject: [PATCH 6/7] cleanup, comments --- .../timeline/events/EventsTaggedEvent.java | 26 +++++++++++++----- .../timeline/events/EventsUnTaggedEvent.java | 27 ++++++++++++++----- .../autopsy/timeline/events/db/EventDB.java | 16 ++++------- .../timeline/events/db/EventsRepository.java | 22 ++++++++------- 4 files changed, 58 insertions(+), 33 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/EventsTaggedEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/events/EventsTaggedEvent.java index b6f7205443..114fe053a8 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/EventsTaggedEvent.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/EventsTaggedEvent.java @@ -1,14 +1,29 @@ /* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. + * Autopsy Forensic Browser + * + * Copyright 2015 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package org.sleuthkit.autopsy.timeline.events; import java.util.Collections; import java.util.Set; /** - * + * Posted to eventbus when a tag has been added to a file artifact that + * corresponds to an event */ public class EventsTaggedEvent { @@ -19,7 +34,6 @@ public class EventsTaggedEvent { } public Set getEventIDs() { - return eventIDs; + return Collections.unmodifiableSet(eventIDs); } - } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/EventsUnTaggedEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/events/EventsUnTaggedEvent.java index 230de58fb6..474676b65a 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/EventsUnTaggedEvent.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/EventsUnTaggedEvent.java @@ -1,21 +1,36 @@ /* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. + * Autopsy Forensic Browser + * + * Copyright 2015 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */ package org.sleuthkit.autopsy.timeline.events; +import java.util.Collections; import java.util.Set; /** - * + * Posted to eventbus when a tag has been removed from a file artifact that + * corresponds to an event */ -public class EventsUnTaggedEvent { +public class EventsUnTaggedEvent { private final Set eventIDs; public Set getEventIDs() { - return eventIDs; + return Collections.unmodifiableSet(eventIDs); } public EventsUnTaggedEvent(Set eventIDs) { diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java index 5bec0dcf27..66e8092068 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java @@ -862,11 +862,10 @@ public class EventDB { + " from events" + useHashHitTablesHelper(filter) + " where time >= " + startTime + " and time < " + endTime + " and " + SQLHelper.getSQLWhere(filter) // NON-NLS + " GROUP BY " + useSubTypeHelper(useSubTypes); // NON-NLS - ResultSet rs = null; DBLock.lock(); - try (Statement stmt = con.createStatement();) { + try (Statement stmt = con.createStatement(); + ResultSet rs = stmt.executeQuery(queryString);) { while (rs.next()) { - EventType type = useSubTypes ? RootEventType.allTypes.get(rs.getInt("sub_type")) : BaseTypes.values()[rs.getInt("base_type")]; @@ -875,13 +874,8 @@ public class EventDB { } } catch (Exception ex) { - LOGGER.log(Level.SEVERE, "error getting count of events from db.", ex); // NON-NLS + LOGGER.log(Level.SEVERE, "Error getting count of events from db.", ex); // NON-NLS } finally { - try { - rs.close(); - } catch (SQLException ex) { - Exceptions.printStackTrace(ex); - } DBLock.unlock(); } return typeMap; @@ -892,13 +886,13 @@ public class EventDB { } /** - * //TODO: update javadoc, and split this into helper methods + * //TODO: update javadoc //TODO: split this into helper methods * * get a list of {@link AggregateEvent}s. 
* * General algorithm is as follows: * - * 1) get all aggregate events, via one db query. 2) sort them into a map + * 1)get all aggregate events, via one db query. 2) sort them into a map * from (type, description)-> aggevent 3) for each key in map, merge the * events and accumulate them in a list to return * diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java index fe5207e3fb..19c8946466 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventsRepository.java @@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.timeline.events.db; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; -import com.google.common.cache.RemovalNotification; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -127,15 +126,18 @@ public class EventsRepository { //TODO: we should check that case is open, or get passed a case object/directory -jm this.eventDB = EventDB.getEventDB(autoCase); populateFilterMaps(autoCase.getSleuthkitCase()); - idToEventCache = CacheBuilder.newBuilder().maximumSize(5000L).expireAfterAccess(10, TimeUnit.MINUTES).removalListener((RemovalNotification rn) -> { -// LOGGER.log(Level.INFO, "evicting event: {0}", rn.toString()); - }).build(CacheLoader.from(eventDB::getEventById)); - eventCountsCache = CacheBuilder.newBuilder().maximumSize(1000L).expireAfterAccess(10, TimeUnit.MINUTES).removalListener((RemovalNotification> rn) -> { - //LOGGER.log(Level.INFO, "evicting counts: {0}", rn.toString()); - }).build(CacheLoader.from(eventDB::countEventsByType)); - aggregateEventsCache = CacheBuilder.newBuilder().maximumSize(1000L).expireAfterAccess(10, TimeUnit.MINUTES).removalListener((RemovalNotification> rn) -> { -// LOGGER.log(Level.INFO, "evicting aggregated events: {0}", 
rn.toString()); - }).build(CacheLoader.from(eventDB::getAggregatedEvents)); + idToEventCache = CacheBuilder.newBuilder() + .maximumSize(5000L) + .expireAfterAccess(10, TimeUnit.MINUTES) + .build(CacheLoader.from(eventDB::getEventById)); + eventCountsCache = CacheBuilder.newBuilder() + .maximumSize(1000L) + .expireAfterAccess(10, TimeUnit.MINUTES) + .build(CacheLoader.from(eventDB::countEventsByType)); + aggregateEventsCache = CacheBuilder.newBuilder() + .maximumSize(1000L) + .expireAfterAccess(10, TimeUnit.MINUTES + ).build(CacheLoader.from(eventDB::getAggregatedEvents)); maxCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMaxTime)); minCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMinTime)); this.modelInstance = new FilteredEventsModel(this, currentStateProperty); From 084125ca3c3dfc7bf5b08ec95097de2e03bfc0df Mon Sep 17 00:00:00 2001 From: jmillman Date: Wed, 5 Aug 2015 13:00:46 -0400 Subject: [PATCH 7/7] move the registering for tag events to EventDetailChart and let it forward to all the aggregate nodes. 
this avoids a build up of references to unused nodes in the eventbus --- .../timeline/events/AggregateEvent.java | 34 ++-- .../timeline/events/FilteredEventsModel.java | 24 +-- .../autopsy/timeline/events/db/EventDB.java | 2 - .../ui/detailview/AggregateEventNode.java | 70 +++++--- .../ui/detailview/DetailViewPane.java | 15 +- .../ui/detailview/EventDetailChart.java | 154 ++++++++++++------ 6 files changed, 194 insertions(+), 105 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java index c57e229095..c281cd03a6 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/AggregateEvent.java @@ -28,26 +28,36 @@ import org.sleuthkit.autopsy.timeline.utils.IntervalUtils; import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD; /** - * Represents a set of other (TimeLineEvent) events aggregated together. All - * the sub events should have the same type and matching descriptions at the + * Represents a set of other (TimeLineEvent) events aggregated together. All the + * sub events should have the same type and matching descriptions at the * designated 'zoom level'. */ @Immutable public class AggregateEvent { - /** the smallest time interval containing all the aggregated events */ + /** + * the smallest time interval containing all the aggregated events + */ final private Interval span; - /** the type of all the aggregted events */ + /** + * the type of all the aggregted events + */ final private EventType type; - /** the common description of all the aggregated events */ + /** + * the common description of all the aggregated events + */ final private String description; - /** the description level of detail that the events were aggregated at. */ + /** + * the description level of detail that the events were aggregated at. 
+ */ private final DescriptionLOD lod; - /** the set of ids of the aggregated events */ + /** + * the set of ids of the aggregated events + */ final private Set eventIDs; /** @@ -137,9 +147,9 @@ public class AggregateEvent { * AggregateEvent if no event ids would be removed */ public AggregateEvent withTagsRemoved(Set unTaggedIDs) { - Sets.SetView difference = Sets.difference(tagged, unTaggedIDs); - if (difference.size() < tagged.size()) { - return new AggregateEvent(span, type, eventIDs, hashHits, difference.immutableCopy(), description, lod); + Sets.SetView stillTagged = Sets.difference(tagged, unTaggedIDs); + if (stillTagged.size() < tagged.size()) { + return new AggregateEvent(span, type, eventIDs, hashHits, stillTagged.immutableCopy(), description, lod); } return this; //no change } @@ -155,9 +165,9 @@ public class AggregateEvent { * AggregateEvent if no event ids would be added */ public AggregateEvent withTagsAdded(Set taggedIDs) { - Sets.SetView taggedIdsInAgg = Sets.intersection(eventIDs, taggedIDs);//events that are in this aggregate and marked as tagged + Sets.SetView taggedIdsInAgg = Sets.intersection(eventIDs, taggedIDs);//events that are in this aggregate and (newly) marked as tagged if (taggedIdsInAgg.size() > 0) { - Sets.SetView notYetIncludedTagged = Sets.difference(taggedIdsInAgg,tagged); // events that are tagged, but not already marked as tagged in this Agg + Sets.SetView notYetIncludedTagged = Sets.difference(taggedIdsInAgg, tagged); // events that are tagged, but not already marked as tagged in this Agg if (notYetIncludedTagged.size() > 0) { return new AggregateEvent(span, type, eventIDs, hashHits, Sets.union(tagged, taggedIdsInAgg).immutableCopy(), description, lod); } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java b/Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java index 8a8678657e..ab1e5f7c28 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java +++ 
b/Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java @@ -112,7 +112,9 @@ public final class FilteredEventsModel { private final EventsRepository repo; private final Case autoCase; - /** @return the default filter used at startup */ + /** + * @return the default filter used at startup + */ public RootFilter getDefaultFilter() { DataSourcesFilter dataSourcesFilter = new DataSourcesFilter(); @@ -252,8 +254,8 @@ public final class FilteredEventsModel { /** * @return the time (in seconds from unix epoch) of the absolutely first - * event available from the repository, ignoring any filters or requested - * ranges + * event available from the repository, ignoring any filters or + * requested ranges */ public Long getMinTime() { return repo.getMinTime(); @@ -261,8 +263,8 @@ public final class FilteredEventsModel { /** * @return the time (in seconds from unix epoch) of the absolutely last - * event available from the repository, ignoring any filters or requested - * ranges + * event available from the repository, ignoring any filters or + * requested ranges */ public Long getMaxTime() { return repo.getMaxTime(); @@ -272,8 +274,8 @@ public final class FilteredEventsModel { * @param aggregation * * @return a list of aggregated events that are within the requested time - * range and pass the requested filter, using the given aggregation to - * control the grouping of events + * range and pass the requested filter, using the given aggregation + * to control the grouping of events */ public List getAggregatedEvents() { final Interval range; @@ -293,8 +295,8 @@ public final class FilteredEventsModel { * @param aggregation * * @return a list of aggregated events that are within the requested time - * range and pass the requested filter, using the given aggregation to - * control the grouping of events + * range and pass the requested filter, using the given aggregation + * to control the grouping of events */ public List getAggregatedEvents(ZoomParams params) 
{ return repo.getAggregatedEvents(params); @@ -354,11 +356,11 @@ public final class FilteredEventsModel { } } - public void register(Object o) { + synchronized public void registerForEvents(Object o) { eventbus.register(o); } - public void unRegister(Object o) { + synchronized public void unRegisterForEvents(Object o) { eventbus.unregister(0); } } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java index 66e8092068..df98e0b016 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java @@ -195,8 +195,6 @@ public class EventDB { public Interval getSpanningInterval(Collection eventIDs) { DBLock.lock(); try (Statement stmt = con.createStatement(); - //You can't inject multiple values into one ? paramater in prepared statement, - //so we make new statement each time... ResultSet rs = stmt.executeQuery("select Min(time), Max(time) from events where event_id in (" + StringUtils.join(eventIDs, ", ") + ")");) { // NON-NLS while (rs.next()) { return new Interval(rs.getLong("Min(time)"), rs.getLong("Max(time)") + 1, DateTimeZone.UTC); // NON-NLS diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java index d445117397..81ddc585ab 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/AggregateEventNode.java @@ -81,7 +81,9 @@ import org.sleuthkit.datamodel.ContentTag; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; -/** Represents an {@link AggregateEvent} in a {@link EventDetailChart}. */ +/** + * Represents an {@link AggregateEvent} in a {@link EventDetailChart}. 
+ */ public class AggregateEventNode extends StackPane { private static final Logger LOGGER = Logger.getLogger(AggregateEventNode.class.getName()); @@ -93,42 +95,59 @@ public class AggregateEventNode extends StackPane { private static final CornerRadii CORNER_RADII = new CornerRadii(3); - /** the border to apply when this node is 'selected' */ + /** + * the border to apply when this node is 'selected' + */ private static final Border selectionBorder = new Border(new BorderStroke(Color.BLACK, BorderStrokeStyle.SOLID, CORNER_RADII, new BorderWidths(2))); - /** The event this AggregateEventNode represents visually */ + /** + * The event this AggregateEventNode represents visually + */ private AggregateEvent aggEvent; private final AggregateEventNode parentEventNode; - /** the region that represents the time span of this node's event */ + /** + * the region that represents the time span of this node's event + */ private final Region spanRegion = new Region(); - /** The label used to display this node's event's description */ + /** + * The label used to display this node's event's description + */ private final Label descrLabel = new Label(); - /** The label used to display this node's event count */ + /** + * The label used to display this node's event count + */ private final Label countLabel = new Label(); - /** The IamgeView used to show the icon for this node's event's type */ + /** + * The IamgeView used to show the icon for this node's event's type + */ private final ImageView eventTypeImageView = new ImageView(); - /** Pane that contains AggregateEventNodes of any 'subevents' if they are + /** + * Pane that contains AggregateEventNodes of any 'subevents' if they are * displayed * * //TODO: move more of the control of subnodes/events here and out of - * EventDetail Chart */ + * EventDetail Chart + */ private final Pane subNodePane = new Pane(); - /** the context menu that with the slider that controls subnode/event - * display + /** + * the context menu that with 
the slider that controls subnode/event display * - * //TODO: move more of the control of subnodes/events here and out - * of EventDetail Chart */ + * //TODO: move more of the control of subnodes/events here and out of + * EventDetail Chart + */ private final SimpleObjectProperty contextMenu = new SimpleObjectProperty<>(); - /** the Background used to fill the spanRegion, this varies epending on the - * selected/highlighted state of this node in its parent EventDetailChart */ + /** + * the Background used to fill the spanRegion, this varies epending on the + * selected/highlighted state of this node in its parent EventDetailChart + */ private Background spanFill; private final Button plusButton = new Button(null, new ImageView(PLUS)) { @@ -163,7 +182,7 @@ public class AggregateEventNode extends StackPane { this.chart = chart; sleuthkitCase = chart.getController().getAutopsyCase().getSleuthkitCase(); eventsModel = chart.getController().getEventsModel(); - eventsModel.register(this); + final Region region = new Region(); HBox.setHgrow(region, Priority.ALWAYS); @@ -353,7 +372,9 @@ public class AggregateEventNode extends StackPane { descrLabel.setMaxWidth(w); } - /** @param descrVis the level of description that should be displayed */ + /** + * @param descrVis the level of description that should be displayed + */ synchronized final void setDescriptionVisibility(DescriptionVisibility descrVis) { this.descrVis = descrVis; final int size = aggEvent.getEventIDs().size(); @@ -379,7 +400,8 @@ public class AggregateEventNode extends StackPane { } } - /** apply the 'effect' to visually indicate selection + /** + * apply the 'effect' to visually indicate selection * * @param applied true to apply the selection 'effect', false to remove it */ @@ -393,7 +415,8 @@ public class AggregateEventNode extends StackPane { }); } - /** apply the 'effect' to visually indicate highlighted nodes + /** + * apply the 'effect' to visually indicate highlighted nodes * * @param applied true to 
apply the highlight 'effect', false to remove it */ @@ -494,7 +517,9 @@ public class AggregateEventNode extends StackPane { } } - /** event handler used for mouse events on {@link AggregateEventNode}s */ + /** + * event handler used for mouse events on {@link AggregateEventNode}s + */ private class EventMouseHandler implements EventHandler { @Override @@ -520,8 +545,7 @@ public class AggregateEventNode extends StackPane { } } - @Subscribe - synchronized public void handleEventsUnTagged(EventsUnTaggedEvent tagEvent) { + synchronized void handleEventsUnTagged(EventsUnTaggedEvent tagEvent) { AggregateEvent withTagsRemoved = aggEvent.withTagsRemoved(tagEvent.getEventIDs()); if (withTagsRemoved != aggEvent) { aggEvent = withTagsRemoved; @@ -535,7 +559,7 @@ public class AggregateEventNode extends StackPane { } @Subscribe - synchronized public void handleEventsTagged(EventsTaggedEvent tagEvent) { + synchronized void handleEventsTagged(EventsTaggedEvent tagEvent) { AggregateEvent withTagsAdded = aggEvent.withTagsAdded(tagEvent.getEventIDs()); if (withTagsAdded != aggEvent) { aggEvent = withTagsAdded; diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailViewPane.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailViewPane.java index 2608598276..5f0136a352 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailViewPane.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailViewPane.java @@ -211,8 +211,8 @@ public class DetailViewPane extends AbstractVisualization { highlightedNodes.clear(); selectedNodes.stream().forEach((tn) -> { - for (AggregateEventNode n : chart.getNodes(( - AggregateEventNode t) -> t.getEvent().getDescription().equals(tn.getEvent().getDescription()))) { + for (AggregateEventNode n : chart.getNodes((AggregateEventNode t) + -> t.getEvent().getDescription().equals(tn.getEvent().getDescription()))) { highlightedNodes.add(n); } }); @@ -230,8 +230,7 @@ public class DetailViewPane extends 
AbstractVisualization { highlightedNodes.clear(); for (TreeItem tn : treeSelectionModel.getSelectedItems()) { - for (AggregateEventNode n : chart.getNodes(( - AggregateEventNode t) + for (AggregateEventNode n : chart.getNodes((AggregateEventNode t) -> t.getEvent().getDescription().equals(tn.getValue().getDescription()))) { highlightedNodes.add(n); } @@ -264,13 +263,15 @@ public class DetailViewPane extends AbstractVisualization getSeries(final EventType et) { XYChart.Series series = eventTypeToSeriesMap.get(et); if (series == null) { diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventDetailChart.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventDetailChart.java index c3cc431f4e..8c1305a38d 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventDetailChart.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventDetailChart.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2013-14 Basis Technology Corp. + * Copyright 2013-15 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -19,6 +19,7 @@ package org.sleuthkit.autopsy.timeline.ui.detailview; import com.google.common.collect.Collections2; +import com.google.common.eventbus.Subscribe; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -72,6 +73,8 @@ import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.actions.Back; import org.sleuthkit.autopsy.timeline.actions.Forward; import org.sleuthkit.autopsy.timeline.events.AggregateEvent; +import org.sleuthkit.autopsy.timeline.events.EventsTaggedEvent; +import org.sleuthkit.autopsy.timeline.events.EventsUnTaggedEvent; import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.events.type.EventType; import org.sleuthkit.autopsy.timeline.ui.TimeLineChart; @@ -88,15 +91,18 @@ import org.sleuthkit.autopsy.timeline.ui.TimeLineChart; * Series help organize events for the banding by event type, we could add a * node to contain each band if we need a place for per band controls. * - * //TODO: refactor the projected lines to a separate class. -jm */ + * //TODO: refactor the projected lines to a separate class. 
-jm + */ public final class EventDetailChart extends XYChart implements TimeLineChart { private static final int PROJECTED_LINE_Y_OFFSET = 5; private static final int PROJECTED_LINE_STROKE_WIDTH = 5; - /** true == layout each event type in its own band, false == mix all the - * events together during layout */ + /** + * true == layout each event type in its own band, false == mix all the + * events together during layout + */ private final SimpleBooleanProperty bandByType = new SimpleBooleanProperty(false); // I don't like having these package visible, but it was the easiest way to @@ -106,18 +112,26 @@ public final class EventDetailChart extends XYChart im private FilteredEventsModel filteredEvents; - /** how much detail of the description to show in the ui */ + /** + * how much detail of the description to show in the ui + */ private final SimpleObjectProperty descrVisibility = new SimpleObjectProperty<>(DescriptionVisibility.SHOWN); - /** a user position-able vertical line to help the compare events */ + /** + * a user position-able vertical line to help the compare events + */ private Line guideLine; - /** * the user can drag out a time range to zoom into and this + /** + * * the user can drag out a time range to zoom into and this * {@link IntervalSelector} is the visual representation of it while the - * user is dragging */ + * user is dragging + */ private IntervalSelector intervalSelector; - /** listener that triggers layout pass */ + /** + * listener that triggers layout pass + */ private final InvalidationListener layoutInvalidationListener = ( Observable o) -> { synchronized (EventDetailChart.this) { @@ -126,7 +140,9 @@ public final class EventDetailChart extends XYChart im } }; - /** the maximum y value used so far during the most recent layout pass */ + /** + * the maximum y value used so far during the most recent layout pass + */ private final ReadOnlyDoubleWrapper maxY = new ReadOnlyDoubleWrapper(0.0); /** @@ -135,7 +151,9 @@ public final class 
EventDetailChart extends XYChart im */ private final Group nodeGroup = new Group(); - /** map from event to node */ + /** + * map from event to node + */ private final Map nodeMap = new TreeMap<>(( AggregateEvent o1, AggregateEvent o2) -> { @@ -147,14 +165,18 @@ public final class EventDetailChart extends XYChart im } }); - /** true == enforce that no two events can share the same 'row', leading to + /** + * true == enforce that no two events can share the same 'row', leading to * sparser but possibly clearer layout. false == put unrelated events in the - * same 'row', creating a denser more compact layout */ + * same 'row', creating a denser more compact layout + */ private final SimpleBooleanProperty oneEventPerRow = new SimpleBooleanProperty(false); private final ObservableMap projectionMap = FXCollections.observableHashMap(); - /** flag indicating whether this chart actually needs a layout pass */ + /** + * flag indicating whether this chart actually needs a layout pass + */ @GuardedBy(value = "this") private boolean requiresLayout = true; @@ -173,14 +195,18 @@ public final class EventDetailChart extends XYChart im return Integer.compare(collect.indexOf(s1.getName()), collect.indexOf(s2.getName())); }); - /** true == truncate all the labels to the greater of the size of their + /** + * true == truncate all the labels to the greater of the size of their * timespan indicator or the value of truncateWidth. false == don't truncate * the labels, alow them to extend past the timespan indicator and off the - * edge of the screen */ + * edge of the screen + */ private final SimpleBooleanProperty truncateAll = new SimpleBooleanProperty(false); - /** the width to truncate all labels to if truncateAll is true. adjustable - * via slider if truncateAll is true */ + /** + * the width to truncate all labels to if truncateAll is true. 
adjustable + * via slider if truncateAll is true + */ private final SimpleDoubleProperty truncateWidth = new SimpleDoubleProperty(200.0); EventDetailChart(DateAxis dateAxis, final Axis verticalAxis, ObservableList selectedNodes) { @@ -222,32 +248,32 @@ public final class EventDetailChart extends XYChart im chartContextMenu = ActionUtils.createContextMenu(Arrays.asList(new Action( NbBundle.getMessage(this.getClass(), "EventDetailChart.chartContextMenu.placeMarker.name")) { - { - setGraphic(new ImageView(new Image("/org/sleuthkit/autopsy/timeline/images/marker.png", 16, 16, true, true, true))); // NON-NLS - setEventHandler((ActionEvent t) -> { - if (guideLine == null) { - guideLine = new GuideLine(0, 0, 0, getHeight(), dateAxis); - guideLine.relocate(clickEvent.getX(), 0); - guideLine.endYProperty().bind(heightProperty().subtract(dateAxis.heightProperty().subtract(dateAxis.tickLengthProperty()))); + { + setGraphic(new ImageView(new Image("/org/sleuthkit/autopsy/timeline/images/marker.png", 16, 16, true, true, true))); // NON-NLS + setEventHandler((ActionEvent t) -> { + if (guideLine == null) { + guideLine = new GuideLine(0, 0, 0, getHeight(), dateAxis); + guideLine.relocate(clickEvent.getX(), 0); + guideLine.endYProperty().bind(heightProperty().subtract(dateAxis.heightProperty().subtract(dateAxis.tickLengthProperty()))); - getChartChildren().add(guideLine); + getChartChildren().add(guideLine); - guideLine.setOnMouseClicked((MouseEvent event) -> { - if (event.getButton() == MouseButton.SECONDARY) { - clearGuideLine(); - event.consume(); + guideLine.setOnMouseClicked((MouseEvent event) -> { + if (event.getButton() == MouseButton.SECONDARY) { + clearGuideLine(); + event.consume(); + } + }); + } else { + guideLine.relocate(clickEvent.getX(), 0); } }); - } else { - guideLine.relocate(clickEvent.getX(), 0); } - }); - } - }, new ActionGroup( - NbBundle.getMessage(this.getClass(), "EventDetailChart.contextMenu.zoomHistory.name"), - new Back(controller), - new 
Forward(controller)))); + }, new ActionGroup( + NbBundle.getMessage(this.getClass(), "EventDetailChart.contextMenu.zoomHistory.name"), + new Back(controller), + new Forward(controller)))); chartContextMenu.setAutoHide(true); chartContextMenu.show(EventDetailChart.this, clickEvent.getScreenX(), clickEvent.getScreenY()); clickEvent.consume(); @@ -318,15 +344,22 @@ public final class EventDetailChart extends XYChart im @Override public void setModel(FilteredEventsModel filteredEvents) { - this.filteredEvents = filteredEvents; - filteredEvents.getRequestedZoomParamters().addListener(o -> { - clearGuideLine(); - clearIntervalSelector(); + if (this.filteredEvents != null) { + this.filteredEvents.unRegisterForEvents(this); + } + if (this.filteredEvents != filteredEvents) { + filteredEvents.registerForEvents(this); + filteredEvents.getRequestedZoomParamters().addListener(o -> { + clearGuideLine(); + clearIntervalSelector(); + + selectedNodes.clear(); + projectionMap.clear(); + controller.selectEventIDs(Collections.emptyList()); + }); + } + this.filteredEvents = filteredEvents; - selectedNodes.clear(); - projectionMap.clear(); - controller.selectEventIDs(Collections.emptyList()); - }); } @Override @@ -338,7 +371,8 @@ public final class EventDetailChart extends XYChart im bandByType.set(t1); } - /** get the DateTime along the x-axis that corresponds to the given + /** + * get the DateTime along the x-axis that corresponds to the given * x-coordinate in the coordinate system of this {@link EventDetailChart} * * @param x a x-coordinate in the space of this {@link EventDetailChart} @@ -493,6 +527,10 @@ public final class EventDetailChart extends XYChart im return nodes; } + private Iterable getAllNodes() { + return getNodes(x -> true); + } + synchronized SimpleDoubleProperty getTruncateWidth() { return truncateWidth; } @@ -502,9 +540,8 @@ public final class EventDetailChart extends XYChart im nodeGroup.setTranslateY(-d * h); } - private void checkNode(AggregateEventNode node, 
Predicate p, List nodes) { + private static void checkNode(AggregateEventNode node, Predicate p, List nodes) { if (node != null) { - AggregateEvent event = node.getEvent(); if (p.test(node)) { nodes.add(node); } @@ -692,8 +729,25 @@ public final class EventDetailChart extends XYChart im requiresLayout = true; } + /** + * make this accessible to AggregateEventNode + */ @Override protected void requestChartLayout() { - super.requestChartLayout(); //To change body of generated methods, choose Tools | Templates. + super.requestChartLayout(); + } + + @Subscribe + synchronized public void handleEventsUnTagged(EventsUnTaggedEvent tagEvent) { + for (AggregateEventNode t : getAllNodes()) { + t.handleEventsUnTagged(tagEvent); + } + } + + @Subscribe + synchronized public void handleEventsTagged(EventsTaggedEvent tagEvent) { + for (AggregateEventNode t : getAllNodes()) { + t.handleEventsTagged(tagEvent); + } } }