Merge pull request #1496 from millmanorama/timeline_tags_visualiztion

Timeline tags visualization
This commit is contained in:
Richard Cordovano 2015-08-11 18:23:55 -04:00
commit bfd9d71bb3
15 changed files with 741 additions and 431 deletions

View File

@ -66,6 +66,10 @@ import static org.sleuthkit.autopsy.casemodule.Case.Events.DATA_SOURCE_ADDED;
import org.sleuthkit.autopsy.coreutils.History;
import org.sleuthkit.autopsy.coreutils.LoggedTask;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.events.BlackBoardArtifactTagAddedEvent;
import org.sleuthkit.autopsy.events.BlackBoardArtifactTagDeletedEvent;
import org.sleuthkit.autopsy.events.ContentTagAddedEvent;
import org.sleuthkit.autopsy.events.ContentTagDeletedEvent;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.events.db.EventsRepository;
@ -162,10 +166,8 @@ public class TimeLineController {
@GuardedBy("this")
private boolean listeningToAutopsy = false;
private final PropertyChangeListener caseListener;
private final PropertyChangeListener caseListener = new AutopsyCaseListener();
private final PropertyChangeListener ingestJobListener = new AutopsyIngestJobListener();
private final PropertyChangeListener ingestModuleListener = new AutopsyIngestModuleListener();
@GuardedBy("this")
@ -239,8 +241,6 @@ public class TimeLineController {
DescriptionLOD.SHORT);
historyManager.advance(InitialZoomState);
//persistent listener instances
caseListener = new AutopsyCaseListener();
}
/**
@ -792,6 +792,18 @@ public class TimeLineController {
@Override
public void propertyChange(PropertyChangeEvent evt) {
switch (Case.Events.valueOf(evt.getPropertyName())) {
case BLACKBOARD_ARTIFACT_TAG_ADDED:
filteredEvents.handleTagAdded((BlackBoardArtifactTagAddedEvent) evt);
break;
case BLACKBOARD_ARTIFACT_TAG_DELETED:
filteredEvents.handleTagDeleted((BlackBoardArtifactTagDeletedEvent) evt);
break;
case CONTENT_TAG_ADDED:
filteredEvents.handleTagAdded((ContentTagAddedEvent) evt);
break;
case CONTENT_TAG_DELETED:
filteredEvents.handleTagDeleted((ContentTagDeletedEvent) evt);
break;
case DATA_SOURCE_ADDED:
// Content content = (Content) evt.getNewValue();
//if we are doing incremental updates, drop this
@ -804,7 +816,7 @@ public class TimeLineController {
});
break;
case CURRENT_CASE:
OpenTimelineAction.invalidateController();
OpenTimelineAction.invalidateController();
SwingUtilities.invokeLater(TimeLineController.this::closeTimeLine);
break;
}
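For context, a minimal sketch (not part of this diff) of how these persistent listener instances are typically attached, assuming the standard Autopsy hooks Case.addPropertyChangeListener and IngestManager's listener registration methods:

    // illustrative only -- mirrors the usual TimeLineController wiring
    synchronized void listenToAutopsy() {
        if (listeningToAutopsy == false) {
            Case.addPropertyChangeListener(caseListener);
            IngestManager.getInstance().addIngestJobEventListener(ingestJobListener);
            IngestManager.getInstance().addIngestModuleEventListener(ingestModuleListener);
            listeningToAutopsy = true;
        }
    }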

View File

@ -28,32 +28,57 @@ import org.sleuthkit.autopsy.timeline.utils.IntervalUtils;
import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD;
/**
* An event that represent a set of other events aggregated together. All the
* Represents a set of other (TimeLineEvent) events aggregated together. All the
* sub events should have the same type and matching descriptions at the
* designated 'zoom level'.
*/
@Immutable
public class AggregateEvent {
/**
* the smallest time interval containing all the aggregated events
*/
final private Interval span;
/**
* the type of all the aggregated events
*/
final private EventType type;
final private Set<Long> eventIDs;
/**
* the common description of all the aggregated events
*/
final private String description;
/**
* the description level of detail that the events were aggregated at.
*/
private final DescriptionLOD lod;
/**
* the set of ids of the aggregated events
*/
final private Set<Long> eventIDs;
/**
* the ids of the subset of aggregated events that have at least one tag
* applied to them
*/
private final Set<Long> tagged;
/**
* the ids of the subset of aggregated events that have at least one hash
* set hit
*/
private final Set<Long> hashHits;
public AggregateEvent(Interval spanningInterval, EventType type, Set<Long> eventIDs, Set<Long> hashHits, String description, DescriptionLOD lod) {
public AggregateEvent(Interval spanningInterval, EventType type, Set<Long> eventIDs, Set<Long> hashHits, Set<Long> tagged, String description, DescriptionLOD lod) {
this.span = spanningInterval;
this.type = type;
this.hashHits = hashHits;
this.tagged = tagged;
this.description = description;
this.eventIDs = eventIDs;
this.lod = lod;
}
@ -73,6 +98,10 @@ public class AggregateEvent {
return Collections.unmodifiableSet(hashHits);
}
public Set<Long> getEventIDsWithTags() {
return Collections.unmodifiableSet(tagged);
}
public String getDescription() {
return description;
}
@ -81,30 +110,72 @@ public class AggregateEvent {
return type;
}
/**
* merge two aggregate events into one new aggregate event.
*
* @param ag1
* @param ag2
*
* @return
*/
public static AggregateEvent merge(AggregateEvent ag1, AggregateEvent ag2) {
if (ag1.getType() != ag2.getType()) {
throw new IllegalArgumentException("aggregate events are not compatible: they have different types");
}
if (!ag1.getDescription().equals(ag2.getDescription())) {
throw new IllegalArgumentException("aggregate events are not compatible: they have different descriptions");
}
Sets.SetView<Long> idsUnion = Sets.union(ag1.getEventIDs(), ag2.getEventIDs());
Sets.SetView<Long> hashHitsUnion = Sets.union(ag1.getEventIDsWithHashHits(), ag2.getEventIDsWithHashHits());
return new AggregateEvent(IntervalUtils.span(ag1.span, ag2.span), ag1.getType(), idsUnion, hashHitsUnion, ag1.getDescription(), ag1.lod);
}
public DescriptionLOD getLOD() {
return lod;
}
/**
* merge two aggregate events into one new aggregate event.
*
* @param aggEvent1 the first aggregate event to merge
* @param aggEvent2 the second aggregate event to merge
*
* @return a new aggregate event that is the result of merging the given
* events
*/
public static AggregateEvent merge(AggregateEvent aggEvent1, AggregateEvent aggEvent2) {
if (aggEvent1.getType() != aggEvent2.getType()) {
throw new IllegalArgumentException("aggregate events are not compatible: they have different types");
}
if (!aggEvent1.getDescription().equals(aggEvent2.getDescription())) {
throw new IllegalArgumentException("aggregate events are not compatible: they have different descriptions");
}
Sets.SetView<Long> idsUnion = Sets.union(aggEvent1.getEventIDs(), aggEvent2.getEventIDs());
Sets.SetView<Long> hashHitsUnion = Sets.union(aggEvent1.getEventIDsWithHashHits(), aggEvent2.getEventIDsWithHashHits());
Sets.SetView<Long> taggedUnion = Sets.union(aggEvent1.getEventIDsWithTags(), aggEvent2.getEventIDsWithTags());
return new AggregateEvent(IntervalUtils.span(aggEvent1.span, aggEvent2.span), aggEvent1.getType(), idsUnion, hashHitsUnion, taggedUnion, aggEvent1.getDescription(), aggEvent1.lod);
}
/**
* get an AggregateEvent the same as this one but with the given eventIDs
* removed from the list of tagged events
*
* @param unTaggedIDs
*
* @return a new Aggregate event that is the same as this one but with the
* given event Ids removed from the list of tagged ids, or, this
* AggregateEvent if no event ids would be removed
*/
public AggregateEvent withTagsRemoved(Set<Long> unTaggedIDs) {
Sets.SetView<Long> stillTagged = Sets.difference(tagged, unTaggedIDs);
if (stillTagged.size() < tagged.size()) {
return new AggregateEvent(span, type, eventIDs, hashHits, stillTagged.immutableCopy(), description, lod);
}
return this; //no change
}
/**
* get an AggregateEvent the same as this one but with the given eventIDs
* added to the list of tagged events, if they are part of this AggregateEvent
*
* @param taggedIDs
*
* @return a new Aggregate event that is the same as this one but with the
* given event Ids added to the list of tagged ids, or, this
* AggregateEvent if no event ids would be added
*/
public AggregateEvent withTagsAdded(Set<Long> taggedIDs) {
Sets.SetView<Long> taggedIdsInAgg = Sets.intersection(eventIDs, taggedIDs);//events that are in this aggregate and (newly) marked as tagged
if (taggedIdsInAgg.size() > 0) {
Sets.SetView<Long> notYetIncludedTagged = Sets.difference(taggedIdsInAgg, tagged); // events that are tagged, but not already marked as tagged in this Agg
if (notYetIncludedTagged.size() > 0) {
return new AggregateEvent(span, type, eventIDs, hashHits, Sets.union(tagged, taggedIdsInAgg).immutableCopy(), description, lod);
}
}
return this; //no change
}
}
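Because the class is @Immutable, withTagsAdded and withTagsRemoved return fresh instances instead of mutating state. A minimal sketch of that contract; the span, type, and IDs below are made up for illustration:

    // hypothetical usage -- assumes an Interval 'span' and EventType 'type' in scope
    AggregateEvent agg = new AggregateEvent(span, type,
            Sets.newHashSet(1L, 2L, 3L),  // eventIDs
            Collections.<Long>emptySet(), // hashHits
            Collections.<Long>emptySet(), // tagged
            "/img/some/path", DescriptionLOD.SHORT);
    AggregateEvent withTag = agg.withTagsAdded(Collections.singleton(2L));       // new instance, 2L now tagged
    AggregateEvent unchanged = withTag.withTagsAdded(Collections.singleton(2L)); // no-op, returns the same instance
    assert unchanged == withTag;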

View File

@ -0,0 +1,39 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.events;
import java.util.Collections;
import java.util.Set;
/**
* Posted to the eventbus when a tag has been added to a file or artifact that
* corresponds to an event
*/
public class EventsTaggedEvent {
private final Set<Long> eventIDs;
public EventsTaggedEvent(Set<Long> eventIDs) {
this.eventIDs = eventIDs;
}
public Set<Long> getEventIDs() {
return Collections.unmodifiableSet(eventIDs);
}
}

View File

@ -0,0 +1,40 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.events;
import java.util.Collections;
import java.util.Set;
/**
* Posted to the eventbus when a tag has been removed from a file or artifact
* that corresponds to an event
*/
public class EventsUnTaggedEvent {
private final Set<Long> eventIDs;
public Set<Long> getEventIDs() {
return Collections.unmodifiableSet(eventIDs);
}
public EventsUnTaggedEvent(Set<Long> eventIDs) {
this.eventIDs = eventIDs;
}
}
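Both payload classes travel over the Guava EventBus owned by FilteredEventsModel; any object with @Subscribe methods can receive them once registered. A sketch of a subscriber, with an illustrative class name:

    // hypothetical subscriber -- uses com.google.common.eventbus.Subscribe;
    // register an instance via FilteredEventsModel.registerForEvents(...)
    class TaggedCountTracker {

        @Subscribe
        public void handleTagged(EventsTaggedEvent event) {
            System.out.println(event.getEventIDs().size() + " event(s) gained a tag");
        }

        @Subscribe
        public void handleUnTagged(EventsUnTaggedEvent event) {
            System.out.println(event.getEventIDs().size() + " event(s) had a tag removed");
        }
    }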

View File

@ -18,10 +18,12 @@
*/
package org.sleuthkit.autopsy.timeline.events;
import com.google.common.eventbus.EventBus;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import javafx.beans.Observable;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.property.ReadOnlyObjectWrapper;
@ -29,6 +31,12 @@ import javafx.collections.MapChangeListener;
import javax.annotation.concurrent.GuardedBy;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.events.BlackBoardArtifactTagAddedEvent;
import org.sleuthkit.autopsy.events.BlackBoardArtifactTagDeletedEvent;
import org.sleuthkit.autopsy.events.ContentTagAddedEvent;
import org.sleuthkit.autopsy.events.ContentTagDeletedEvent;
import org.sleuthkit.autopsy.timeline.TimeLineView;
import org.sleuthkit.autopsy.timeline.events.db.EventsRepository;
import org.sleuthkit.autopsy.timeline.events.type.EventType;
@ -45,6 +53,9 @@ import org.sleuthkit.autopsy.timeline.filters.TypeFilter;
import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD;
import org.sleuthkit.autopsy.timeline.zooming.EventTypeZoomLevel;
import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
/**
* This class acts as the model for a {@link TimeLineView}
@ -70,11 +81,9 @@ import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
*/
public final class FilteredEventsModel {
/*
* requested time range, filter, event_type zoom, and description level of
* detail. if specifics are not passed to methods, the values of these
* members are used to query repository.
*/
private static final Logger LOGGER = Logger.getLogger(FilteredEventsModel.class.getName());
/**
* time range that spans the filtered events
*/
@ -93,6 +102,8 @@ public final class FilteredEventsModel {
@GuardedBy("this")
private final ReadOnlyObjectWrapper<ZoomParams> requestedZoomParamters = new ReadOnlyObjectWrapper<>();
private final EventBus eventbus = new EventBus("Event_Repository_EventBus");
/**
* The underlying repo for events. Atomic access to repo is synchronized
* internally, but compound access should be done with the intrinsic lock of
@ -100,12 +111,14 @@ public final class FilteredEventsModel {
*/
@GuardedBy("this")
private final EventsRepository repo;
private final Case autoCase;
/**
* @return the default filter used at startup
*/
public RootFilter getDefaultFilter() {
DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();
repo.getDatasourcesMap().entrySet().stream().forEach((Map.Entry<Long, String> t) -> {
DataSourceFilter dataSourceFilter = new DataSourceFilter(t.getValue(), t.getKey());
dataSourceFilter.setSelected(Boolean.TRUE);
@ -123,7 +136,7 @@ public final class FilteredEventsModel {
public FilteredEventsModel(EventsRepository repo, ReadOnlyObjectProperty<ZoomParams> currentStateProperty) {
this.repo = repo;
this.autoCase = repo.getAutoCase();
repo.getDatasourcesMap().addListener((MapChangeListener.Change<? extends Long, ? extends String> change) -> {
DataSourceFilter dataSourceFilter = new DataSourceFilter(change.getValueAdded(), change.getKey());
RootFilter rootFilter = filter().get();
@ -302,4 +315,53 @@ public final class FilteredEventsModel {
return requestedLOD.get();
}
synchronized public void handleTagAdded(BlackBoardArtifactTagAddedEvent e) {
BlackboardArtifact artifact = e.getTag().getArtifact();
Set<Long> updatedEventIDs = repo.markEventsTagged(artifact.getObjectID(), artifact.getArtifactID(), true);
if (!updatedEventIDs.isEmpty()) {
eventbus.post(new EventsTaggedEvent(updatedEventIDs));
}
}
synchronized public void handleTagDeleted(BlackBoardArtifactTagDeletedEvent e) {
BlackboardArtifact artifact = e.getTag().getArtifact();
try {
boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false;
Set<Long> updatedEventIDs = repo.markEventsTagged(artifact.getObjectID(), artifact.getArtifactID(), tagged);
if (!updatedEventIDs.isEmpty()) {
eventbus.post(new EventsUnTaggedEvent(updatedEventIDs));
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "unable to determine tagged status of attribute.", ex);
}
}
synchronized public void handleTagAdded(ContentTagAddedEvent e) {
Content content = e.getTag().getContent();
Set<Long> updatedEventIDs = repo.markEventsTagged(content.getId(), null, true);
if (!updatedEventIDs.isEmpty()) {
eventbus.post(new EventsTaggedEvent(updatedEventIDs));
}
}
synchronized public void handleTagDeleted(ContentTagDeletedEvent e) {
Content content = e.getTag().getContent();
try {
boolean tagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false;
Set<Long> updatedEventIDs = repo.markEventsTagged(content.getId(), null, tagged);
if (!updatedEventIDs.isEmpty()) {
eventbus.post(new EventsUnTaggedEvent(updatedEventIDs));
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "unable to determine tagged status of content.", ex);
}
}
synchronized public void registerForEvents(Object o) {
eventbus.register(o);
}
synchronized public void unRegisterForEvents(Object o) {
eventbus.unregister(o);
}
}
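A view that wants these updates registers itself with the model. A minimal usage sketch, assuming a FilteredEventsModel named filteredEvents and a detailView object with @Subscribe methods like the ones shown earlier:

    filteredEvents.registerForEvents(detailView);   // start receiving EventsTaggedEvent / EventsUnTaggedEvent
    // ... later, when the view is closed ...
    filteredEvents.unRegisterForEvents(detailView); // avoid leaking a dead subscriber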

View File

@ -18,6 +18,7 @@
*/
package org.sleuthkit.autopsy.timeline.events;
import javax.annotation.Nullable;
import org.sleuthkit.autopsy.timeline.events.type.EventType;
import org.sleuthkit.datamodel.TskData;
@ -41,8 +42,9 @@ public class TimeLineEvent {
private final TskData.FileKnown known;
private final boolean hashHit;
private final boolean tagged;
public TimeLineEvent(Long eventID, Long objID, Long artifactID, Long time, EventType type, String fullDescription, String medDescription, String shortDescription, TskData.FileKnown known, boolean hashHit) {
public TimeLineEvent(Long eventID, Long objID, @Nullable Long artifactID, Long time, EventType type, String fullDescription, String medDescription, String shortDescription, TskData.FileKnown known, boolean hashHit, boolean tagged) {
this.eventID = eventID;
this.fileID = objID;
this.artifactID = artifactID;
@ -54,12 +56,18 @@ public class TimeLineEvent {
this.shortDescription = shortDescription;
this.known = known;
this.hashHit = hashHit;
this.tagged = tagged;
}
public boolean isTagged() {
return tagged;
}
public boolean isHashHit() {
return hashHit;
}
@Nullable
public Long getArtifactID() {
return artifactID;
}

View File

@ -1,6 +0,0 @@
EventsRepository.progressWindow.msg.reinit_db=(re)initializing events database
EventsRepository.progressWindow.msg.populateMacEventsFiles=populating mac events for files\:
EventsRepository.progressWindow.msg.populateMacEventsFiles2=populating mac events for files\:
EventsRepository.progressWindow.msg.commitingDb=committing events db
EventsRepository.msgdlg.problem.text=There was a problem populating the timeline. Not all events may be present or accurate. See the log for details.
EventsRepository.progressWindow.populatingXevents=populating {0} events

View File

@ -18,7 +18,6 @@
*/
package org.sleuthkit.autopsy.timeline.events.db;
import com.google.common.base.Stopwatch;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.SetMultimap;
import java.nio.file.Paths;
@ -46,6 +45,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.logging.Level;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
import org.apache.commons.lang3.StringUtils;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
@ -87,42 +87,8 @@ import org.sqlite.SQLiteJDBCLoader;
*/
public class EventDB {
private PreparedStatement insertHashSetStmt;
private PreparedStatement insertHashHitStmt;
private PreparedStatement selectHashSetStmt;
/**
* enum to represent columns in the events table
*/
enum EventTableColumn {
EVENT_ID("event_id"), // NON-NLS
FILE_ID("file_id"), // NON-NLS
ARTIFACT_ID("artifact_id"), // NON-NLS
BASE_TYPE("base_type"), // NON-NLS
SUB_TYPE("sub_type"), // NON-NLS
KNOWN("known_state"), // NON-NLS
DATA_SOURCE_ID("datasource_id"), // NON-NLS
FULL_DESCRIPTION("full_description"), // NON-NLS
MED_DESCRIPTION("med_description"), // NON-NLS
SHORT_DESCRIPTION("short_description"), // NON-NLS
TIME("time"),
HASH_HIT("hash_hit"); // NON-NLS
private final String columnName;
private EventTableColumn(String columnName) {
this.columnName = columnName;
}
@Override
public String toString() {
return columnName;
}
}
/**
* enum to represent keys stored in db_info table
*/
private enum DBInfoKey {
@ -186,12 +152,19 @@ public class EventDB {
private PreparedStatement getDataSourceIDsStmt;
private PreparedStatement insertRowStmt;
private PreparedStatement recordDBInfoStmt;
private PreparedStatement insertHashSetStmt;
private PreparedStatement insertHashHitStmt;
private PreparedStatement selectHashSetStmt;
private PreparedStatement countAllEventsStmt;
private PreparedStatement dropEventsTableStmt;
private PreparedStatement dropHashSetHitsTableStmt;
private PreparedStatement dropHashSetsTableStmt;
private PreparedStatement dropDBInfoTableStmt;
private PreparedStatement selectEventsFromObjectAndArtifactStmt;
private final Set<PreparedStatement> preparedStatements = new HashSet<>();
private final ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock(true); //use fairness policy
private final Lock DBLock = rwLock.writeLock(); //using exclusive lock for all db ops for now
private final Lock DBLock = new ReentrantReadWriteLock(true).writeLock(); //using exclusive lock for all db ops for now
private EventDB(Case autoCase) throws SQLException, Exception {
//should this go into module output (or even cache, we should be able to rebuild it)?
@ -208,30 +181,6 @@ public class EventDB {
}
}
public Interval getSpanningInterval(Collection<Long> eventIDs) {
Interval span = null;
DBLock.lock();
try (Statement stmt = con.createStatement();
//You can't inject multiple values into one ? parameter in a prepared statement,
//so we make a new statement each time...
ResultSet rs = stmt.executeQuery("select Min(time), Max(time) from events where event_id in (" + StringUtils.join(eventIDs, ", ") + ")");) { // NON-NLS
while (rs.next()) {
span = new Interval(rs.getLong("Min(time)"), rs.getLong("Max(time)") + 1, DateTimeZone.UTC); // NON-NLS
}
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "Error executing get spanning interval query.", ex); // NON-NLS
} finally {
DBLock.unlock();
}
return span;
}
EventTransaction beginTransaction() {
return new EventTransaction();
}
void closeDBCon() {
if (con != null) {
try {
@ -244,6 +193,25 @@ public class EventDB {
con = null;
}
public Interval getSpanningInterval(Collection<Long> eventIDs) {
DBLock.lock();
try (Statement stmt = con.createStatement();
ResultSet rs = stmt.executeQuery("select Min(time), Max(time) from events where event_id in (" + StringUtils.join(eventIDs, ", ") + ")");) { // NON-NLS
while (rs.next()) {
return new Interval(rs.getLong("Min(time)"), rs.getLong("Max(time)") + 1, DateTimeZone.UTC); // NON-NLS
}
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "Error executing get spanning interval query.", ex); // NON-NLS
} finally {
DBLock.unlock();
}
return null;
}
EventTransaction beginTransaction() {
return new EventTransaction();
}
void commitTransaction(EventTransaction tr, Boolean notify) {
if (tr.isClosed()) {
throw new IllegalArgumentException("can't close already closed transaction"); // NON-NLS
@ -251,24 +219,34 @@ public class EventDB {
tr.commit(notify);
}
/**
* @return the total number of events in the database, or -1 if there is an
* error.
*/
int countAllEvents() {
int result = -1;
DBLock.lock();
//TODO convert this to prepared statement -jm
try (ResultSet rs = con.createStatement().executeQuery("select count(*) as count from events")) { // NON-NLS
try (ResultSet rs = countAllEventsStmt.executeQuery()) { // NON-NLS
while (rs.next()) {
result = rs.getInt("count"); // NON-NLS
break;
return rs.getInt("count"); // NON-NLS
}
} catch (SQLException ex) {
Exceptions.printStackTrace(ex);
LOGGER.log(Level.SEVERE, "Error counting all events", ex);
} finally {
DBLock.unlock();
}
return result;
return -1;
}
Map<EventType, Long> countEvents(ZoomParams params) {
/**
* get the count of all events that fit the given zoom params, organized by
* the EventType of the level specified in the ZoomParams
*
* @param params the params that control what events to count and how to
* organize the returned map
*
* @return a map from event type (of the requested level) to event counts
*/
Map<EventType, Long> countEventsByType(ZoomParams params) {
if (params.getTimeRange() != null) {
return countEvents(params.getTimeRange().getStartMillis() / 1000,
params.getTimeRange().getEndMillis() / 1000,
@ -278,22 +256,25 @@ public class EventDB {
}
}
void dropEventsTable() {
//TODO: use prepared statement - jm
/**
* drop the tables from this database and recreate them in order to start
* over.
*/
void reInitializeDB() {
DBLock.lock();
try (Statement createStatement = con.createStatement()) {
createStatement.execute("drop table if exists events"); // NON-NLS
try {
dropEventsTableStmt.executeUpdate();
dropHashSetHitsTableStmt.executeUpdate();
dropHashSetsTableStmt.executeUpdate();
dropDBInfoTableStmt.executeUpdate();
initializeDB();
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "could not drop old events table", ex); // NON-NLS
LOGGER.log(Level.SEVERE, "could not drop old tables table", ex); // NON-NLS
} finally {
DBLock.unlock();
}
}
List<AggregateEvent> getAggregatedEvents(ZoomParams params) {
return getAggregatedEvents(params.getTimeRange(), params.getFilter(), params.getTypeZoomLevel(), params.getDescrLOD());
}
Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter) {
long start = timeRange.getStartMillis() / 1000;
long end = timeRange.getEndMillis() / 1000;
@ -310,7 +291,6 @@ public class EventDB {
if (end2 == 0) {
end2 = getMaxTime();
}
//System.out.println(start2 + " " + start + " " + end + " " + end2);
return new Interval(start2 * 1000, (end2 + 1) * 1000, TimeLineController.getJodaTimeZone());
}
} catch (SQLException ex) {
@ -353,12 +333,11 @@ public class EventDB {
DBLock.lock();
final String query = "select event_id from from events" + useHashHitTablesHelper(filter) + " where time >= " + startTime + " and time <" + endTime + " and " + SQLHelper.getSQLWhere(filter); // NON-NLS
//System.out.println(query);
try (Statement stmt = con.createStatement();
ResultSet rs = stmt.executeQuery(query)) {
while (rs.next()) {
resultIDs.add(rs.getLong(EventTableColumn.EVENT_ID.toString()));
resultIDs.add(rs.getLong("event_id"));
}
} catch (SQLException sqlEx) {
@ -383,7 +362,7 @@ public class EventDB {
* this relies on the fact that no tskObj has ID 0 but 0 is the default
* value for the datasource_id column in the events table.
*/
return hasHashHitColumn() && hasDataSourceIDColumn()
return hasHashHitColumn() && hasDataSourceIDColumn() && hasTaggedColumn()
&& (getDataSourceIDs().isEmpty() == false);
}
@ -392,7 +371,7 @@ public class EventDB {
DBLock.lock();
try (ResultSet rs = getDataSourceIDsStmt.executeQuery()) {
while (rs.next()) {
long datasourceID = rs.getLong(EventTableColumn.DATA_SOURCE_ID.toString());
long datasourceID = rs.getLong("datasource_id");
//this relies on the fact that no tskObj has ID 0 but 0 is the default value for the datasource_id column in the events table.
if (datasourceID != 0) {
hashSet.add(datasourceID);
@ -494,7 +473,7 @@ public class EventDB {
+ "PRIMARY KEY (key))"; // NON-NLS
stmt.execute(sql);
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "problem creating db_info table", ex); // NON-NLS
LOGGER.log(Level.SEVERE, "problem creating db_info table", ex); // NON-NLS
}
try (Statement stmt = con.createStatement()) {
@ -525,6 +504,15 @@ public class EventDB {
LOGGER.log(Level.SEVERE, "problem upgrading events table", ex); // NON-NLS
}
}
if (hasTaggedColumn() == false) {
try (Statement stmt = con.createStatement()) {
String sql = "ALTER TABLE events ADD COLUMN tagged INTEGER"; // NON-NLS
stmt.execute(sql);
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "problem upgrading events table", ex); // NON-NLS
}
}
if (hasHashHitColumn() == false) {
try (Statement stmt = con.createStatement()) {
@ -554,26 +542,32 @@ public class EventDB {
LOGGER.log(Level.SEVERE, "problem creating hash_set_hits table", ex);
}
createEventsIndex(Arrays.asList(EventTableColumn.FILE_ID));
createEventsIndex(Arrays.asList(EventTableColumn.ARTIFACT_ID));
createEventsIndex(Arrays.asList(EventTableColumn.SUB_TYPE, EventTableColumn.TIME));
createEventsIndex(Arrays.asList(EventTableColumn.BASE_TYPE, EventTableColumn.TIME));
createEventsIndex(Arrays.asList(EventTableColumn.KNOWN));
createIndex("events", Arrays.asList("file_id"));
createIndex("events", Arrays.asList("artifact_id"));
createIndex("events", Arrays.asList("sub_type", "time"));
createIndex("events", Arrays.asList("base_type", "time"));
createIndex("events", Arrays.asList("known_state"));
try {
insertRowStmt = prepareStatement(
"INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hash_hit) " // NON-NLS
+ "VALUES (?,?,?,?,?,?,?,?,?,?,?)"); // NON-NLS
"INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hash_hit, tagged) " // NON-NLS
+ "VALUES (?,?,?,?,?,?,?,?,?,?,?,?)"); // NON-NLS
getDataSourceIDsStmt = prepareStatement("select distinct datasource_id from events"); // NON-NLS
getMaxTimeStmt = prepareStatement("select Max(time) as max from events"); // NON-NLS
getMinTimeStmt = prepareStatement("select Min(time) as min from events"); // NON-NLS
getEventByIDStmt = prepareStatement("select * from events where event_id = ?"); // NON-NLS
recordDBInfoStmt = prepareStatement("insert or replace into db_info (key, value) values (?, ?)"); // NON-NLS
getDBInfoStmt = prepareStatement("select value from db_info where key = ?"); // NON-NLS
insertHashSetStmt = prepareStatement("insert or ignore into hash_sets (hash_set_name) values (?)");
selectHashSetStmt = prepareStatement("select hash_set_id from hash_sets where hash_set_name = ?");
insertHashHitStmt = prepareStatement("insert or ignore into hash_set_hits (hash_set_id, event_id) values (?,?)");
getDataSourceIDsStmt = prepareStatement("SELECT DISTINCT datasource_id FROM events"); // NON-NLS
getMaxTimeStmt = prepareStatement("SELECT Max(time) AS max FROM events"); // NON-NLS
getMinTimeStmt = prepareStatement("SELECT Min(time) AS min FROM events"); // NON-NLS
getEventByIDStmt = prepareStatement("SELECT * FROM events WHERE event_id = ?"); // NON-NLS
recordDBInfoStmt = prepareStatement("INSERT OR REPLACE INTO db_info (key, value) values (?, ?)"); // NON-NLS
getDBInfoStmt = prepareStatement("SELECT value FROM db_info WHERE key = ?"); // NON-NLS
insertHashSetStmt = prepareStatement("INSERT OR IGNORE INTO hash_sets (hash_set_name) values (?)");
selectHashSetStmt = prepareStatement("SELECT hash_set_id FROM hash_sets WHERE hash_set_name = ?");
insertHashHitStmt = prepareStatement("INSERT OR IGNORE INTO hash_set_hits (hash_set_id, event_id) values (?,?)");
countAllEventsStmt = prepareStatement("SELECT count(*) AS count FROM events");
dropEventsTableStmt = prepareStatement("DROP TABLE IF EXISTS events");
dropHashSetHitsTableStmt = prepareStatement("DROP TABLE IF EXISTS hash_set_hits");
dropHashSetsTableStmt = prepareStatement("DROP TABLE IF EXISTS hash_sets");
dropDBInfoTableStmt = prepareStatement("DROP TABLE IF EXISTS db_ino");
selectEventsFromObjectAndArtifactStmt = prepareStatement("SELECT event_id FROM events WHERE file_id == ? AND artifact_id IS ?");
} catch (SQLException sQLException) {
LOGGER.log(Level.SEVERE, "failed to prepareStatment", sQLException); // NON-NLS
}
@ -583,15 +577,6 @@ public class EventDB {
}
}
/**
* @param tableName the value of tableName
* @param columnList the value of columnList
*/
private void createEventsIndex(final List<EventTableColumn> columnList) {
createIndex("events",
columnList.stream().map(EventTableColumn::toString).collect(Collectors.toList()));
}
/**
*
* @param tableName the value of tableName
@ -614,12 +599,12 @@ public class EventDB {
*
* @return the boolean
*/
private boolean hasDBColumn(final EventTableColumn dbColumn) {
private boolean hasDBColumn(@Nonnull final String dbColumn) {
try (Statement stmt = con.createStatement()) {
ResultSet executeQuery = stmt.executeQuery("PRAGMA table_info(events)");
while (executeQuery.next()) {
if (dbColumn.toString().equals(executeQuery.getString("name"))) {
if (dbColumn.equals(executeQuery.getString("name"))) {
return true;
}
}
@ -630,20 +615,24 @@ public class EventDB {
}
private boolean hasDataSourceIDColumn() {
return hasDBColumn(EventTableColumn.DATA_SOURCE_ID);
return hasDBColumn("datasource_id");
}
private boolean hasTaggedColumn() {
return hasDBColumn("tagged");
}
private boolean hasHashHitColumn() {
return hasDBColumn(EventTableColumn.HASH_HIT);
return hasDBColumn("hash_hit");
}
void insertEvent(long time, EventType type, long datasourceID, Long objID,
void insertEvent(long time, EventType type, long datasourceID, long objID,
Long artifactID, String fullDescription, String medDescription,
String shortDescription, TskData.FileKnown known, Set<String> hashSets) {
String shortDescription, TskData.FileKnown known, Set<String> hashSets, boolean tagged) {
EventTransaction trans = beginTransaction();
insertEvent(time, type, datasourceID, objID, artifactID, fullDescription, medDescription, shortDescription, known, hashSets, trans);
commitTransaction(trans, true);
EventTransaction transaction = beginTransaction();
insertEvent(time, type, datasourceID, objID, artifactID, fullDescription, medDescription, shortDescription, known, hashSets, tagged, transaction);
commitTransaction(transaction, true);
}
/**
@ -652,9 +641,10 @@ public class EventDB {
* @param f
* @param transaction
*/
void insertEvent(long time, EventType type, long datasourceID, Long objID,
void insertEvent(long time, EventType type, long datasourceID, long objID,
Long artifactID, String fullDescription, String medDescription,
String shortDescription, TskData.FileKnown known, Set<String> hashSetNames,
boolean tagged,
EventTransaction transaction) {
if (transaction.isClosed()) {
@ -669,18 +659,14 @@ public class EventDB {
DBLock.lock();
try {
//"INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hashHit) "
//"INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hashHit, tagged) "
insertRowStmt.clearParameters();
insertRowStmt.setLong(1, datasourceID);
if (objID != null) {
insertRowStmt.setLong(2, objID);
} else {
insertRowStmt.setNull(2, Types.INTEGER);
}
insertRowStmt.setLong(2, objID);
if (artifactID != null) {
insertRowStmt.setLong(3, artifactID);
} else {
insertRowStmt.setNull(3, Types.INTEGER);
insertRowStmt.setNull(3, Types.NULL);
}
insertRowStmt.setLong(4, time);
@ -698,6 +684,7 @@ public class EventDB {
insertRowStmt.setByte(10, known == null ? TskData.FileKnown.UNKNOWN.getFileKnownValue() : known.getFileKnownValue());
insertRowStmt.setInt(11, hashSetNames.isEmpty() ? 0 : 1);
insertRowStmt.setInt(12, tagged ? 1 : 0);
insertRowStmt.executeUpdate();
@ -735,6 +722,36 @@ public class EventDB {
}
}
Set<Long> markEventsTagged(long objectID, Long artifactID, boolean tagged) {
HashSet<Long> eventIDs = new HashSet<>();
DBLock.lock();
try {
selectEventsFromObjectAndArtifactStmt.clearParameters();
selectEventsFromObjectAndArtifactStmt.setLong(1, objectID);
if (Objects.isNull(artifactID)) {
selectEventsFromObjectAndArtifactStmt.setNull(2, Types.NULL);
} else {
selectEventsFromObjectAndArtifactStmt.setLong(2, artifactID);
}
try (ResultSet executeQuery = selectEventsFromObjectAndArtifactStmt.executeQuery();) {
while (executeQuery.next()) {
eventIDs.add(executeQuery.getLong("event_id"));
}
try (Statement updateStatement = con.createStatement();) {
updateStatement.executeUpdate("UPDATE events SET tagged = " + (tagged ? 1 : 0)
+ " WHERE event_id IN (" + StringUtils.join(eventIDs, ",") + ")");
}
}
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "failed to mark events as " + (tagged ? "" : "(un)") + tagged, ex); // NON-NLS
} finally {
DBLock.unlock();
}
return eventIDs;
}
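// For illustration (not part of this class): the two call shapes this method
// supports, matching the "artifact_id IS ?" comparison above. A null artifact
// ID matches the file's direct events; a concrete ID matches that artifact's
// events. Hypothetical calls:
//
//   markEventsTagged(contentID, null, true);       // content tag added to a file
//   markEventsTagged(contentID, artifactID, true); // tag added to an artifact
//   markEventsTagged(contentID, null, false);      // last content tag deleted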
void recordLastArtifactID(long lastArtfID) {
recordDBInfo(DBInfoKey.LAST_ARTIFACT_ID, lastArtfID);
}
@ -800,15 +817,16 @@ public class EventDB {
}
private TimeLineEvent constructTimeLineEvent(ResultSet rs) throws SQLException {
return new TimeLineEvent(rs.getLong(EventTableColumn.EVENT_ID.toString()),
rs.getLong(EventTableColumn.FILE_ID.toString()),
rs.getLong(EventTableColumn.ARTIFACT_ID.toString()),
rs.getLong(EventTableColumn.TIME.toString()), RootEventType.allTypes.get(rs.getInt(EventTableColumn.SUB_TYPE.toString())),
rs.getString(EventTableColumn.FULL_DESCRIPTION.toString()),
rs.getString(EventTableColumn.MED_DESCRIPTION.toString()),
rs.getString(EventTableColumn.SHORT_DESCRIPTION.toString()),
TskData.FileKnown.valueOf(rs.getByte(EventTableColumn.KNOWN.toString())),
rs.getInt(EventTableColumn.HASH_HIT.toString()) != 0);
return new TimeLineEvent(rs.getLong("event_id"),
rs.getLong("file_id"),
rs.getLong("artifact_id"),
rs.getLong("time"), RootEventType.allTypes.get(rs.getInt("sub_type")),
rs.getString("full_description"),
rs.getString("med_description"),
rs.getString("short_description"),
TskData.FileKnown.valueOf(rs.getByte("known_state")),
rs.getInt("hash_hit") != 0,
rs.getInt("tagged") != 0);
}
/**
@ -843,38 +861,29 @@ public class EventDB {
+ " from events" + useHashHitTablesHelper(filter) + " where time >= " + startTime + " and time < " + endTime + " and " + SQLHelper.getSQLWhere(filter) // NON-NLS
+ " GROUP BY " + useSubTypeHelper(useSubTypes); // NON-NLS
ResultSet rs = null;
DBLock.lock();
//System.out.println(queryString);
try (Statement stmt = con.createStatement();) {
Stopwatch stopwatch = new Stopwatch();
stopwatch.start();
System.out.println(queryString);
rs = stmt.executeQuery(queryString);
stopwatch.stop();
// System.out.println(stopwatch.elapsedMillis() / 1000.0 + " seconds");
try (Statement stmt = con.createStatement();
ResultSet rs = stmt.executeQuery(queryString);) {
while (rs.next()) {
EventType type = useSubTypes
? RootEventType.allTypes.get(rs.getInt(EventTableColumn.SUB_TYPE.toString()))
: BaseTypes.values()[rs.getInt(EventTableColumn.BASE_TYPE.toString())];
? RootEventType.allTypes.get(rs.getInt("sub_type"))
: BaseTypes.values()[rs.getInt("base_type")];
typeMap.put(type, rs.getLong("count(*)")); // NON-NLS
}
} catch (Exception ex) {
LOGGER.log(Level.SEVERE, "error getting count of events from db.", ex); // NON-NLS
LOGGER.log(Level.SEVERE, "Error getting count of events from db.", ex); // NON-NLS
} finally {
try {
rs.close();
} catch (SQLException ex) {
Exceptions.printStackTrace(ex);
}
DBLock.unlock();
}
return typeMap;
}
List<AggregateEvent> getAggregatedEvents(ZoomParams params) {
return getAggregatedEvents(params.getTimeRange(), params.getFilter(), params.getTypeZoomLevel(), params.getDescrLOD());
}
/**
* //TODO: update javadoc //TODO: split this into helper methods
*
@ -882,9 +891,9 @@ public class EventDB {
*
* General algorithm is as follows:
*
* - get all aggregate events, via one db query. - sort them into a map from
* (type, description)-> aggevent - for each key in map, merge the events
* and accumulate them in a list to return
* 1) get all aggregate events via one db query. 2) sort them into a map
* from (type, description) -> aggevent. 3) for each key in the map, merge
* the events and accumulate them in a list to return.
*
*
* @param timeRange the Interval within in which all returned aggregate
@ -925,36 +934,36 @@ public class EventDB {
+ " from events" + useHashHitTablesHelper(filter) + " where " + "time >= " + start + " and time < " + end + " and " + SQLHelper.getSQLWhere(filter) // NON-NLS
+ " group by interval, " + useSubTypeHelper(useSubTypes) + " , " + descriptionColumn // NON-NLS
+ " order by Min(time)"; // NON-NLS
System.out.println(query);
ResultSet rs = null;
try (Statement stmt = con.createStatement(); // scoop up requested events in groups organized by interval, type, and description
) {
Stopwatch stopwatch = new Stopwatch();
stopwatch.start();
rs = stmt.executeQuery(query);
stopwatch.stop();
System.out.println(stopwatch.elapsedMillis() / 1000.0 + " seconds");
// scoop up requested events in groups organized by interval, type, and description
try (ResultSet rs = con.createStatement().executeQuery(query);) {
while (rs.next()) {
Interval interval = new Interval(rs.getLong("Min(time)") * 1000, rs.getLong("Max(time)") * 1000, TimeLineController.getJodaTimeZone());
String eventIDS = rs.getString("event_ids");
HashSet<Long> hashHits = new HashSet<>();
try (Statement st2 = con.createStatement();) {
EventType type = useSubTypes ? RootEventType.allTypes.get(rs.getInt("sub_type")) : BaseTypes.values()[rs.getInt("base_type")];
ResultSet executeQuery = st2.executeQuery("select event_id from events where event_id in (" + eventIDS + ") and hash_hit = 1");
while (executeQuery.next()) {
hashHits.add(executeQuery.getLong(EventTableColumn.EVENT_ID.toString()));
HashSet<Long> hashHits = new HashSet<>();
HashSet<Long> tagged = new HashSet<>();
try (Statement st2 = con.createStatement();
ResultSet hashQueryResults = st2.executeQuery("select event_id , tagged, hash_hit from events where event_id in (" + eventIDS + ")");) {
while (hashQueryResults.next()) {
long eventID = hashQueryResults.getLong("event_id");
if (hashQueryResults.getInt("tagged") != 0) {
tagged.add(eventID);
}
if (hashQueryResults.getInt("hash_hit") != 0) {
hashHits.add(eventID);
}
}
}
EventType type = useSubTypes ? RootEventType.allTypes.get(rs.getInt(EventTableColumn.SUB_TYPE.toString())) : BaseTypes.values()[rs.getInt(EventTableColumn.BASE_TYPE.toString())];
AggregateEvent aggregateEvent = new AggregateEvent(
new Interval(rs.getLong("Min(time)") * 1000, rs.getLong("Max(time)") * 1000, TimeLineController.getJodaTimeZone()), // NON-NLS
interval, // NON-NLS
type,
Stream.of(eventIDS.split(",")).map(Long::valueOf).collect(Collectors.toSet()), // NON-NLS
hashHits,
rs.getString(descriptionColumn), lod);
tagged,
rs.getString(descriptionColumn),
lod);
//put events in map from type/description -> event
SetMultimap<String, AggregateEvent> descrMap = typeMap.get(type);
@ -968,11 +977,6 @@ public class EventDB {
} catch (SQLException ex) {
Exceptions.printStackTrace(ex);
} finally {
try {
rs.close();
} catch (SQLException ex) {
Exceptions.printStackTrace(ex);
}
DBLock.unlock();
}
@ -1020,7 +1024,7 @@ public class EventDB {
}
private static String useSubTypeHelper(final boolean useSubTypes) {
return useSubTypes ? EventTableColumn.SUB_TYPE.toString() : EventTableColumn.BASE_TYPE.toString();
return useSubTypes ? "sub_type" : "base_type";
}
private long getDBInfo(DBInfoKey key, long defaultValue) {
@ -1049,12 +1053,12 @@ public class EventDB {
private String getDescriptionColumn(DescriptionLOD lod) {
switch (lod) {
case FULL:
return EventTableColumn.FULL_DESCRIPTION.toString();
return "full_description";
case MEDIUM:
return EventTableColumn.MED_DESCRIPTION.toString();
return "med_description";
case SHORT:
default:
return EventTableColumn.SHORT_DESCRIPTION.toString();
return "short_description";
}
}
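To make the "general algorithm" javadoc above concrete, a simplified sketch of step 3, where each (type, description) bucket is collapsed with AggregateEvent.merge; typeMap mirrors the method's local of the same name, the rest is illustrative:

    // step 3 (sketch): merge each (type, description) bucket into one AggregateEvent
    List<AggregateEvent> aggEvents = new ArrayList<>();
    for (SetMultimap<String, AggregateEvent> descrMap : typeMap.values()) {
        for (String descr : descrMap.keySet()) {
            descrMap.get(descr).stream()
                    .reduce(AggregateEvent::merge) // requires matching type and description
                    .ifPresent(aggEvents::add);
        }
    }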

View File

@ -21,11 +21,9 @@ package org.sleuthkit.autopsy.timeline.events.db;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.cache.RemovalNotification;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
@ -44,6 +42,7 @@ import org.apache.commons.lang3.StringUtils;
import org.joda.time.Interval;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.TagsManager;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.ProgressWindow;
import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
@ -91,15 +90,17 @@ public class EventsRepository {
private final FilteredEventsModel modelInstance;
private final LoadingCache<Long, TimeLineEvent> idToEventCache;
private final LoadingCache<ZoomParams, Map<EventType, Long>> eventCountsCache;
private final LoadingCache<ZoomParams, List<AggregateEvent>> aggregateEventsCache;
private final ObservableMap<Long, String> datasourcesMap = FXCollections.observableHashMap();
private final ObservableMap<Long, String> hashSetMap = FXCollections.observableHashMap();
private final Case autoCase;
public Case getAutoCase() {
return autoCase;
}
synchronized public ObservableMap<Long, String> getDatasourcesMap() {
return datasourcesMap;
}
@ -125,19 +126,21 @@ public class EventsRepository {
//TODO: we should check that case is open, or get passed a case object/directory -jm
this.eventDB = EventDB.getEventDB(autoCase);
populateFilterMaps(autoCase.getSleuthkitCase());
idToEventCache = CacheBuilder.newBuilder().maximumSize(5000L).expireAfterAccess(10, TimeUnit.MINUTES).removalListener((RemovalNotification<Long, TimeLineEvent> rn) -> {
//LOGGER.log(Level.INFO, "evicting event: {0}", rn.toString());
}).build(CacheLoader.from(eventDB::getEventById));
eventCountsCache = CacheBuilder.newBuilder().maximumSize(1000L).expireAfterAccess(10, TimeUnit.MINUTES).removalListener((RemovalNotification<ZoomParams, Map<EventType, Long>> rn) -> {
//LOGGER.log(Level.INFO, "evicting counts: {0}", rn.toString());
}).build(CacheLoader.from(eventDB::countEvents));
aggregateEventsCache = CacheBuilder.newBuilder().maximumSize(1000L).expireAfterAccess(10, TimeUnit.MINUTES).removalListener((RemovalNotification<ZoomParams, List<AggregateEvent>> rn) -> {
//LOGGER.log(Level.INFO, "evicting aggregated events: {0}", rn.toString());
}).build(CacheLoader.from(eventDB::getAggregatedEvents));
idToEventCache = CacheBuilder.newBuilder()
.maximumSize(5000L)
.expireAfterAccess(10, TimeUnit.MINUTES)
.build(CacheLoader.from(eventDB::getEventById));
eventCountsCache = CacheBuilder.newBuilder()
.maximumSize(1000L)
.expireAfterAccess(10, TimeUnit.MINUTES)
.build(CacheLoader.from(eventDB::countEventsByType));
aggregateEventsCache = CacheBuilder.newBuilder()
.maximumSize(1000L)
.expireAfterAccess(10, TimeUnit.MINUTES)
.build(CacheLoader.from(eventDB::getAggregatedEvents));
maxCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMaxTime));
minCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMinTime));
this.modelInstance = new FilteredEventsModel(this, currentStateProperty);
}
/**
@ -184,19 +187,18 @@ public class EventsRepository {
return idToEventCache.getUnchecked(eventID);
}
public Set<TimeLineEvent> getEventsById(Collection<Long> eventIDs) {
synchronized public Set<TimeLineEvent> getEventsById(Collection<Long> eventIDs) {
return eventIDs.stream()
.map(idToEventCache::getUnchecked)
.collect(Collectors.toSet());
}
public List<AggregateEvent> getAggregatedEvents(ZoomParams params) {
synchronized public List<AggregateEvent> getAggregatedEvents(ZoomParams params) {
return aggregateEventsCache.getUnchecked(params);
}
public Map<EventType, Long> countEvents(ZoomParams params) {
synchronized public Map<EventType, Long> countEvents(ZoomParams params) {
return eventCountsCache.getUnchecked(params);
}
@ -205,6 +207,7 @@ public class EventsRepository {
maxCache.invalidateAll();
eventCountsCache.invalidateAll();
aggregateEventsCache.invalidateAll();
idToEventCache.invalidateAll();
}
public Set<Long> getEventIDs(Interval timeRange, RootFilter filter) {
@ -234,30 +237,35 @@ public class EventsRepository {
//TODO: can we avoid this with a state listener? does it amount to the same thing?
//post population operation to execute
private final Runnable r;
private final Runnable postPopulationOperation;
private final SleuthkitCase skCase;
private final TagsManager tagsManager;
public DBPopulationWorker(Runnable r) {
public DBPopulationWorker(Runnable postPopulationOperation) {
progressDialog = new ProgressWindow(null, true, this);
progressDialog.setVisible(true);
this.r = r;
skCase = autoCase.getSleuthkitCase();
tagsManager = autoCase.getServices().getTagsManager();
this.postPopulationOperation = postPopulationOperation;
}
@Override
@NbBundle.Messages({"progressWindow.msg.populateMacEventsFiles=populating mac events for files:",
"progressWindow.msg.reinit_db=(re)initializing events database",
"progressWindow.msg.commitingDb=committing events db"})
protected Void doInBackground() throws Exception {
process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, NbBundle.getMessage(this.getClass(),
"EventsRepository.progressWindow.msg.reinit_db"), "")));
process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, Bundle.progressWindow_msg_reinit_db(), "")));
//reset database
//TODO: can we do more incremental updates? -jm
eventDB.dropEventsTable();
eventDB.initializeDB();
eventDB.reInitializeDB();
//grab ids of all files
SleuthkitCase skCase = autoCase.getSleuthkitCase();
List<Long> files = skCase.findAllFileIdsWhere("name != '.' AND name != '..'");
final int numFiles = files.size();
process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, numFiles, NbBundle.getMessage(this.getClass(),
"EventsRepository.progressWindow.msg.populateMacEventsFiles"), "")));
process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, numFiles, Bundle.progressWindow_msg_populateMacEventsFiles(), "")));
//insert file events into db
int i = 1;
@ -269,7 +277,9 @@ public class EventsRepository {
try {
AbstractFile f = skCase.getAbstractFileById(fID);
if (f != null) {
if (f == null) {
LOGGER.log(Level.WARNING, "Failed to get data for file : {0}", fID); // NON-NLS
} else {
//TODO: This is broken for logical files? fix -jm
//TODO: logical files don't necessarily have valid timestamps, so ... -jm
final String uniquePath = f.getUniquePath();
@ -279,29 +289,26 @@ public class EventsRepository {
String rootFolder = StringUtils.substringBetween(parentPath, "/", "/");
String shortDesc = datasourceName + "/" + StringUtils.defaultIfBlank(rootFolder, "");
String medD = datasourceName + parentPath;
final TskData.FileKnown known = f.getKnown();
boolean hashHit = f.getArtifactsCount(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT) > 0;
Set<String> hashSets = hashHit ? f.getHashSetNames() : Collections.emptySet();
final TskData.FileKnown known = f.getKnown();
Set<String> hashSets = f.getHashSetNames() ;
boolean tagged = !tagsManager.getContentTagsByContent(f).isEmpty();
//insert it into the db if time is > 0 => time is legitimate (drops logical files)
if (f.getAtime() > 0) {
eventDB.insertEvent(f.getAtime(), FileSystemTypes.FILE_ACCESSED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, trans);
eventDB.insertEvent(f.getAtime(), FileSystemTypes.FILE_ACCESSED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans);
}
if (f.getMtime() > 0) {
eventDB.insertEvent(f.getMtime(), FileSystemTypes.FILE_MODIFIED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, trans);
eventDB.insertEvent(f.getMtime(), FileSystemTypes.FILE_MODIFIED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans);
}
if (f.getCtime() > 0) {
eventDB.insertEvent(f.getCtime(), FileSystemTypes.FILE_CHANGED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, trans);
eventDB.insertEvent(f.getCtime(), FileSystemTypes.FILE_CHANGED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans);
}
if (f.getCrtime() > 0) {
eventDB.insertEvent(f.getCrtime(), FileSystemTypes.FILE_CREATED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, trans);
eventDB.insertEvent(f.getCrtime(), FileSystemTypes.FILE_CREATED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans);
}
process(Arrays.asList(new ProgressWindow.ProgressUpdate(i, numFiles,
NbBundle.getMessage(this.getClass(),
"EventsRepository.progressWindow.msg.populateMacEventsFiles2"), f.getName())));
} else {
LOGGER.log(Level.WARNING, "failed to look up data for file : {0}", fID); // NON-NLS
Bundle.progressWindow_msg_populateMacEventsFiles(), f.getName())));
}
} catch (TskCoreException tskCoreException) {
LOGGER.log(Level.WARNING, "failed to insert mac event for file : " + fID, tskCoreException); // NON-NLS
@ -318,12 +325,11 @@ public class EventsRepository {
}
//skip file_system events, they are already handled above.
if (type instanceof ArtifactEventType) {
populateEventType((ArtifactEventType) type, trans, skCase);
populateEventType((ArtifactEventType) type, trans);
}
}
process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, NbBundle.getMessage(this.getClass(),
"EventsRepository.progressWindow.msg.commitingDb"), "")));
process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, Bundle.progressWindow_msg_commitingDb(), "")));
if (isCancelled()) {
eventDB.rollBackTransaction(trans);
} else {
@ -349,24 +355,23 @@ public class EventsRepository {
}
@Override
@NbBundle.Messages("msgdlg.problem.text=There was a problem populating the timeline."
+ " Not all events may be present or accurate. See the log for details.")
protected void done() {
super.done();
try {
progressDialog.close();
get();
} catch (CancellationException ex) {
LOGGER.log(Level.INFO, "Database population was cancelled by the user. Not all events may be present or accurate. See the log for details.", ex); // NON-NLS
} catch (InterruptedException | ExecutionException ex) {
LOGGER.log(Level.WARNING, "Exception while populating database.", ex); // NON-NLS
JOptionPane.showMessageDialog(null, NbBundle.getMessage(this.getClass(),
"EventsRepository.msgdlg.problem.text"));
JOptionPane.showMessageDialog(null, Bundle.msgdlg_problem_text());
} catch (Exception ex) {
LOGGER.log(Level.WARNING, "Unexpected exception while populating database.", ex); // NON-NLS
JOptionPane.showMessageDialog(null, NbBundle.getMessage(this.getClass(),
"EventsRepository.msgdlg.problem.text"));
JOptionPane.showMessageDialog(null, Bundle.msgdlg_problem_text());
}
r.run(); //execute post db population operation
postPopulationOperation.run(); //execute post db population operation
}
/**
@ -376,16 +381,15 @@ public class EventsRepository {
* @param trans the db transaction to use
* @param skCase a reference to the sleuthkit case
*/
private void populateEventType(final ArtifactEventType type, EventDB.EventTransaction trans, SleuthkitCase skCase) {
@NbBundle.Messages({"# {0} - event type ", "progressWindow.populatingXevents=populating {0} events"})
private void populateEventType(final ArtifactEventType type, EventDB.EventTransaction trans) {
try {
//get all the blackboard artifacts corresponding to the given event sub_type
final ArrayList<BlackboardArtifact> blackboardArtifacts = skCase.getBlackboardArtifacts(type.getArtifactType());
final int numArtifacts = blackboardArtifacts.size();
process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, numArtifacts,
NbBundle.getMessage(this.getClass(),
"EventsRepository.progressWindow.populatingXevents",
type.toString()), "")));
Bundle.progressWindow_populatingXevents(type.toString()), "")));
int i = 0;
for (final BlackboardArtifact bbart : blackboardArtifacts) {
@ -396,16 +400,15 @@ public class EventsRepository {
long datasourceID = skCase.getContentById(bbart.getObjectID()).getDataSource().getId();
AbstractFile f = skCase.getAbstractFileById(bbart.getObjectID());
boolean hashHit = f.getArtifactsCount(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT) > 0;
Set<String> hashSets = hashHit ? f.getHashSetNames() : Collections.emptySet();
eventDB.insertEvent(eventDescription.getTime(), type, datasourceID, bbart.getObjectID(), bbart.getArtifactID(), eventDescription.getFullDescription(), eventDescription.getMedDescription(), eventDescription.getShortDescription(), null, hashSets, trans);
Set<String> hashSets = f.getHashSetNames();
boolean tagged = tagsManager.getBlackboardArtifactTagsByArtifact(bbart).isEmpty() == false;
eventDB.insertEvent(eventDescription.getTime(), type, datasourceID, bbart.getObjectID(), bbart.getArtifactID(), eventDescription.getFullDescription(), eventDescription.getMedDescription(), eventDescription.getShortDescription(), null, hashSets, tagged, trans);
}
i++;
process(Arrays.asList(new ProgressWindow.ProgressUpdate(i, numArtifacts,
NbBundle.getMessage(this.getClass(),
"EventsRepository.progressWindow.populatingXevents",
type.toString()), "")));
Bundle.progressWindow_populatingXevents(type), "")));
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "There was a problem getting events with sub type = " + type.toString() + ".", ex); // NON-NLS
@ -436,4 +439,13 @@ public class EventsRepository {
}
}
}
synchronized public Set<Long> markEventsTagged(long objID, Long artifactID, boolean tagged) {
Set<Long> updatedEventIDs = eventDB.markEventsTagged(objID, artifactID, tagged);
if (!updatedEventIDs.isEmpty()) {
aggregateEventsCache.invalidateAll();
idToEventCache.invalidateAll(updatedEventIDs);
}
return updatedEventIDs;
}
}
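The Guava caches make repeated lookups cheap while letting tag changes force recomputation. A minimal sketch of that interplay; the IDs are made up, and event 42 is assumed to belong to file 7:

    TimeLineEvent event = idToEventCache.getUnchecked(42L); // miss: loaded via eventDB.getEventById
    TimeLineEvent again = idToEventCache.getUnchecked(42L); // hit: served from memory, no db query
    markEventsTagged(7L, null, true);                       // updates the db, evicts 42L and all aggregates
    TimeLineEvent fresh = idToEventCache.getUnchecked(42L); // miss: reloaded, isTagged() now true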

View File

@ -89,9 +89,7 @@ public class SQLHelper {
static String getSQLWhere(HideKnownFilter filter) {
if (filter.isSelected()) {
return "(" + EventDB.EventTableColumn.KNOWN.toString()
+ " is not '" + TskData.FileKnown.KNOWN.getFileKnownValue()
+ "')"; // NON-NLS
return "(known_state IS NOT '" + TskData.FileKnown.KNOWN.getFileKnownValue() + "')"; // NON-NLS
} else {
return "1";
}
@ -111,11 +109,11 @@ public class SQLHelper {
}
static String getSQLWhere(DataSourceFilter filter) {
return (filter.isSelected()) ? "(" + EventDB.EventTableColumn.DATA_SOURCE_ID.toString() + " = '" + filter.getDataSourceID() + "')" : "1";
return (filter.isSelected()) ? "(datasource_id = '" + filter.getDataSourceID() + "')" : "1";
}
static String getSQLWhere(DataSourcesFilter filter) {
return (filter.isSelected()) ? "(" + EventDB.EventTableColumn.DATA_SOURCE_ID.toString() + " in ("
return (filter.isSelected()) ? "(datasource_id in ("
+ filter.getSubFilters().stream()
.filter(AbstractFilter::isSelected)
.map((dataSourceFilter) -> String.valueOf(dataSourceFilter.getDataSourceID()))
@ -127,10 +125,10 @@ public class SQLHelper {
if (StringUtils.isBlank(filter.getText())) {
return "1";
}
String strip = StringUtils.strip(filter.getText());
return "((" + EventDB.EventTableColumn.MED_DESCRIPTION.toString() + " like '%" + strip + "%') or (" // NON-NLS
+ EventDB.EventTableColumn.FULL_DESCRIPTION.toString() + " like '%" + strip + "%') or (" // NON-NLS
+ EventDB.EventTableColumn.SHORT_DESCRIPTION.toString() + " like '%" + strip + "%'))";
String strippedFilterText = StringUtils.strip(filter.getText());
return "((med_description like '%" + strippedFilterText + "%')"
+ " or (full_description like '%" + strippedFilterText + "%')"
+ " or (short_description like '%" + strippedFilterText + "%'))";
} else {
return "1";
}
@ -140,19 +138,20 @@ public class SQLHelper {
* Generate a SQL where clause for the given type filter, keeping the clause
* as simple as possible to improve performance.
*
* @param filter
* @param typeFilter
*
* @return
*/
static String getSQLWhere(TypeFilter filter) {
if (filter.isSelected() == false) {
static String getSQLWhere(TypeFilter typeFilter) {
if (typeFilter.isSelected() == false) {
return "0";
} else if (filter.getEventType() instanceof RootEventType) {
if (filter.getSubFilters().stream().allMatch((Filter f) -> f.isSelected() && ((TypeFilter) f).getSubFilters().stream().allMatch(Filter::isSelected))) {
} else if (typeFilter.getEventType() instanceof RootEventType) {
if (typeFilter.getSubFilters().stream()
.allMatch(subFilter -> subFilter.isSelected() && subFilter.getSubFilters().stream().allMatch(Filter::isSelected))) {
return "1"; //then collapse clause to true
}
}
return "(" + EventDB.EventTableColumn.SUB_TYPE.toString() + " in (" + StringUtils.join(getActiveSubTypes(filter), ",") + "))";
return "(sub_type IN (" + StringUtils.join(getActiveSubTypes(typeFilter), ",") + "))";
}
}
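
The collapse-to-true optimization above is easiest to see with concrete values. Below is a minimal standalone sketch of the same idea, not the committed helper; the sub-type ids are invented.

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    /** Standalone illustration of the clause collapsing done by getSQLWhere(TypeFilter). */
    public class TypeClauseDemo {

        static String whereClause(List<Integer> activeSubTypes, int totalSubTypes) {
            if (activeSubTypes.isEmpty()) {
                return "0"; // filter deselected: always-false clause
            } else if (activeSubTypes.size() == totalSubTypes) {
                return "1"; // every sub-type selected: collapse to always-true, no IN test
            }
            return "(sub_type IN (" + activeSubTypes.stream()
                    .map(String::valueOf)
                    .collect(Collectors.joining(",")) + "))";
        }

        public static void main(String[] args) {
            System.out.println(whereClause(Arrays.asList(1, 2, 3), 3)); // 1
            System.out.println(whereClause(Arrays.asList(2, 3), 3));    // (sub_type IN (2,3))
            System.out.println(whereClause(Arrays.asList(), 3));        // 0
        }
    }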

View File

@ -18,10 +18,10 @@
*/
package org.sleuthkit.autopsy.timeline.ui.detailview;
import com.google.common.eventbus.Subscribe;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
@ -59,13 +59,14 @@ import javafx.scene.paint.Color;
import org.apache.commons.lang3.StringUtils;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.ColorUtilities;
import org.sleuthkit.autopsy.coreutils.LoggedTask;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
import org.sleuthkit.autopsy.timeline.events.EventsTaggedEvent;
import org.sleuthkit.autopsy.timeline.events.EventsUnTaggedEvent;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.events.TimeLineEvent;
import org.sleuthkit.autopsy.timeline.filters.RootFilter;
@ -73,6 +74,10 @@ import org.sleuthkit.autopsy.timeline.filters.TextFilter;
import org.sleuthkit.autopsy.timeline.filters.TypeFilter;
import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD;
import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
@ -83,9 +88,10 @@ public class AggregateEventNode extends StackPane {
private static final Logger LOGGER = Logger.getLogger(AggregateEventNode.class.getName());
private static final Image HASH_PIN = new Image(AggregateEventNode.class.getResourceAsStream("/org/sleuthkit/autopsy/images/hashset_hits.png"));
private static final Image HASH_PIN = new Image("/org/sleuthkit/autopsy/images/hashset_hits.png");
private final static Image PLUS = new Image("/org/sleuthkit/autopsy/timeline/images/plus-button.png"); // NON-NLS
private final static Image MINUS = new Image("/org/sleuthkit/autopsy/timeline/images/minus-button.png"); // NON-NLS
private final static Image TAG = new Image("/org/sleuthkit/autopsy/images/green-tag-icon-16.png"); // NON-NLS
private static final CornerRadii CORNER_RADII = new CornerRadii(3);
@ -97,7 +103,7 @@ public class AggregateEventNode extends StackPane {
/**
* The event this AggregateEventNode represents visually
*/
private final AggregateEvent event;
private AggregateEvent aggEvent;
private final AggregateEventNode parentEventNode;
@ -164,22 +170,30 @@ public class AggregateEventNode extends StackPane {
private DescriptionVisibility descrVis;
private final SleuthkitCase sleuthkitCase;
private final FilteredEventsModel eventsModel;
private Map<String, Long> hashSetCounts = null;
private Tooltip tooltip;
public AggregateEventNode(final AggregateEvent event, AggregateEventNode parentEventNode, EventDetailChart chart) {
this.event = event;
descLOD.set(event.getLOD());
private Tooltip tooltip;
private final ImageView hashIV = new ImageView(HASH_PIN);
private final ImageView tagIV = new ImageView(TAG);
public AggregateEventNode(final AggregateEvent aggEvent, AggregateEventNode parentEventNode, EventDetailChart chart) {
this.aggEvent = aggEvent;
descLOD.set(aggEvent.getLOD());
this.parentEventNode = parentEventNode;
this.chart = chart;
sleuthkitCase = chart.getController().getAutopsyCase().getSleuthkitCase();
eventsModel = chart.getController().getEventsModel();
final Region region = new Region();
HBox.setHgrow(region, Priority.ALWAYS);
ImageView imageView = new ImageView(HASH_PIN);
final HBox hBox = new HBox(descrLabel, countLabel, region, imageView, minusButton, plusButton);
if (event.getEventIDsWithHashHits().isEmpty()) {
hBox.getChildren().remove(imageView);
final HBox hBox = new HBox(descrLabel, countLabel, region, hashIV, tagIV, minusButton, plusButton);
if (aggEvent.getEventIDsWithHashHits().isEmpty()) {
hashIV.setManaged(false);
hashIV.setVisible(false);
}
if (aggEvent.getEventIDsWithTags().isEmpty()) {
tagIV.setManaged(false);
tagIV.setVisible(false);
}
hBox.setPrefWidth(USE_COMPUTED_SIZE);
hBox.setMinWidth(USE_PREF_SIZE);
@ -211,7 +225,7 @@ public class AggregateEventNode extends StackPane {
subNodePane.setPickOnBounds(false);
//setup description label
eventTypeImageView.setImage(event.getType().getFXImage());
eventTypeImageView.setImage(aggEvent.getType().getFXImage());
descrLabel.setGraphic(eventTypeImageView);
descrLabel.setPrefWidth(USE_COMPUTED_SIZE);
descrLabel.setTextOverrun(OverrunStyle.CENTER_ELLIPSIS);
@ -220,7 +234,7 @@ public class AggregateEventNode extends StackPane {
setDescriptionVisibility(chart.getDescrVisibility().get());
//setup backgrounds
final Color evtColor = event.getType().getColor();
final Color evtColor = aggEvent.getType().getColor();
spanFill = new Background(new BackgroundFill(evtColor.deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY));
setBackground(new Background(new BackgroundFill(evtColor.deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY)));
setCursor(Cursor.HAND);
@ -237,7 +251,6 @@ public class AggregateEventNode extends StackPane {
minusButton.setManaged(true);
plusButton.setManaged(true);
toFront();
});
setOnMouseExited((MouseEvent e) -> {
@ -246,13 +259,12 @@ public class AggregateEventNode extends StackPane {
plusButton.setVisible(false);
minusButton.setManaged(false);
plusButton.setManaged(false);
});
setOnMouseClicked(new EventMouseHandler());
plusButton.disableProperty().bind(descLOD.isEqualTo(DescriptionLOD.FULL));
minusButton.disableProperty().bind(descLOD.isEqualTo(event.getLOD()));
minusButton.disableProperty().bind(descLOD.isEqualTo(aggEvent.getLOD()));
plusButton.setOnMouseClicked(e -> {
final DescriptionLOD next = descLOD.get().next();
@ -270,35 +282,64 @@ public class AggregateEventNode extends StackPane {
});
}
private void installTooltip() {
synchronized private void installTooltip() {
//TODO: all this work should probably go on a background thread...
if (tooltip == null) {
String collect = "";
if (!event.getEventIDsWithHashHits().isEmpty()) {
if (Objects.isNull(hashSetCounts)) {
hashSetCounts = new HashMap<>();
try {
for (TimeLineEvent tle : eventsModel.getEventsById(event.getEventIDsWithHashHits())) {
Set<String> hashSetNames = sleuthkitCase.getAbstractFileById(tle.getFileID()).getHashSetNames();
for (String hashSetName : hashSetNames) {
hashSetCounts.merge(hashSetName, 1L, Long::sum);
Map<String, Long> hashSetCounts = new HashMap<>();
if (!aggEvent.getEventIDsWithHashHits().isEmpty()) {
try {
for (TimeLineEvent tle : eventsModel.getEventsById(aggEvent.getEventIDsWithHashHits())) {
Set<String> hashSetNames = sleuthkitCase.getAbstractFileById(tle.getFileID()).getHashSetNames();
for (String hashSetName : hashSetNames) {
hashSetCounts.merge(hashSetName, 1L, Long::sum);
}
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Error getting hashset hit info for event.", ex);
}
}
Map<String, Long> tagCounts = new HashMap<>();
if (!aggEvent.getEventIDsWithTags().isEmpty()) {
try {
for (TimeLineEvent tle : eventsModel.getEventsById(aggEvent.getEventIDsWithTags())) {
AbstractFile abstractFileById = sleuthkitCase.getAbstractFileById(tle.getFileID());
List<ContentTag> contentTags = sleuthkitCase.getContentTagsByContent(abstractFileById);
for (ContentTag tag : contentTags) {
tagCounts.merge(tag.getName().getDisplayName(), 1L, Long::sum);
}
Long artifactID = tle.getArtifactID();
if (artifactID != 0) {
BlackboardArtifact blackboardArtifact = sleuthkitCase.getBlackboardArtifact(artifactID);
List<BlackboardArtifactTag> artifactTags = sleuthkitCase.getBlackboardArtifactTagsByArtifact(blackboardArtifact);
for (BlackboardArtifactTag tag : artifactTags) {
tagCounts.merge(tag.getName().getDisplayName(), 1L, Long::sum);
}
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Error getting hashset hit info for event.", ex);
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Error getting tag info for event.", ex);
}
collect = hashSetCounts.entrySet().stream()
.map((Map.Entry<String, Long> t) -> t.getKey() + " : " + t.getValue())
.collect(Collectors.joining("\n"));
}
String hashSetCountsString = hashSetCounts.entrySet().stream()
.map((Map.Entry<String, Long> t) -> t.getKey() + " : " + t.getValue())
.collect(Collectors.joining("\n"));
String tagCountsString = tagCounts.entrySet().stream()
.map((Map.Entry<String, Long> t) -> t.getKey() + " : " + t.getValue())
.collect(Collectors.joining("\n"));
tooltip = new Tooltip(
NbBundle.getMessage(this.getClass(), "AggregateEventNode.installTooltip.text",
getEvent().getEventIDs().size(), getEvent().getType(), getEvent().getDescription(),
getEvent().getSpan().getStart().toString(TimeLineController.getZonedFormatter()),
getEvent().getSpan().getEnd().toString(TimeLineController.getZonedFormatter()))
+ (collect.isEmpty() ? "" : "\n\nHash Set Hits\n" + collect));
+ (hashSetCountsString.isEmpty() ? "" : "\n\nHash Set Hits\n" + hashSetCountsString)
+ (tagCountsString.isEmpty() ? "" : "\n\nTags\n" + tagCountsString)
);
Tooltip.install(AggregateEventNode.this, tooltip);
}
}
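
The TODO at the top of installTooltip() flags that these SleuthkitCase queries run on the JavaFX Application Thread. Below is a hedged sketch of one way to honor it, using the standard javafx.concurrent.Task; buildTooltipText() is a hypothetical helper that would wrap the counting code above.

    // Sketch only: compute the tooltip text on a worker thread, then install the
    // Tooltip back on the FX thread via the Task's succeeded handler.
    synchronized private void installTooltipAsync() {
        if (tooltip != null) {
            return; // already installed
        }
        Task<String> textTask = new Task<String>() {
            @Override
            protected String call() throws Exception {
                return buildTooltipText(); // hypothetical: the hash-set/tag counting above
            }
        };
        textTask.setOnSucceeded(workerEvent -> {
            tooltip = new Tooltip(textTask.getValue()); // succeeded handler runs on the FX thread
            Tooltip.install(AggregateEventNode.this, tooltip);
        });
        new Thread(textTask, "aggregate-node-tooltip").start(); // or a shared executor
    }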
@ -307,8 +348,8 @@ public class AggregateEventNode extends StackPane {
return subNodePane;
}
public AggregateEvent getEvent() {
return event;
synchronized public AggregateEvent getEvent() {
return aggEvent;
}
/**
@ -334,12 +375,11 @@ public class AggregateEventNode extends StackPane {
/**
* @param descrVis the level of description that should be displayed
*/
final void setDescriptionVisibility(DescriptionVisibility descrVis) {
synchronized final void setDescriptionVisibility(DescriptionVisibility descrVis) {
this.descrVis = descrVis;
final int size = event.getEventIDs().size();
final int size = aggEvent.getEventIDs().size();
switch (descrVis) {
case COUNT_ONLY:
descrLabel.setText("");
countLabel.setText(String.valueOf(size));
@ -350,7 +390,7 @@ public class AggregateEventNode extends StackPane {
break;
default:
case SHOWN:
String description = event.getDescription();
String description = aggEvent.getDescription();
description = parentEventNode != null
? " ..." + StringUtils.substringAfter(description, parentEventNode.getEvent().getDescription())
: description;
@ -380,18 +420,18 @@ public class AggregateEventNode extends StackPane {
*
* @param applied true to apply the highlight 'effect', false to remove it
*/
void applyHighlightEffect(boolean applied) {
synchronized void applyHighlightEffect(boolean applied) {
if (applied) {
descrLabel.setStyle("-fx-font-weight: bold;"); // NON-NLS
spanFill = new Background(new BackgroundFill(getEvent().getType().getColor().deriveColor(0, 1, 1, .3), CORNER_RADII, Insets.EMPTY));
spanFill = new Background(new BackgroundFill(aggEvent.getType().getColor().deriveColor(0, 1, 1, .3), CORNER_RADII, Insets.EMPTY));
spanRegion.setBackground(spanFill);
setBackground(new Background(new BackgroundFill(getEvent().getType().getColor().deriveColor(0, 1, 1, .2), CORNER_RADII, Insets.EMPTY)));
setBackground(new Background(new BackgroundFill(aggEvent.getType().getColor().deriveColor(0, 1, 1, .2), CORNER_RADII, Insets.EMPTY)));
} else {
descrLabel.setStyle("-fx-font-weight: normal;"); // NON-NLS
spanFill = new Background(new BackgroundFill(getEvent().getType().getColor().deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY));
spanFill = new Background(new BackgroundFill(aggEvent.getType().getColor().deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY));
spanRegion.setBackground(spanFill);
setBackground(new Background(new BackgroundFill(getEvent().getType().getColor().deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY)));
setBackground(new Background(new BackgroundFill(aggEvent.getType().getColor().deriveColor(0, 1, 1, .1), CORNER_RADII, Insets.EMPTY)));
}
}
@ -421,22 +461,21 @@ public class AggregateEventNode extends StackPane {
/**
* Loads sub-clusters at the given description level of detail (LOD).
*
* @param newLOD
* @param newDescriptionLOD
*/
private void loadSubClusters(DescriptionLOD newLOD) {
synchronized private void loadSubClusters(DescriptionLOD newDescriptionLOD) {
getSubNodePane().getChildren().clear();
if (newLOD == event.getLOD()) {
getSubNodePane().getChildren().clear();
if (newDescriptionLOD == aggEvent.getLOD()) {
chart.setRequiresLayout(true);
chart.requestChartLayout();
} else {
RootFilter combinedFilter = chart.getFilteredEvents().filter().get().copyOf();
RootFilter combinedFilter = eventsModel.filter().get().copyOf();
//make a new filter intersecting the global filter with text (description) and type filters to restrict sub-clusters
combinedFilter.getSubFilters().addAll(new TextFilter(event.getDescription()),
new TypeFilter(event.getType()));
combinedFilter.getSubFilters().addAll(new TextFilter(aggEvent.getDescription()),
new TypeFilter(aggEvent.getType()));
//make a new end-inclusive span (to 'filter' with)
final Interval span = event.getSpan().withEndMillis(event.getSpan().getEndMillis() + 1000);
final Interval span = aggEvent.getSpan().withEndMillis(aggEvent.getSpan().getEndMillis() + 1000);
//make a task to load the subnodes
LoggedTask<List<AggregateEventNode>> loggedTask = new LoggedTask<List<AggregateEventNode>>(
@ -445,14 +484,14 @@ public class AggregateEventNode extends StackPane {
@Override
protected List<AggregateEventNode> call() throws Exception {
//query for the sub-clusters
List<AggregateEvent> aggregatedEvents = chart.getFilteredEvents().getAggregatedEvents(new ZoomParams(span,
chart.getFilteredEvents().eventTypeZoom().get(),
List<AggregateEvent> aggregatedEvents = eventsModel.getAggregatedEvents(new ZoomParams(span,
eventsModel.eventTypeZoom().get(),
combinedFilter,
newLOD));
newDescriptionLOD));
//for each sub cluster make an AggregateEventNode to visually represent it, and set x-position
return aggregatedEvents.stream().map((AggregateEvent t) -> {
AggregateEventNode subNode = new AggregateEventNode(t, AggregateEventNode.this, chart);
subNode.setLayoutX(chart.getXAxis().getDisplayPosition(new DateTime(t.getSpan().getStartMillis())) - getLayoutXCompensation());
return aggregatedEvents.stream().map(aggEvent -> {
AggregateEventNode subNode = new AggregateEventNode(aggEvent, AggregateEventNode.this, chart);
subNode.setLayoutX(chart.getXAxis().getDisplayPosition(new DateTime(aggEvent.getSpan().getStartMillis())) - getLayoutXCompensation());
return subNode;
}).collect(Collectors.toList()); // return list of AggregateEventNodes representing subclusters
}
@ -468,7 +507,7 @@ public class AggregateEventNode extends StackPane {
chart.requestChartLayout();
chart.setCursor(null);
} catch (InterruptedException | ExecutionException ex) {
Exceptions.printStackTrace(ex);
LOGGER.log(Level.SEVERE, "Error loading subnodes", ex);
}
}
};
@ -505,4 +544,30 @@ public class AggregateEventNode extends StackPane {
}
}
}
@Subscribe
synchronized void handleEventsUnTagged(EventsUnTaggedEvent tagEvent) {
AggregateEvent withTagsRemoved = aggEvent.withTagsRemoved(tagEvent.getEventIDs());
if (withTagsRemoved != aggEvent) {
aggEvent = withTagsRemoved;
tooltip = null;
boolean hasTags = aggEvent.getEventIDsWithTags().isEmpty() == false;
Platform.runLater(() -> {
tagIV.setManaged(hasTags);
tagIV.setVisible(hasTags);
});
}
}
@Subscribe
synchronized void handleEventsTagged(EventsTaggedEvent tagEvent) {
AggregateEvent withTagsAdded = aggEvent.withTagsAdded(tagEvent.getEventIDs());
if (withTagsAdded != aggEvent) {
aggEvent = withTagsAdded;
tooltip = null;
Platform.runLater(() -> {
tagIV.setManaged(true);
tagIV.setVisible(true);
});
}
}
}
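
The identity comparisons above (withTagsRemoved != aggEvent) only work if AggregateEvent's with* methods return the very same instance when nothing changed. The committed implementation is not shown in this diff; below is a hedged sketch of that copy-on-write contract, with the constructor argument order assumed.

    // Sketch of AggregateEvent.withTagsAdded: copy-on-write over the immutable
    // tagged-id set, returning 'this' unchanged so callers can use a cheap != check.
    AggregateEvent withTagsAdded(Set<Long> addedEventIDs) {
        Set<Long> relevant = new HashSet<>(addedEventIDs);
        relevant.retainAll(getEventIDs()); // only ids that belong to this cluster
        Set<Long> tagged = new HashSet<>(getEventIDsWithTags()); // copy, never mutate
        if (tagged.addAll(relevant) == false) {
            return this; // nothing new: keep sharing the same immutable instance
        }
        return new AggregateEvent(getSpan(), getType(), getEventIDs(), tagged,
                getDescription(), getLOD()); // assumed constructor order
    }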

View File

@ -37,7 +37,17 @@ import javafx.scene.Cursor;
import javafx.scene.chart.Axis;
import javafx.scene.chart.BarChart;
import javafx.scene.chart.XYChart;
import javafx.scene.control.*;
import javafx.scene.control.CheckBox;
import javafx.scene.control.CustomMenuItem;
import javafx.scene.control.Label;
import javafx.scene.control.MenuButton;
import javafx.scene.control.MultipleSelectionModel;
import javafx.scene.control.RadioButton;
import javafx.scene.control.ScrollBar;
import javafx.scene.control.SeparatorMenuItem;
import javafx.scene.control.Slider;
import javafx.scene.control.ToggleGroup;
import javafx.scene.control.TreeItem;
import javafx.scene.effect.Effect;
import static javafx.scene.input.KeyCode.DOWN;
import static javafx.scene.input.KeyCode.KP_DOWN;
@ -95,12 +105,6 @@ public class DetailViewPane extends AbstractVisualization<DateTime, AggregateEve
private MultipleSelectionModel<TreeItem<NavTreeNode>> treeSelectionModel;
@FXML
protected ResourceBundle resources;
@FXML
protected URL location;
//these three could be injected from FXML, but that was causing NPEs
private final DateAxis dateAxis = new DateAxis();
@ -207,8 +211,8 @@ public class DetailViewPane extends AbstractVisualization<DateTime, AggregateEve
selectedNodes.addListener((Observable observable) -> {
highlightedNodes.clear();
selectedNodes.stream().forEach((tn) -> {
for (AggregateEventNode n : chart.getNodes((
AggregateEventNode t) -> t.getEvent().getDescription().equals(tn.getEvent().getDescription()))) {
for (AggregateEventNode n : chart.getNodes((AggregateEventNode t)
-> t.getEvent().getDescription().equals(tn.getEvent().getDescription()))) {
highlightedNodes.add(n);
}
});
@ -226,8 +230,7 @@ public class DetailViewPane extends AbstractVisualization<DateTime, AggregateEve
treeSelectionModel.getSelectedItems().addListener((Observable observable) -> {
highlightedNodes.clear();
for (TreeItem<NavTreeNode> tn : treeSelectionModel.getSelectedItems()) {
for (AggregateEventNode n : chart.getNodes((
AggregateEventNode t)
for (AggregateEventNode n : chart.getNodes((AggregateEventNode t)
-> t.getEvent().getDescription().equals(tn.getValue().getDescription()))) {
highlightedNodes.add(n);
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013-14 Basis Technology Corp.
* Copyright 2013-15 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -19,6 +19,7 @@
package org.sleuthkit.autopsy.timeline.ui.detailview;
import com.google.common.collect.Collections2;
import com.google.common.eventbus.Subscribe;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@ -72,6 +73,8 @@ import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.actions.Back;
import org.sleuthkit.autopsy.timeline.actions.Forward;
import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
import org.sleuthkit.autopsy.timeline.events.EventsTaggedEvent;
import org.sleuthkit.autopsy.timeline.events.EventsUnTaggedEvent;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.events.type.EventType;
import org.sleuthkit.autopsy.timeline.ui.TimeLineChart;
@ -341,15 +344,22 @@ public final class EventDetailChart extends XYChart<DateTime, AggregateEvent> im
@Override
public void setModel(FilteredEventsModel filteredEvents) {
this.filteredEvents = filteredEvents;
filteredEvents.getRequestedZoomParamters().addListener(o -> {
clearGuideLine();
clearIntervalSelector();
if (this.filteredEvents != null) {
this.filteredEvents.unRegisterForEvents(this);
}
if (this.filteredEvents != filteredEvents) {
filteredEvents.registerForEvents(this);
filteredEvents.getRequestedZoomParamters().addListener(o -> {
clearGuideLine();
clearIntervalSelector();
selectedNodes.clear();
projectionMap.clear();
controller.selectEventIDs(Collections.emptyList());
});
}
this.filteredEvents = filteredEvents;
selectedNodes.clear();
projectionMap.clear();
controller.selectEventIDs(Collections.emptyList());
});
}
@Override
@ -517,6 +527,10 @@ public final class EventDetailChart extends XYChart<DateTime, AggregateEvent> im
return nodes;
}
private Iterable<AggregateEventNode> getAllNodes() {
return getNodes(x -> true);
}
synchronized SimpleDoubleProperty getTruncateWidth() {
return truncateWidth;
}
@ -526,9 +540,8 @@ public final class EventDetailChart extends XYChart<DateTime, AggregateEvent> im
nodeGroup.setTranslateY(-d * h);
}
private void checkNode(AggregateEventNode node, Predicate<AggregateEventNode> p, List<AggregateEventNode> nodes) {
private static void checkNode(AggregateEventNode node, Predicate<AggregateEventNode> p, List<AggregateEventNode> nodes) {
if (node != null) {
AggregateEvent event = node.getEvent();
if (p.test(node)) {
nodes.add(node);
}
@ -716,8 +729,25 @@ public final class EventDetailChart extends XYChart<DateTime, AggregateEvent> im
requiresLayout = true;
}
/**
* Make this method accessible to AggregateEventNode.
*/
@Override
protected void requestChartLayout() {
super.requestChartLayout(); //To change body of generated methods, choose Tools | Templates.
super.requestChartLayout();
}
@Subscribe
synchronized public void handleEventsUnTagged(EventsUnTaggedEvent tagEvent) {
for (AggregateEventNode t : getAllNodes()) {
t.handleEventsUnTagged(tagEvent);
}
}
@Subscribe
synchronized public void handleEventsTagged(EventsTaggedEvent tagEvent) {
for (AggregateEventNode t : getAllNodes()) {
t.handleEventsTagged(tagEvent);
}
}
}
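
setModel() above registers and unregisters the chart on the model, and the @Subscribe annotations imply Guava's EventBus underneath. A hedged sketch of what FilteredEventsModel's side of that contract might look like (field and identifier names are assumptions):

    import com.google.common.eventbus.EventBus;

    // Hypothetical model-side registration: Guava's EventBus discovers the chart's
    // @Subscribe methods reflectively, so posting EventsTaggedEvent/EventsUnTaggedEvent
    // reaches every registered view without explicit listener interfaces.
    private final EventBus eventbus = new EventBus("FilteredEventsModel");

    public void registerForEvents(Object subscriber) {
        eventbus.register(subscriber);
    }

    public void unRegisterForEvents(Object subscriber) {
        eventbus.unregister(subscriber);
    }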

View File

@ -25,9 +25,9 @@ import org.openide.util.NbBundle;
*/
public enum DescriptionLOD {
SHORT(NbBundle.getMessage(DescriptionLOD.class, "DescriptionLOD.short")), MEDIUM(
NbBundle.getMessage(DescriptionLOD.class, "DescriptionLOD.medium")), FULL(
NbBundle.getMessage(DescriptionLOD.class, "DescriptionLOD.full"));
SHORT(NbBundle.getMessage(DescriptionLOD.class, "DescriptionLOD.short")),
MEDIUM(NbBundle.getMessage(DescriptionLOD.class, "DescriptionLOD.medium")),
FULL(NbBundle.getMessage(DescriptionLOD.class, "DescriptionLOD.full"));
private final String displayName;

View File

@ -18,10 +18,7 @@
*/
package org.sleuthkit.autopsy.timeline.zooming;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Objects;
import java.util.Set;
import org.joda.time.Interval;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.filters.Filter;
@ -41,20 +38,6 @@ public class ZoomParams {
private final DescriptionLOD descrLOD;
private final Set<Field> changedFields;
public Set<Field> getChangedFields() {
return Collections.unmodifiableSet(changedFields);
}
public enum Field {
TIME,
EVENT_TYPE_ZOOM,
FILTER,
DESCRIPTION_LOD;
}
public Interval getTimeRange() {
return timeRange;
}
@ -76,35 +59,27 @@ public class ZoomParams {
this.typeZoomLevel = zoomLevel;
this.filter = filter;
this.descrLOD = descrLOD;
changedFields = EnumSet.allOf(Field.class);
}
public ZoomParams(Interval timeRange, EventTypeZoomLevel zoomLevel, RootFilter filter, DescriptionLOD descrLOD, EnumSet<Field> changed) {
this.timeRange = timeRange;
this.typeZoomLevel = zoomLevel;
this.filter = filter;
this.descrLOD = descrLOD;
changedFields = changed;
}
public ZoomParams withTimeAndType(Interval timeRange, EventTypeZoomLevel zoomLevel) {
return new ZoomParams(timeRange, zoomLevel, filter, descrLOD, EnumSet.of(Field.TIME, Field.EVENT_TYPE_ZOOM));
return new ZoomParams(timeRange, zoomLevel, filter, descrLOD);
}
public ZoomParams withTypeZoomLevel(EventTypeZoomLevel zoomLevel) {
return new ZoomParams(timeRange, zoomLevel, filter, descrLOD, EnumSet.of(Field.EVENT_TYPE_ZOOM));
return new ZoomParams(timeRange, zoomLevel, filter, descrLOD);
}
public ZoomParams withTimeRange(Interval timeRange) {
return new ZoomParams(timeRange, typeZoomLevel, filter, descrLOD, EnumSet.of(Field.TIME));
return new ZoomParams(timeRange, typeZoomLevel, filter, descrLOD);
}
public ZoomParams withDescrLOD(DescriptionLOD descrLOD) {
return new ZoomParams(timeRange, typeZoomLevel, filter, descrLOD, EnumSet.of(Field.DESCRIPTION_LOD));
return new ZoomParams(timeRange, typeZoomLevel, filter, descrLOD);
}
public ZoomParams withFilter(RootFilter filter) {
return new ZoomParams(timeRange, typeZoomLevel, filter, descrLOD, EnumSet.of(Field.FILTER));
return new ZoomParams(timeRange, typeZoomLevel, filter, descrLOD);
}
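
With the change-tracking EnumSet gone, the with* methods are plain functional updates: each returns a fresh immutable ZoomParams and leaves the receiver untouched, which is what lets a history/undo stack keep old instances around. An illustrative use follows; the variable names and the halved interval are invented for the example.

    // Sketch: deriving a new zoom state without mutating the current one.
    ZoomParams current = new ZoomParams(timeRange, typeZoomLevel, filter, DescriptionLOD.SHORT);
    ZoomParams zoomedIn = current
            .withTimeRange(timeRange.withDurationAfterStart(timeRange.toDuration().dividedBy(2)))
            .withDescrLOD(DescriptionLOD.MEDIUM); // 'current' still describes the prior state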
public boolean hasFilter(Filter filterSet) {
@ -153,11 +128,7 @@ public class ZoomParams {
if (this.filter.equals(other.filter) == false) {
return false;
}
if (this.descrLOD != other.descrLOD) {
return false;
}
return true;
return this.descrLOD == other.descrLOD;
}
@Override