show tag icons on detail view event groups, and tag counts in tooltip;

cleanup; use NbBundle.Messages to generate bundle strings
This commit is contained in:
jmillman 2015-07-31 15:37:48 -04:00
parent 6259869baf
commit 703c780f27
6 changed files with 270 additions and 155 deletions

View File: AggregateEvent.java

@ -27,37 +27,51 @@ import org.sleuthkit.autopsy.timeline.events.type.EventType;
import org.sleuthkit.autopsy.timeline.utils.IntervalUtils;
import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD;
/** An event that represents a set of other events aggregated together. All the
* sub events should have the same type and matching descriptions at the
/** Represents a set of other (TimeLineEvent) events aggregated together. All
* the sub events should have the same type and matching descriptions at the
* designated 'zoom level'.
*/
@Immutable
public class AggregateEvent {
/** the smallest time interval containing all the aggregated events */
final private Interval span;
/** the type of all the aggregated events */
final private EventType type;
final private Set<Long> eventIDs;
/** the common description of all the aggregated events */
final private String description;
/** the description level of detail that the events were aggregated at. */
private final DescriptionLOD lod;
/** the set of ids of the aggregated events */
final private Set<Long> eventIDs;
/**
* the ids of the subset of aggregated events that have at least one tag
* applied to them
*/
private final Set<Long> tagged;
/**
* the ids of the subset of aggregated events that have at least one hash
* set hit
*/
private final Set<Long> hashHits;
public AggregateEvent(Interval spanningInterval, EventType type, Set<Long> eventIDs, Set<Long> hashHits, String description, DescriptionLOD lod) {
public AggregateEvent(Interval spanningInterval, EventType type, Set<Long> eventIDs, Set<Long> hashHits, Set<Long> tagged, String description, DescriptionLOD lod) {
this.span = spanningInterval;
this.type = type;
this.hashHits = hashHits;
this.tagged = tagged;
this.description = description;
this.eventIDs = eventIDs;
this.lod = lod;
}
/** @return the actual interval from the first event to the last event */
public Interval getSpan() {
return span;
}
@ -70,6 +84,10 @@ public class AggregateEvent {
return Collections.unmodifiableSet(hashHits);
}
public Set<Long> getEventIDsWithTags() {
return Collections.unmodifiableSet(tagged);
}
public String getDescription() {
return description;
}
@ -78,30 +96,33 @@ public class AggregateEvent {
return type;
}
/**
* merge two aggregate events into one new aggregate event.
*
* @param ag1
* @param ag2
*
* @return
*/
public static AggregateEvent merge(AggregateEvent ag1, AggregateEvent ag2) {
if (ag1.getType() != ag2.getType()) {
throw new IllegalArgumentException("aggregate events are not compatible they have different types");
}
if (!ag1.getDescription().equals(ag2.getDescription())) {
throw new IllegalArgumentException("aggregate events are not compatible they have different descriptions");
}
Sets.SetView<Long> idsUnion = Sets.union(ag1.getEventIDs(), ag2.getEventIDs());
Sets.SetView<Long> hashHitsUnion = Sets.union(ag1.getEventIDsWithHashHits(), ag2.getEventIDsWithHashHits());
return new AggregateEvent(IntervalUtils.span(ag1.span, ag2.span), ag1.getType(), idsUnion, hashHitsUnion, ag1.getDescription(), ag1.lod);
}
public DescriptionLOD getLOD() {
return lod;
}
/**
* merge two aggregate events into one new aggregate event.
*
* @param aggEvent1 the first aggregate event to merge
* @param aggEvent2 the second aggregate event to merge
*
* @return a new aggregate event that is the result of merging the given
* events
*/
public static AggregateEvent merge(AggregateEvent aggEvent1, AggregateEvent aggEvent2) {
if (aggEvent1.getType() != aggEvent2.getType()) {
throw new IllegalArgumentException("aggregate events are not compatible: they have different types");
}
if (!aggEvent1.getDescription().equals(aggEvent2.getDescription())) {
throw new IllegalArgumentException("aggregate events are not compatible: they have different descriptions");
}
Sets.SetView<Long> idsUnion = Sets.union(aggEvent1.getEventIDs(), aggEvent2.getEventIDs());
Sets.SetView<Long> hashHitsUnion = Sets.union(aggEvent1.getEventIDsWithHashHits(), aggEvent2.getEventIDsWithHashHits());
Sets.SetView<Long> taggedUnion = Sets.union(aggEvent1.getEventIDsWithTags(), aggEvent2.getEventIDsWithTags());
return new AggregateEvent(IntervalUtils.span(aggEvent1.span, aggEvent2.span), aggEvent1.getType(), idsUnion, hashHitsUnion, taggedUnion, aggEvent1.getDescription(), aggEvent1.lod);
}
}
}
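A minimal usage sketch of the widened constructor and merge. The seven-argument constructor and FileSystemTypes.FILE_MODIFIED are taken from this commit; the interval values, paths, and the DescriptionLOD constant name are illustrative assumptions:

// Hypothetical example: merging two compatible AggregateEvents.
// DescriptionLOD.SHORT is an assumed constant name, not from this commit.
Interval week1 = new Interval(0L, 7L * 86400000L);
Interval week2 = new Interval(7L * 86400000L, 14L * 86400000L);
AggregateEvent a = new AggregateEvent(week1, FileSystemTypes.FILE_MODIFIED,
        Sets.newHashSet(1L, 2L), Sets.newHashSet(1L), Sets.newHashSet(2L),
        "/some/path", DescriptionLOD.SHORT);
AggregateEvent b = new AggregateEvent(week2, FileSystemTypes.FILE_MODIFIED,
        Sets.newHashSet(3L), Collections.<Long>emptySet(), Collections.<Long>emptySet(),
        "/some/path", DescriptionLOD.SHORT);
AggregateEvent merged = AggregateEvent.merge(a, b);
// merged.getSpan()             covers both weeks
// merged.getEventIDs()         -> {1, 2, 3}
// merged.getEventIDsWithTags() -> {2}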

View File: TimeLineEvent.java

@ -18,6 +18,7 @@
*/
package org.sleuthkit.autopsy.timeline.events;
import javax.annotation.Nullable;
import org.sleuthkit.autopsy.timeline.events.type.EventType;
import org.sleuthkit.datamodel.TskData;
@ -29,7 +30,7 @@ public class TimeLineEvent {
private final Long eventID;
private final Long fileID;
private final Long time;
private final Long artifactID;
@ -42,7 +43,7 @@ public class TimeLineEvent {
private final boolean hashHit;
public TimeLineEvent(Long eventID, Long objID, Long artifactID, Long time, EventType type, String fullDescription, String medDescription, String shortDescription, TskData.FileKnown known, boolean hashHit) {
public TimeLineEvent(Long eventID, Long objID, @Nullable Long artifactID, Long time, EventType type, String fullDescription, String medDescription, String shortDescription, TskData.FileKnown known, boolean hashHit) {
this.eventID = eventID;
this.fileID = objID;
this.artifactID = artifactID;
@ -60,6 +61,7 @@ public class TimeLineEvent {
return hashHit;
}
@Nullable
public Long getArtifactID() {
return artifactID;
}
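Because getArtifactID() is now @Nullable, callers must null-check before unboxing; a minimal sketch of the intended call pattern (the caller shown is hypothetical):

// Hypothetical caller; sleuthkitCase and the event come from elsewhere.
Long artifactID = event.getArtifactID();
if (artifactID != null) {
    BlackboardArtifact artifact = sleuthkitCase.getBlackboardArtifact(artifactID);
    // work with the artifact
}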

View File: Bundle.properties

@ -1,6 +0,0 @@
EventsRepository.progressWindow.msg.reinit_db=(re)initializing events database
EventsRepository.progressWindow.msg.populateMacEventsFiles=populating mac events for files\:
EventsRepository.progressWindow.msg.populateMacEventsFiles2=populating mac events for files\:
EventsRepository.progressWindow.msg.commitingDb=committing events db
EventsRepository.msgdlg.problem.text=There was a problem populating the timeline. Not all events may be present or accurate. See the log for details.
EventsRepository.progressWindow.populatingXevents=populating {0} events
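These keys are not lost: later in this commit they reappear as @NbBundle.Messages annotations, which the NetBeans annotation processor compiles back into Bundle.properties entries plus generated accessors. A minimal sketch of the pattern, using a key from this commit (the demo class is hypothetical):

import org.openide.util.NbBundle;
import static org.sleuthkit.autopsy.timeline.events.db.Bundle.progressWindow_msg_commitingDb;

class BundleDemo { // hypothetical demo class
    // The processor writes "progressWindow.msg.commitingDb=committing events db"
    // into the package's generated Bundle.properties and creates a static
    // Bundle.progressWindow_msg_commitingDb() accessor (dots become underscores).
    @NbBundle.Messages({"progressWindow.msg.commitingDb=committing events db"})
    String commitMessage() {
        return progressWindow_msg_commitingDb();
    }
}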

View File: EventDB.java

@ -88,9 +88,10 @@ import org.sqlite.SQLiteJDBCLoader;
*/
public class EventDB {
private PreparedStatement insertHashSetStmt;
private PreparedStatement insertHashHitStmt;
private PreparedStatement selectHashSetStmt;
private PreparedStatement dropEventsTableStmt;
private PreparedStatement dropHashSetHitsTableStmt;
private PreparedStatement dropHashSetsTableStmt;
private PreparedStatement dropDBInfoTableStmt;
/** enum to represent columns in the events table */
enum EventTableColumn {
@ -105,8 +106,9 @@ public class EventDB {
FULL_DESCRIPTION("full_description"), // NON-NLS
MED_DESCRIPTION("med_description"), // NON-NLS
SHORT_DESCRIPTION("short_description"), // NON-NLS
TIME("time"),
HASH_HIT("hash_hit"); // NON-NLS
TIME("time"), // NON-NLS
HASH_HIT("hash_hit"), // NON-NLS
TAGGED("tagged"); // NON-NLS
private final String columnName;
@ -183,12 +185,14 @@ public class EventDB {
private PreparedStatement getDataSourceIDsStmt;
private PreparedStatement insertRowStmt;
private PreparedStatement recordDBInfoStmt;
private PreparedStatement insertHashSetStmt;
private PreparedStatement insertHashHitStmt;
private PreparedStatement selectHashSetStmt;
private PreparedStatement countAllEventsStmt;
private final Set<PreparedStatement> preparedStatements = new HashSet<>();
private final ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock(true); //use fairness policy
private final Lock DBLock = rwLock.writeLock(); //using exclusive lock for all db ops for now
private final Lock DBLock = new ReentrantReadWriteLock(true).writeLock(); //using exclusive lock for all db ops for now
private EventDB(Case autoCase) throws SQLException, Exception {
//should this go into module output (or even cache, we should be able to rebuild it)?
@ -205,30 +209,6 @@ public class EventDB {
}
}
public Interval getSpanningInterval(Collection<Long> eventIDs) {
Interval span = null;
DBLock.lock();
try (Statement stmt = con.createStatement();
//You can't inject multiple values into one ? parameter in a prepared statement,
//so we make a new statement each time...
ResultSet rs = stmt.executeQuery("select Min(time), Max(time) from events where event_id in (" + StringUtils.join(eventIDs, ", ") + ")");) { // NON-NLS
while (rs.next()) {
span = new Interval(rs.getLong("Min(time)"), rs.getLong("Max(time)") + 1, DateTimeZone.UTC); // NON-NLS
}
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "Error executing get spanning interval query.", ex); // NON-NLS
} finally {
DBLock.unlock();
}
return span;
}
EventTransaction beginTransaction() {
return new EventTransaction();
}
void closeDBCon() {
if (con != null) {
try {
@ -241,6 +221,27 @@ public class EventDB {
con = null;
}
public Interval getSpanningInterval(Collection<Long> eventIDs) {
DBLock.lock();
try (Statement stmt = con.createStatement();
//You can't inject multiple values into one ? parameter in a prepared statement,
//so we make a new statement each time...
ResultSet rs = stmt.executeQuery("select Min(time), Max(time) from events where event_id in (" + StringUtils.join(eventIDs, ", ") + ")");) { // NON-NLS
while (rs.next()) {
return new Interval(rs.getLong("Min(time)"), rs.getLong("Max(time)") + 1, DateTimeZone.UTC); // NON-NLS
}
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "Error executing get spanning interval query.", ex); // NON-NLS
} finally {
DBLock.unlock();
}
return null;
}
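The comment above names a real JDBC limitation: one ? placeholder binds exactly one value, never a list. A hedged sketch of the standard alternative to string concatenation, generating one placeholder per id (not what this commit does, but safer against injection):

// Sketch only, not the commit's code: build "?,?,?" for N ids, then bind each id.
String placeholders = eventIDs.stream().map(id -> "?").collect(Collectors.joining(","));
try (PreparedStatement ps = con.prepareStatement(
        "select Min(time), Max(time) from events where event_id in (" + placeholders + ")")) {
    int idx = 1;
    for (Long id : eventIDs) {
        ps.setLong(idx++, id);
    }
    try (ResultSet rs = ps.executeQuery()) {
        // read Min(time)/Max(time) exactly as above
    }
}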
EventTransaction beginTransaction() {
return new EventTransaction();
}
void commitTransaction(EventTransaction tr, Boolean notify) {
if (tr.isClosed()) {
throw new IllegalArgumentException("can't close already closed transaction"); // NON-NLS
@ -248,24 +249,34 @@ public class EventDB {
tr.commit(notify);
}
/**
* @return the total number of events in the database, or -1 if there is an
* error.
*/
int countAllEvents() {
int result = -1;
DBLock.lock();
//TODO convert this to prepared statement -jm
try (ResultSet rs = con.createStatement().executeQuery("select count(*) as count from events")) { // NON-NLS
try (ResultSet rs = countAllEventsStmt.executeQuery()) { // NON-NLS
while (rs.next()) {
result = rs.getInt("count"); // NON-NLS
break;
return rs.getInt("count"); // NON-NLS
}
} catch (SQLException ex) {
Exceptions.printStackTrace(ex);
LOGGER.log(Level.SEVERE, "Error counting all events", ex);
} finally {
DBLock.unlock();
}
return result;
return -1;
}
Map<EventType, Long> countEvents(ZoomParams params) {
/**
* get the count of all events that fit the given zoom params, organized by
* the EventType at the level specified in the ZoomParams
*
* @param params the params that control what events to count and how to
* organize the returned map
*
* @return a map from event type (of the requested level) to event counts
*/
Map<EventType, Long> countEventsByType(ZoomParams params) {
if (params.getTimeRange() != null) {
return countEvents(params.getTimeRange().getStartMillis() / 1000,
params.getTimeRange().getEndMillis() / 1000,
@ -275,22 +286,25 @@ public class EventDB {
}
}
void dropEventsTable() {
//TODO: use prepared statement - jm
/**
* drop the tables from this database and recreate them in order to start
* over.
*/
void reInitializeDB() {
DBLock.lock();
try (Statement createStatement = con.createStatement()) {
createStatement.execute("drop table if exists events"); // NON-NLS
try {
dropEventsTableStmt.executeUpdate();
dropHashSetHitsTableStmt.executeUpdate();
dropHashSetsTableStmt.executeUpdate();
dropDBInfoTableStmt.executeUpdate();
initializeDB();
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "could not drop old events table", ex); // NON-NLS
LOGGER.log(Level.SEVERE, "could not drop old tables table", ex); // NON-NLS
} finally {
DBLock.unlock();
}
}
List<AggregateEvent> getAggregatedEvents(ZoomParams params) {
return getAggregatedEvents(params.getTimeRange(), params.getFilter(), params.getTypeZoomLevel(), params.getDescrLOD());
}
Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter) {
long start = timeRange.getStartMillis() / 1000;
long end = timeRange.getEndMillis() / 1000;
@ -378,7 +392,7 @@ public class EventDB {
boolean hasNewColumns() {
/* this relies on the fact that no tskObj has ID 0 but 0 is the default
* value for the datasource_id column in the events table. */
return hasHashHitColumn() && hasDataSourceIDColumn()
return hasHashHitColumn() && hasDataSourceIDColumn() && hasTaggedColumn()
&& (getDataSourceIDs().isEmpty() == false);
}
@ -485,7 +499,7 @@ public class EventDB {
+ "PRIMARY KEY (key))"; // NON-NLS
stmt.execute(sql);
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "problem creating db_info table", ex); // NON-NLS
LOGGER.log(Level.SEVERE, "problem creating db_info table", ex); // NON-NLS
}
try (Statement stmt = con.createStatement()) {
@ -516,6 +530,15 @@ public class EventDB {
LOGGER.log(Level.SEVERE, "problem upgrading events table", ex); // NON-NLS
}
}
if (hasTaggedColumn() == false) {
try (Statement stmt = con.createStatement()) {
String sql = "ALTER TABLE events ADD COLUMN tagged INTEGER"; // NON-NLS
stmt.execute(sql);
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "problem upgrading events table", ex); // NON-NLS
}
}
if (hasHashHitColumn() == false) {
try (Statement stmt = con.createStatement()) {
@ -553,8 +576,8 @@ public class EventDB {
try {
insertRowStmt = prepareStatement(
"INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hash_hit) " // NON-NLS
+ "VALUES (?,?,?,?,?,?,?,?,?,?,?)"); // NON-NLS
"INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hash_hit, tagged) " // NON-NLS
+ "VALUES (?,?,?,?,?,?,?,?,?,?,?,?)"); // NON-NLS
getDataSourceIDsStmt = prepareStatement("select distinct datasource_id from events"); // NON-NLS
getMaxTimeStmt = prepareStatement("select Max(time) as max from events"); // NON-NLS
@ -565,6 +588,11 @@ public class EventDB {
insertHashSetStmt = prepareStatement("insert or ignore into hash_sets (hash_set_name) values (?)");
selectHashSetStmt = prepareStatement("select hash_set_id from hash_sets where hash_set_name = ?");
insertHashHitStmt = prepareStatement("insert or ignore into hash_set_hits (hash_set_id, event_id) values (?,?)");
countAllEventsStmt = prepareStatement("select count(*) as count from events");
dropEventsTableStmt = prepareStatement("drop table if exists events");
dropHashSetHitsTableStmt = prepareStatement("drop table if exists hash_set_hits");
dropHashSetsTableStmt = prepareStatement("drop table if exists hash_sets");
dropDBInfoTableStmt = prepareStatement("drop table if exists db_info");
} catch (SQLException sQLException) {
LOGGER.log(Level.SEVERE, "failed to prepareStatment", sQLException); // NON-NLS
}
@ -624,17 +652,21 @@ public class EventDB {
return hasDBColumn(EventTableColumn.DATA_SOURCE_ID);
}
private boolean hasTaggedColumn() {
return hasDBColumn(EventTableColumn.TAGGED);
}
private boolean hasHashHitColumn() {
return hasDBColumn(EventTableColumn.HASH_HIT);
}
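hasDBColumn itself is outside this hunk; a plausible implementation against SQLite (an assumption about the surrounding code, not part of this commit) inspects PRAGMA table_info:

// Hypothetical sketch: report whether the events table has the given column.
private boolean hasDBColumn(EventTableColumn column) {
    try (Statement stmt = con.createStatement();
            ResultSet rs = stmt.executeQuery("PRAGMA table_info(events)")) { // NON-NLS
        while (rs.next()) {
            if (column.toString().equals(rs.getString("name"))) { // NON-NLS
                return true;
            }
        }
    } catch (SQLException ex) {
        LOGGER.log(Level.SEVERE, "problem checking for column " + column, ex); // NON-NLS
    }
    return false;
}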
void insertEvent(long time, EventType type, long datasourceID, Long objID,
Long artifactID, String fullDescription, String medDescription,
String shortDescription, TskData.FileKnown known, Set<String> hashSets) {
String shortDescription, TskData.FileKnown known, Set<String> hashSets, boolean tagged) {
EventTransaction trans = beginTransaction();
insertEvent(time, type, datasourceID, objID, artifactID, fullDescription, medDescription, shortDescription, known, hashSets, trans);
commitTransaction(trans, true);
EventTransaction transaction = beginTransaction();
insertEvent(time, type, datasourceID, objID, artifactID, fullDescription, medDescription, shortDescription, known, hashSets, tagged, transaction);
commitTransaction(transaction, true);
}
/**
@ -646,6 +678,7 @@ public class EventDB {
void insertEvent(long time, EventType type, long datasourceID, Long objID,
Long artifactID, String fullDescription, String medDescription,
String shortDescription, TskData.FileKnown known, Set<String> hashSetNames,
boolean tagged,
EventTransaction transaction) {
if (transaction.isClosed()) {
@ -660,7 +693,7 @@ public class EventDB {
DBLock.lock();
try {
//"INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hashHit) "
//"INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hashHit, tagged) "
insertRowStmt.clearParameters();
insertRowStmt.setLong(1, datasourceID);
if (objID != null) {
@ -689,6 +722,7 @@ public class EventDB {
insertRowStmt.setByte(10, known == null ? TskData.FileKnown.UNKNOWN.getFileKnownValue() : known.getFileKnownValue());
insertRowStmt.setInt(11, hashSetNames.isEmpty() ? 0 : 1);
insertRowStmt.setInt(12, tagged ? 1 : 0);
insertRowStmt.executeUpdate();
@ -866,6 +900,10 @@ public class EventDB {
return typeMap;
}
List<AggregateEvent> getAggregatedEvents(ZoomParams params) {
return getAggregatedEvents(params.getTimeRange(), params.getFilter(), params.getTypeZoomLevel(), params.getDescrLOD());
}
/**
* //TODO: update javadoc //TODO: split this into helper methods
*
@ -938,6 +976,14 @@ public class EventDB {
hashHits.add(executeQuery.getLong(EventTableColumn.EVENT_ID.toString()));
}
}
HashSet<Long> tagged = new HashSet<>();
try (Statement st3 = con.createStatement();) {
ResultSet executeQuery = st3.executeQuery("select event_id from events where event_id in (" + eventIDS + ") and tagged = 1");
while (executeQuery.next()) {
tagged.add(executeQuery.getLong(EventTableColumn.EVENT_ID.toString()));
}
}
EventType type = useSubTypes ? RootEventType.allTypes.get(rs.getInt(EventTableColumn.SUB_TYPE.toString())) : BaseTypes.values()[rs.getInt(EventTableColumn.BASE_TYPE.toString())];
@ -946,6 +992,7 @@ public class EventDB {
type,
Stream.of(eventIDS.split(",")).map(Long::valueOf).collect(Collectors.toSet()), // NON-NLS
hashHits,
tagged,
rs.getString(descriptionColumn), lod);
//put events in map from type/description -> event

View File: EventsRepository.java

@ -44,12 +44,18 @@ import org.apache.commons.lang3.StringUtils;
import org.joda.time.Interval;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.TagsManager;
import org.sleuthkit.autopsy.coreutils.HashHitUtils;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.ProgressWindow;
import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
import org.sleuthkit.autopsy.timeline.events.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.events.TimeLineEvent;
import static org.sleuthkit.autopsy.timeline.events.db.Bundle.msgdlg_problem_text;
import static org.sleuthkit.autopsy.timeline.events.db.Bundle.progressWindow_msg_commitingDb;
import static org.sleuthkit.autopsy.timeline.events.db.Bundle.progressWindow_msg_populateMacEventsFiles;
import static org.sleuthkit.autopsy.timeline.events.db.Bundle.progressWindow_msg_reinit_db;
import static org.sleuthkit.autopsy.timeline.events.db.Bundle.progressWindow_populatingXevents;
import org.sleuthkit.autopsy.timeline.events.type.ArtifactEventType;
import org.sleuthkit.autopsy.timeline.events.type.EventType;
import org.sleuthkit.autopsy.timeline.events.type.FileSystemTypes;
@ -131,14 +137,13 @@ public class EventsRepository {
}).build(CacheLoader.from(eventDB::getEventById));
eventCountsCache = CacheBuilder.newBuilder().maximumSize(1000L).expireAfterAccess(10, TimeUnit.MINUTES).removalListener((RemovalNotification<ZoomParams, Map<EventType, Long>> rn) -> {
//LOGGER.log(Level.INFO, "evicting counts: {0}", rn.toString());
}).build(CacheLoader.from(eventDB::countEvents));
}).build(CacheLoader.from(eventDB::countEventsByType));
aggregateEventsCache = CacheBuilder.newBuilder().maximumSize(1000L).expireAfterAccess(10, TimeUnit.MINUTES).removalListener((RemovalNotification<ZoomParams, List<AggregateEvent>> rn) -> {
//LOGGER.log(Level.INFO, "evicting aggregated events: {0}", rn.toString());
}).build(CacheLoader.from(eventDB::getAggregatedEvents));
maxCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMaxTime));
minCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMinTime));
this.modelInstance = new FilteredEventsModel(this, currentStateProperty);
}
/** @return min time (in seconds from unix epoch) */
@ -231,30 +236,35 @@ public class EventsRepository {
//TODO: can we avoid this with a state listener? does it amount to the same thing?
//post population operation to execute
private final Runnable r;
private final Runnable postPopulationOperation;
private final SleuthkitCase skCase;
private final TagsManager tagsManager;
public DBPopulationWorker(Runnable r) {
public DBPopulationWorker(Runnable postPopulationOperation) {
progressDialog = new ProgressWindow(null, true, this);
progressDialog.setVisible(true);
this.r = r;
skCase = autoCase.getSleuthkitCase();
tagsManager = autoCase.getServices().getTagsManager();
this.postPopulationOperation = postPopulationOperation;
}
@Override
@NbBundle.Messages({"progressWindow.msg.populateMacEventsFiles=populating mac events for files:",
"progressWindow.msg.reinit_db=(re)initializing events database",
"progressWindow.msg.commitingDb=committing events db"})
protected Void doInBackground() throws Exception {
process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, NbBundle.getMessage(this.getClass(),
"EventsRepository.progressWindow.msg.reinit_db"), "")));
process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, progressWindow_msg_reinit_db(), "")));
//reset database
//TODO: can we do more incremental updates? -jm
eventDB.dropEventsTable();
eventDB.initializeDB();
eventDB.reInitializeDB();
//grab ids of all files
SleuthkitCase skCase = autoCase.getSleuthkitCase();
List<Long> files = skCase.findAllFileIdsWhere("name != '.' AND name != '..'");
final int numFiles = files.size();
process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, numFiles, NbBundle.getMessage(this.getClass(),
"EventsRepository.progressWindow.msg.populateMacEventsFiles"), "")));
process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, numFiles, progressWindow_msg_populateMacEventsFiles(), "")));
//insert file events into db
int i = 1;
@ -266,7 +276,9 @@ public class EventsRepository {
try {
AbstractFile f = skCase.getAbstractFileById(fID);
if (f != null) {
if (f == null) {
LOGGER.log(Level.WARNING, "Failed to get data for file : {0}", fID); // NON-NLS
} else {
//TODO: This is broken for logical files? fix -jm
//TODO: logical files don't necessarily have valid timestamps, so ... -jm
final String uniquePath = f.getUniquePath();
@ -279,26 +291,24 @@ public class EventsRepository {
final TskData.FileKnown known = f.getKnown();
boolean hashHit = f.getArtifactsCount(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT) > 0;
Set<String> hashSets = hashHit ? HashHitUtils.getHashSetNamesForFile(skCase, f.getId()) : Collections.emptySet();
boolean tagged = !tagsManager.getContentTagsByContent(f).isEmpty();
//insert it into the db if time is > 0 => time is legitimate (drops logical files)
if (f.getAtime() > 0) {
eventDB.insertEvent(f.getAtime(), FileSystemTypes.FILE_ACCESSED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, trans);
eventDB.insertEvent(f.getAtime(), FileSystemTypes.FILE_ACCESSED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans);
}
if (f.getMtime() > 0) {
eventDB.insertEvent(f.getMtime(), FileSystemTypes.FILE_MODIFIED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, trans);
eventDB.insertEvent(f.getMtime(), FileSystemTypes.FILE_MODIFIED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans);
}
if (f.getCtime() > 0) {
eventDB.insertEvent(f.getCtime(), FileSystemTypes.FILE_CHANGED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, trans);
eventDB.insertEvent(f.getCtime(), FileSystemTypes.FILE_CHANGED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans);
}
if (f.getCrtime() > 0) {
eventDB.insertEvent(f.getCrtime(), FileSystemTypes.FILE_CREATED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, trans);
eventDB.insertEvent(f.getCrtime(), FileSystemTypes.FILE_CREATED, datasourceID, fID, null, uniquePath, medD, shortDesc, known, hashSets, tagged, trans);
}
process(Arrays.asList(new ProgressWindow.ProgressUpdate(i, numFiles,
NbBundle.getMessage(this.getClass(),
"EventsRepository.progressWindow.msg.populateMacEventsFiles2"), f.getName())));
} else {
LOGGER.log(Level.WARNING, "failed to look up data for file : {0}", fID); // NON-NLS
progressWindow_msg_populateMacEventsFiles(), f.getName())));
}
} catch (TskCoreException tskCoreException) {
LOGGER.log(Level.WARNING, "failed to insert mac event for file : " + fID, tskCoreException); // NON-NLS
@ -315,12 +325,11 @@ public class EventsRepository {
}
//skip file_system events, they are already handled above.
if (type instanceof ArtifactEventType) {
populateEventType((ArtifactEventType) type, trans, skCase);
populateEventType((ArtifactEventType) type, trans);
}
}
process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, NbBundle.getMessage(this.getClass(),
"EventsRepository.progressWindow.msg.commitingDb"), "")));
process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, -1, progressWindow_msg_commitingDb(), "")));
if (isCancelled()) {
eventDB.rollBackTransaction(trans);
} else {
@ -346,6 +355,8 @@ public class EventsRepository {
}
@Override
@NbBundle.Messages("msgdlg.problem.text=There was a problem populating the timeline."
+ " Not all events may be present or accurate. See the log for details.")
protected void done() {
super.done();
try {
@ -356,14 +367,12 @@ public class EventsRepository {
LOGGER.log(Level.INFO, "Database population was cancelled by the user. Not all events may be present or accurate. See the log for details.", ex); // NON-NLS
} catch (InterruptedException | ExecutionException ex) {
LOGGER.log(Level.WARNING, "Exception while populating database.", ex); // NON-NLS
JOptionPane.showMessageDialog(null, NbBundle.getMessage(this.getClass(),
"EventsRepository.msgdlg.problem.text"));
JOptionPane.showMessageDialog(null, msgdlg_problem_text());
} catch (Exception ex) {
LOGGER.log(Level.WARNING, "Unexpected exception while populating database.", ex); // NON-NLS
JOptionPane.showMessageDialog(null, NbBundle.getMessage(this.getClass(),
"EventsRepository.msgdlg.problem.text"));
JOptionPane.showMessageDialog(null, msgdlg_problem_text());
}
r.run(); //execute post db population operation
postPopulationOperation.run(); //execute post db population operation
}
/**
@ -373,16 +382,15 @@ public class EventsRepository {
* @param trans the db transaction to use
*/
private void populateEventType(final ArtifactEventType type, EventDB.EventTransaction trans, SleuthkitCase skCase) {
@NbBundle.Messages({"# {0} - event type ", "progressWindow.populatingXevents=populating {0} events"})
private void populateEventType(final ArtifactEventType type, EventDB.EventTransaction trans) {
try {
//get all the blackboard artifacts corresponding to the given event sub_type
final ArrayList<BlackboardArtifact> blackboardArtifacts = skCase.getBlackboardArtifacts(type.getArtifactType());
final int numArtifacts = blackboardArtifacts.size();
process(Arrays.asList(new ProgressWindow.ProgressUpdate(0, numArtifacts,
NbBundle.getMessage(this.getClass(),
"EventsRepository.progressWindow.populatingXevents",
type.toString()), "")));
progressWindow_populatingXevents(type.toString()), "")));
int i = 0;
for (final BlackboardArtifact bbart : blackboardArtifacts) {
@ -395,7 +403,11 @@ public class EventsRepository {
AbstractFile f = skCase.getAbstractFileById(bbart.getObjectID());
boolean hashHit = f.getArtifactsCount(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT) > 0;
Set<String> hashSets = hashHit ? HashHitUtils.getHashSetNamesForFile(skCase, f.getId()) : Collections.emptySet();
eventDB.insertEvent(eventDescription.getTime(), type, datasourceID, bbart.getObjectID(), bbart.getArtifactID(), eventDescription.getFullDescription(), eventDescription.getMedDescription(), eventDescription.getShortDescription(), null, hashSets, trans);
boolean tagged = tagsManager.getContentTagsByContent(f).isEmpty() == false;
tagged |= tagsManager.getBlackboardArtifactTagsByArtifact(bbart).isEmpty() == false;
eventDB.insertEvent(eventDescription.getTime(), type, datasourceID, bbart.getObjectID(), bbart.getArtifactID(), eventDescription.getFullDescription(), eventDescription.getMedDescription(), eventDescription.getShortDescription(), null, hashSets, tagged, trans);
}
i++;

View File: AggregateEventNode.java

@ -22,7 +22,6 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import java.util.stream.Collectors;
@ -73,9 +72,13 @@ import org.sleuthkit.autopsy.timeline.filters.TextFilter;
import org.sleuthkit.autopsy.timeline.filters.TypeFilter;
import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD;
import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskCoreException;
/** Represents an {@link AggregateEvent} in a {@link EventDetailChart}. */
@ -84,6 +87,7 @@ public class AggregateEventNode extends StackPane {
private static final Image HASH_PIN = new Image(AggregateEventNode.class.getResourceAsStream("/org/sleuthkit/autopsy/images/hashset_hits.png"));
private final static Image PLUS = new Image("/org/sleuthkit/autopsy/timeline/images/plus-button.png"); // NON-NLS
private final static Image MINUS = new Image("/org/sleuthkit/autopsy/timeline/images/minus-button.png"); // NON-NLS
private final static Image TAG = new Image("/org/sleuthkit/autopsy/images/green-tag-icon-16.png"); // NON-NLS
private static final CornerRadii CORNER_RADII = new CornerRadii(3);
@ -145,7 +149,7 @@ public class AggregateEventNode extends StackPane {
private DescriptionVisibility descrVis;
private final SleuthkitCase sleuthkitCase;
private final FilteredEventsModel eventsModel;
private Map<String, Long> hashSetCounts = null;
private Tooltip tooltip;
public AggregateEventNode(final AggregateEvent event, AggregateEventNode parentEventNode, EventDetailChart chart) {
@ -157,10 +161,14 @@ public class AggregateEventNode extends StackPane {
eventsModel = chart.getController().getEventsModel();
final Region region = new Region();
HBox.setHgrow(region, Priority.ALWAYS);
ImageView imageView = new ImageView(HASH_PIN);
final HBox hBox = new HBox(descrLabel, countLabel, region, imageView, minusButton, plusButton);
ImageView hashIV = new ImageView(HASH_PIN);
ImageView tagIV = new ImageView(TAG);
final HBox hBox = new HBox(descrLabel, countLabel, region, hashIV, tagIV, minusButton, plusButton);
if (event.getEventIDsWithHashHits().isEmpty()) {
hBox.getChildren().remove(imageView);
hBox.getChildren().remove(hashIV);
}
if (event.getEventIDsWithTags().isEmpty()) {
hBox.getChildren().remove(tagIV);
}
hBox.setPrefWidth(USE_COMPUTED_SIZE);
hBox.setMinWidth(USE_PREF_SIZE);
@ -252,39 +260,70 @@ public class AggregateEventNode extends StackPane {
}
private void installTooltip() {
//TODO: all this work should probably go on a background thread...
if (tooltip == null) {
String collect = "";
HashMap<String, Long> hashSetCounts = new HashMap<>();
if (!event.getEventIDsWithHashHits().isEmpty()) {
if (Objects.isNull(hashSetCounts)) {
hashSetCounts = new HashMap<>();
try {
for (TimeLineEvent tle : eventsModel.getEventsById(event.getEventIDsWithHashHits())) {
ArrayList<BlackboardArtifact> blackboardArtifacts = sleuthkitCase.getBlackboardArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT, tle.getFileID());
for (BlackboardArtifact artf : blackboardArtifacts) {
for (BlackboardAttribute attr : artf.getAttributes()) {
if (attr.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()) {
hashSetCounts.merge(attr.getValueString(), 1L, Long::sum);
};
try {
for (TimeLineEvent tle : eventsModel.getEventsById(event.getEventIDsWithHashHits())) {
ArrayList<BlackboardArtifact> blackboardArtifacts = sleuthkitCase.getBlackboardArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT, tle.getFileID());
for (BlackboardArtifact artf : blackboardArtifacts) {
for (BlackboardAttribute attr : artf.getAttributes()) {
if (attr.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()) {
hashSetCounts.merge(attr.getValueString(), 1L, Long::sum);
}
}
}
} catch (TskCoreException ex) {
Logger.getLogger(AggregateEventNode.class.getName()).log(Level.SEVERE, "Error getting hashset hit info for event.", ex);
}
} catch (TskCoreException ex) {
Logger.getLogger(AggregateEventNode.class.getName()).log(Level.SEVERE, "Error getting hashset hit info for event.", ex);
}
collect = hashSetCounts.entrySet().stream()
.map((Map.Entry<String, Long> t) -> t.getKey() + " : " + t.getValue())
.collect(Collectors.joining("\n"));
}
Map<Long, TagName> tags = new HashMap<>();
if (!event.getEventIDsWithTags().isEmpty()) {
try {
for (TimeLineEvent tle : eventsModel.getEventsById(event.getEventIDsWithTags())) {
AbstractFile abstractFileById = sleuthkitCase.getAbstractFileById(tle.getFileID());
List<ContentTag> contentTagsByContent = sleuthkitCase.getContentTagsByContent(abstractFileById);
for (ContentTag tag : contentTagsByContent) {
tags.putIfAbsent(tag.getId(), tag.getName());
}
Long artifactID = tle.getArtifactID();
if (artifactID != null && artifactID != 0) {
BlackboardArtifact blackboardArtifact = sleuthkitCase.getBlackboardArtifact(artifactID);
List<BlackboardArtifactTag> blackboardArtifactTagsByArtifact = sleuthkitCase.getBlackboardArtifactTagsByArtifact(blackboardArtifact);
for (BlackboardArtifactTag tag : blackboardArtifactTagsByArtifact) {
tags.putIfAbsent(tag.getId(), tag.getName());
}
}
}
} catch (TskCoreException ex) {
Logger.getLogger(AggregateEventNode.class.getName()).log(Level.SEVERE, "Error getting tag info for event.", ex);
}
}
Map<String, Long> tagCounts = tags.values().stream()
.collect(Collectors.toMap(TagName::getDisplayName, anything -> 1L, Long::sum));
String hashSetCountsString = hashSetCounts.entrySet().stream()
.map((Map.Entry<String, Long> t) -> t.getKey() + " : " + t.getValue())
.collect(Collectors.joining("\n"));
String tagCountsString = tagCounts.entrySet().stream()
.map((Map.Entry<String, Long> t) -> t.getKey() + " : " + t.getValue())
.collect(Collectors.joining("\n"));
tooltip = new Tooltip(
NbBundle.getMessage(this.getClass(), "AggregateEventNode.installTooltip.text",
getEvent().getEventIDs().size(), getEvent().getType(), getEvent().getDescription(),
getEvent().getSpan().getStart().toString(TimeLineController.getZonedFormatter()),
getEvent().getSpan().getEnd().toString(TimeLineController.getZonedFormatter()))
+ (collect.isEmpty() ? "" : "\n\nHash Set Hits\n" + collect));
+ (hashSetCountsString.isEmpty() ? "" : "\n\nHash Set Hits\n" + hashSetCountsString)
+ (tagCountsString.isEmpty() ? "" : "\n\nTags\n" + tagCountsString)
);
Tooltip.install(AggregateEventNode.this, tooltip);
}
}
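The tagCounts collector above uses toMap with a merge function purely as a counting idiom; an equivalent and arguably clearer form (a stylistic alternative, not what this commit ships) is groupingBy with counting:

// Equivalent to the toMap(..., anything -> 1L, Long::sum) idiom above.
Map<String, Long> tagCounts = tags.values().stream()
        .collect(Collectors.groupingBy(TagName::getDisplayName, Collectors.counting()));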