Mirror of https://github.com/overcuriousity/autopsy-flatpak.git (synced 2025-07-16 01:37:43 +00:00)
fix bug in inserting Tags into new table, cleanup in EventDB
cleanup member order in FilteredEventsModel
This commit is contained in:
parent: 9296507ea5
commit: e708a43a93
FilteredEventsModel.java

@@ -20,7 +20,6 @@ package org.sleuthkit.autopsy.timeline.datamodel;
 import com.google.common.eventbus.EventBus;
 import java.util.Collection;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -121,34 +120,6 @@ public final class FilteredEventsModel {
     private final EventsRepository repo;
     private final Case autoCase;
 
-    /**
-     * @return the default filter used at startup
-     */
-    public RootFilter getDefaultFilter() {
-        DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();
-
-        repo.getDatasourcesMap().entrySet().stream().forEach((Map.Entry<Long, String> t) -> {
-            DataSourceFilter dataSourceFilter = new DataSourceFilter(t.getValue(), t.getKey());
-            dataSourceFilter.setSelected(Boolean.TRUE);
-            dataSourcesFilter.addSubFilter(dataSourceFilter);
-        });
-
-        HashHitsFilter hashHitsFilter = new HashHitsFilter();
-        repo.getHashSetMap().entrySet().stream().forEach((Map.Entry<Long, String> t) -> {
-            HashSetFilter hashSetFilter = new HashSetFilter(t.getValue(), t.getKey());
-            hashSetFilter.setSelected(Boolean.TRUE);
-            hashHitsFilter.addSubFilter(hashSetFilter);
-        });
-
-        TagsFilter tagsFilter = new TagsFilter();
-        repo.getTagNames().stream().forEach(t -> {
-            TagNameFilter tagNameFilter = new TagNameFilter(t, autoCase);
-            tagNameFilter.setSelected(Boolean.TRUE);
-            tagsFilter.addSubFilter(tagNameFilter);
-        });
-        return new RootFilter(new HideKnownFilter(), tagsFilter, hashHitsFilter, new TextFilter(), new TypeFilter(RootEventType.getInstance()), dataSourcesFilter);
-    }
-
     public FilteredEventsModel(EventsRepository repo, ReadOnlyObjectProperty<ZoomParams> currentStateProperty) {
         this.repo = repo;
         this.autoCase = repo.getAutoCase();
@@ -192,14 +163,77 @@ public final class FilteredEventsModel {
         requestedZoomParamters.bind(currentStateProperty);
     }
 
-    public Interval getBoundingEventsInterval() {
-        return repo.getBoundingEventsInterval(zoomParamtersProperty().get().getTimeRange(), zoomParamtersProperty().get().getFilter());
-    }
-
     synchronized public ReadOnlyObjectProperty<ZoomParams> zoomParamtersProperty() {
         return requestedZoomParamters.getReadOnlyProperty();
     }
 
+    /**
+     * @return a read only view of the time range requested via
+     *         {@link #requestTimeRange(org.joda.time.Interval)}
+     */
+    synchronized public ReadOnlyObjectProperty<Interval> timeRangeProperty() {
+        if (requestedTimeRange.get() == null) {
+            requestedTimeRange.set(getSpanningInterval());
+        }
+        return requestedTimeRange.getReadOnlyProperty();
+    }
+
+    synchronized public ReadOnlyObjectProperty<DescriptionLOD> descriptionLODProperty() {
+        return requestedLOD.getReadOnlyProperty();
+    }
+
+    synchronized public ReadOnlyObjectProperty<RootFilter> filterProperty() {
+        return requestedFilter.getReadOnlyProperty();
+    }
+
+    synchronized public ReadOnlyObjectProperty<EventTypeZoomLevel> eventTypeZoomProperty() {
+        return requestedTypeZoom.getReadOnlyProperty();
+    }
+
+    synchronized public DescriptionLOD getDescriptionLOD() {
+        return requestedLOD.get();
+    }
+
+    synchronized public RootFilter getFilter() {
+        return requestedFilter.get();
+    }
+
+    synchronized public EventTypeZoomLevel getEventTypeZoom() {
+        return requestedTypeZoom.get();
+    }
+
+    /**
+     * @return the default filter used at startup
+     */
+    public RootFilter getDefaultFilter() {
+        DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();
+
+        repo.getDatasourcesMap().entrySet().stream().forEach((Map.Entry<Long, String> t) -> {
+            DataSourceFilter dataSourceFilter = new DataSourceFilter(t.getValue(), t.getKey());
+            dataSourceFilter.setSelected(Boolean.TRUE);
+            dataSourcesFilter.addSubFilter(dataSourceFilter);
+        });
+
+        HashHitsFilter hashHitsFilter = new HashHitsFilter();
+        repo.getHashSetMap().entrySet().stream().forEach((Map.Entry<Long, String> t) -> {
+            HashSetFilter hashSetFilter = new HashSetFilter(t.getValue(), t.getKey());
+            hashSetFilter.setSelected(Boolean.TRUE);
+            hashHitsFilter.addSubFilter(hashSetFilter);
+        });
+
+        TagsFilter tagsFilter = new TagsFilter();
+        repo.getTagNames().stream().forEach(t -> {
+            TagNameFilter tagNameFilter = new TagNameFilter(t, autoCase);
+            tagNameFilter.setSelected(Boolean.TRUE);
+            tagsFilter.addSubFilter(tagNameFilter);
+        });
+        return new RootFilter(new HideKnownFilter(), tagsFilter, hashHitsFilter, new TextFilter(), new TypeFilter(RootEventType.getInstance()), dataSourcesFilter);
+    }
+
+    public Interval getBoundingEventsInterval() {
+        return repo.getBoundingEventsInterval(zoomParamtersProperty().get().getTimeRange(), zoomParamtersProperty().get().getFilter());
+    }
+
     public TimeLineEvent getEventById(Long eventID) {
         return repo.getEventById(eventID);
     }
@@ -240,25 +274,6 @@ public final class FilteredEventsModel {
         return repo.countEvents(new ZoomParams(timeRange, typeZoom, filter, null));
     }
 
-    /**
-     * @return a read only view of the time range requested via
-     *         {@link #requestTimeRange(org.joda.time.Interval)}
-     */
-    synchronized public ReadOnlyObjectProperty<Interval> timeRangeProperty() {
-        if (requestedTimeRange.get() == null) {
-            requestedTimeRange.set(getSpanningInterval());
-        }
-        return requestedTimeRange.getReadOnlyProperty();
-    }
-
-    synchronized public ReadOnlyObjectProperty<DescriptionLOD> descriptionLODProperty() {
-        return requestedLOD.getReadOnlyProperty();
-    }
-
-    synchronized public ReadOnlyObjectProperty<RootFilter> filterProperty() {
-        return requestedFilter.getReadOnlyProperty();
-    }
-
     /**
      * @return the smallest interval spanning all the events from the
      *         repository, ignoring any filters or requested ranges
@@ -324,29 +339,17 @@ public final class FilteredEventsModel {
         return repo.getAggregatedEvents(params);
     }
 
-    synchronized public ReadOnlyObjectProperty<EventTypeZoomLevel> eventTypeZoomProperty() {
-        return requestedTypeZoom.getReadOnlyProperty();
-    }
-
-    synchronized public EventTypeZoomLevel getEventTypeZoom() {
-        return requestedTypeZoom.get();
-    }
-
-    synchronized public DescriptionLOD getDescriptionLOD() {
-        return requestedLOD.get();
-    }
-
     synchronized public boolean handleContentTagAdded(ContentTagAddedEvent evt) {
         ContentTag contentTag = evt.getTag();
         Content content = contentTag.getContent();
-        HashSet<Long> updatedEventIDs = repo.addTag(content.getId(), null, contentTag);
+        Set<Long> updatedEventIDs = repo.addTag(content.getId(), null, contentTag);
         return postTagsUpdated(updatedEventIDs);
     }
 
     synchronized public boolean handleArtifactTagAdded(BlackBoardArtifactTagAddedEvent evt) {
         BlackboardArtifactTag artifactTag = evt.getTag();
         BlackboardArtifact artifact = artifactTag.getArtifact();
-        HashSet<Long> updatedEventIDs = repo.addTag(artifact.getObjectID(), artifact.getArtifactID(), artifactTag);;
+        Set<Long> updatedEventIDs = repo.addTag(artifact.getObjectID(), artifact.getArtifactID(), artifactTag);;
         return postTagsUpdated(updatedEventIDs);
     }
 
@@ -355,7 +358,7 @@ public final class FilteredEventsModel {
         Content content = contentTag.getContent();
         try {
             boolean tagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false;
-            HashSet<Long> updatedEventIDs = repo.deleteTag(content.getId(), null, contentTag, tagged);
+            Set<Long> updatedEventIDs = repo.deleteTag(content.getId(), null, contentTag, tagged);
             return postTagsUpdated(updatedEventIDs);
         } catch (TskCoreException ex) {
             LOGGER.log(Level.SEVERE, "unable to determine tagged status of content.", ex);
@@ -368,7 +371,7 @@ public final class FilteredEventsModel {
         BlackboardArtifact artifact = artifactTag.getArtifact();
         try {
             boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false;
-            HashSet<Long> updatedEventIDs = repo.deleteTag(artifact.getObjectID(), artifact.getArtifactID(), artifactTag, tagged);
+            Set<Long> updatedEventIDs = repo.deleteTag(artifact.getObjectID(), artifact.getArtifactID(), artifactTag, tagged);
             return postTagsUpdated(updatedEventIDs);
         } catch (TskCoreException ex) {
             LOGGER.log(Level.SEVERE, "unable to determine tagged status of artifact.", ex);
@@ -376,7 +379,7 @@ public final class FilteredEventsModel {
         return false;
     }
 
-    private boolean postTagsUpdated(HashSet<Long> updatedEventIDs) {
+    private boolean postTagsUpdated(Set<Long> updatedEventIDs) {
         boolean tagsUpdated = !updatedEventIDs.isEmpty();
         if (tagsUpdated) {
             eventbus.post(new TagsUpdatedEvent(updatedEventIDs));
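Aside: the recurring HashSet-to-Set change in the handlers above is the usual widening of signatures to the collection interface. A minimal sketch of the pattern (hypothetical names, not code from this commit):

    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Set;

    class SignatureWideningSketch {
        // Declaring Set keeps HashSet an implementation detail: the concrete
        // type can change later without touching callers, and failure paths
        // can hand back the allocation-free Collections.emptySet().
        static Set<Long> collectIds(long... raw) {
            if (raw.length == 0) {
                return Collections.emptySet();
            }
            Set<Long> ids = new HashSet<>();
            for (long id : raw) {
                ids.add(id);
            }
            return ids;
        }
    }

EventDB's addTag/deleteTag below adopt exactly this shape, returning Collections.emptySet() on the SQLException path instead of a half-filled out-parameter.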
EventDB.java

@@ -731,48 +731,47 @@ public class EventDB {
         }
     }
 
-    HashSet<Long> addTag(long objectID, Long artifactID, Tag tag) {
-        HashSet<Long> eventIDs = new HashSet<>();
-
+    Set<Long> addTag(long objectID, Long artifactID, Tag tag) {
         DBLock.lock();
         try {
-            markEventsTagged(objectID, artifactID, eventIDs, true);
+            Set<Long> eventIDs = markEventsTagged(objectID, artifactID, true);
             for (Long eventID : eventIDs) {
                 //could this be one insert? is there a performance win?
                 //"INSERT OR IGNORE INTO tags (tag_id, tag_name_id, event_id) values (?,?,?)"
                 insertTagStmt.clearParameters();
                 insertTagStmt.setLong(1, tag.getId());
                 insertTagStmt.setLong(2, tag.getName().getId());
-                insertTagStmt.setLong(1, eventID);
+                insertTagStmt.setLong(3, eventID);
                 insertTagStmt.executeUpdate();
             }
+            return eventIDs;
        } catch (SQLException ex) {
             LOGGER.log(Level.SEVERE, "failed to add tag to event", ex); // NON-NLS
         } finally {
             DBLock.unlock();
         }
-        return eventIDs;
+        return Collections.emptySet();
     }
 
-    HashSet<Long> deleteTag(long objectID, Long artifactID, Tag tag, boolean stillTagged) {
-        HashSet<Long> eventIDs = new HashSet<>();
-
+    Set<Long> deleteTag(long objectID, Long artifactID, Tag tag, boolean stillTagged) {
         DBLock.lock();
         try {
-            markEventsTagged(objectID, artifactID, eventIDs, stillTagged);
+            Set<Long> eventIDs = markEventsTagged(objectID, artifactID, stillTagged);
             //"DELETE FROM tags WHERE tag_id = ?
             deleteTagStmt.clearParameters();
             deleteTagStmt.setLong(1, tag.getId());
             deleteTagStmt.executeUpdate();
+            return eventIDs;
         } catch (SQLException ex) {
             LOGGER.log(Level.SEVERE, "failed to add tag to event", ex); // NON-NLS
         } finally {
             DBLock.unlock();
         }
-        return eventIDs;
+        return Collections.emptySet();
     }
 
-    private void markEventsTagged(long objectID, Long artifactID, HashSet<Long> eventIDs, boolean tagged) throws SQLException {
+    private Set<Long> markEventsTagged(long objectID, Long artifactID, boolean tagged) throws SQLException {
+        HashSet<Long> eventIDs = new HashSet<>();
         selectEventIDsFromOBjectAndArtifactStmt.clearParameters();
         selectEventIDsFromOBjectAndArtifactStmt.setLong(1, objectID);
         if (Objects.isNull(artifactID)) {
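The setLong(1, eventID) to setLong(3, eventID) change above is the bug named in the commit message: the old code bound the event id into slot 1, clobbering the tag_id bound just before it, and never bound slot 3, so event_id was left unset. A hedged sketch of the corrected binding (statement text taken from the in-code comment; surrounding names illustrative):

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;

    class TagInsertSketch {
        static void insertTag(Connection con, long tagId, long tagNameId, long eventID)
                throws SQLException {
            try (PreparedStatement insertTagStmt = con.prepareStatement(
                    "INSERT OR IGNORE INTO tags (tag_id, tag_name_id, event_id) VALUES (?,?,?)")) {
                insertTagStmt.setLong(1, tagId);     // tag_id
                insertTagStmt.setLong(2, tagNameId); // tag_name_id
                // Pre-fix, this line read setLong(1, eventID): it overwrote the
                // tag_id bound above and, depending on the driver, the unbound
                // third parameter either failed the insert or went in as NULL.
                insertTagStmt.setLong(3, eventID);   // event_id
                insertTagStmt.executeUpdate();
            }
        }
    }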
@@ -789,6 +788,7 @@ public class EventDB {
                     + " WHERE event_id IN (" + StringUtils.join(eventIDs, ",") + ")");
             }
         }
+        return eventIDs;
     }
 
     void recordLastArtifactID(long lastArtfID) {
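addTag's in-loop comment above asks whether the per-event inserts could be one statement. One conventional answer (not what this commit does) is JDBC batching; whether it actually wins anything under SQLite's single-writer lock would need measuring:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;
    import java.util.Set;

    class BatchInsertSketch {
        static void insertTags(Connection con, long tagId, long tagNameId,
                Set<Long> eventIDs) throws SQLException {
            try (PreparedStatement stmt = con.prepareStatement(
                    "INSERT OR IGNORE INTO tags (tag_id, tag_name_id, event_id) VALUES (?,?,?)")) {
                for (Long eventID : eventIDs) {
                    stmt.setLong(1, tagId);
                    stmt.setLong(2, tagNameId);
                    stmt.setLong(3, eventID);
                    stmt.addBatch(); // queue the row instead of executing immediately
                }
                stmt.executeBatch(); // flush once; wrap in a transaction for larger gains
            }
        }
    }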
@@ -830,16 +830,16 @@ public class EventDB {
         DBLock.lock();
         //this should match Sleuthkit db setup
         try (Statement statement = con.createStatement()) {
-            String autopsyDBFileName = Paths.get(dbPath).getParent().resolve("autopsy.db").toString();
-
-            ResultSet rs = statement.executeQuery("PRAGMA database_list");
-            boolean found = false;
-            while (rs.next() && !found) {
-                found |= "autopsy".equalsIgnoreCase(rs.getString("name"));
-            }
-            if (!found) {
-                statement.execute("ATTACH DATABASE 'file:" + autopsyDBFileName + "?mode=ro' AS autopsy");
-            }
+//            String autopsyDBFileName = Paths.get(dbPath).getParent().resolve("autopsy.db").toString();
+//
+//            ResultSet rs = statement.executeQuery("PRAGMA database_list");
+//            boolean found = false;
+//            while (rs.next() && !found) {
+//                found |= "autopsy".equalsIgnoreCase(rs.getString("name"));
+//            }
+//            if (!found) {
+//                statement.execute("ATTACH DATABASE 'file:" + autopsyDBFileName + "?mode=ro' AS autopsy");
+//            }
             //reduce i/o operations, we have no OS crash recovery anyway
             statement.execute("PRAGMA synchronous = OFF;"); // NON-NLS
             //we don't use this feature, so turn it off for minimal speed up on queries
@@ -907,10 +907,11 @@ public class EventDB {
         final boolean useSubTypes = (zoomLevel == EventTypeZoomLevel.SUB_TYPE);
 
         //get some info about the range of dates requested
-        final String queryString = "SELECT count(DISTINCT event_id) AS count, " + typeColumnHelper(useSubTypes)
+        final String queryString = "SELECT count(DISTINCT events.event_id) AS count, " + typeColumnHelper(useSubTypes)
                 + " FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) + " WHERE time >= " + startTime + " AND time < " + endTime + " AND " + SQLHelper.getSQLWhere(filter) // NON-NLS
                 + " GROUP BY " + typeColumnHelper(useSubTypes); // NON-NLS
 
+        System.out.println(queryString);
         DBLock.lock();
         try (Statement stmt = con.createStatement();
                 ResultSet rs = stmt.executeQuery(queryString);) {
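Qualifying the count as events.event_id matters because useHashHitTablesHelper(filter) and useTagTablesHelper(filter) can join in tables that also carry an event_id column (the tags table above is one), and SQLite then rejects the bare name as ambiguous. A minimal repro, assuming an sqlite-jdbc driver on the classpath:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;
    import java.sql.Statement;

    class AmbiguousColumnDemo {
        public static void main(String[] args) throws SQLException {
            try (Connection con = DriverManager.getConnection("jdbc:sqlite::memory:");
                    Statement stmt = con.createStatement()) {
                stmt.execute("CREATE TABLE events (event_id INTEGER, time INTEGER)");
                stmt.execute("CREATE TABLE tags (event_id INTEGER, tag_name_id INTEGER)");
                try {
                    stmt.executeQuery("SELECT count(DISTINCT event_id) FROM events"
                            + " JOIN tags ON events.event_id = tags.event_id");
                } catch (SQLException ex) {
                    System.out.println(ex.getMessage()); // "ambiguous column name: event_id"
                }
                // Qualified as in the fixed query string, the same count runs:
                stmt.executeQuery("SELECT count(DISTINCT events.event_id) FROM events"
                        + " JOIN tags ON events.event_id = tags.event_id").close();
            }
        }
    }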
EventsRepository.java

@@ -24,7 +24,6 @@ import com.google.common.cache.LoadingCache;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -457,23 +456,24 @@ public class EventsRepository {
         }
     }
 
-    synchronized public HashSet<Long> addTag(long objID, Long artifactID, Tag tag) {
-        HashSet<Long> updatedEventIDs = eventDB.addTag(objID, artifactID, tag);
+    synchronized public Set<Long> addTag(long objID, Long artifactID, Tag tag) {
+        Set<Long> updatedEventIDs = eventDB.addTag(objID, artifactID, tag);
         if (!updatedEventIDs.isEmpty()) {
             invalidateCaches(updatedEventIDs);
         }
         return updatedEventIDs;
     }
 
-    synchronized public HashSet<Long> deleteTag(long objID, Long artifactID, Tag tag, boolean tagged) {
-        HashSet<Long> updatedEventIDs = eventDB.deleteTag(objID, artifactID, tag, tagged);
+    synchronized public Set<Long> deleteTag(long objID, Long artifactID, Tag tag, boolean tagged) {
+        Set<Long> updatedEventIDs = eventDB.deleteTag(objID, artifactID, tag, tagged);
         if (!updatedEventIDs.isEmpty()) {
             invalidateCaches(updatedEventIDs);
         }
         return updatedEventIDs;
     }
 
-    synchronized private void invalidateCaches(HashSet<Long> updatedEventIDs) {
+    synchronized private void invalidateCaches(Set<Long> updatedEventIDs) {
         eventCountsCache.invalidateAll();
         aggregateEventsCache.invalidateAll();
         idToEventCache.invalidateAll(updatedEventIDs);
         try {
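The invalidateCaches context above shows two Guava cache idioms side by side: blanket invalidation for derived aggregates (any event can affect a count) and keyed invalidation for the id-to-event cache (only the touched events are stale). A sketch of the distinction, with hypothetical value types:

    import com.google.common.cache.CacheBuilder;
    import com.google.common.cache.CacheLoader;
    import com.google.common.cache.LoadingCache;
    import java.util.Set;

    class CacheInvalidationSketch {
        private final LoadingCache<Long, String> idToEventCache =
                CacheBuilder.newBuilder()
                        .maximumSize(5_000)
                        .build(CacheLoader.from(id -> "event-" + id));

        void onTagsChanged(Set<Long> updatedEventIDs) {
            // Evict exactly the stale entries; untouched events stay cached.
            idToEventCache.invalidateAll(updatedEventIDs);
            // Aggregate caches (counts, bands) would call invalidateAll() with
            // no arguments, since any event can contribute to any aggregate.
        }
    }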