Merge remote-tracking branch 'upstream/timeline-event-mgr' into 1126-timeline_event_publishing_names

# Conflicts:
#	Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java
#	Core/src/org/sleuthkit/autopsy/timeline/utils/FilterUtils.java
This commit is contained in:
millmanorama 2018-11-20 20:49:57 +01:00
commit ad64be1d0f
11 changed files with 303 additions and 250 deletions

View File

@ -32,16 +32,14 @@ import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import javafx.beans.Observable;
import javafx.beans.InvalidationListener;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.property.ReadOnlyObjectWrapper;
import javafx.collections.FXCollections;
import javafx.collections.ListChangeListener;
import javafx.collections.MapChangeListener;
import javafx.collections.ObservableList;
import javafx.collections.ObservableMap;
import javafx.collections.ObservableSet;
import javafx.collections.SetChangeListener;
import static org.apache.commons.collections4.CollectionUtils.emptyIfNull;
import static org.apache.commons.lang3.StringUtils.isBlank;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
@ -61,6 +59,7 @@ import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent;
import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.TagsFilterState;
import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl;
import org.sleuthkit.autopsy.timeline.utils.CheckedFunction;
import org.sleuthkit.autopsy.timeline.utils.FilterUtils;
@ -70,12 +69,14 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.DescriptionLoD;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.Tag;
import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TimelineManager;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskDataException;
import org.sleuthkit.datamodel.timeline.EventType;
import org.sleuthkit.datamodel.timeline.EventTypeZoomLevel;
import org.sleuthkit.datamodel.timeline.TimelineEvent;
@ -132,7 +133,7 @@ public final class FilteredEventsModel {
private final LoadingCache<Long, TimelineEvent> idToEventCache;
private final LoadingCache<ZoomState, Map<EventType, Long>> eventCountsCache;
/** Map from datasource id to datasource name. */
private final ObservableMap<Long, String> datasourcesMap = FXCollections.observableHashMap();
private final ObservableMap<Long, DataSource> datasourcesMap = FXCollections.observableHashMap();
private final ObservableSet< String> hashSets = FXCollections.observableSet();
private final ObservableList<TagName> tagNames = FXCollections.observableArrayList();
// end caches
@ -157,26 +158,19 @@ public final class FilteredEventsModel {
minCache = CacheBuilder.newBuilder()
.build(new CacheLoaderImpl<>(ignored -> eventManager.getMinTime()));
datasourcesMap.addListener((MapChangeListener.Change<? extends Long, ? extends String> change) -> {
DataSourceFilter dataSourceFilter = new DataSourceFilter(change.getValueAdded(), change.getKey());
InvalidationListener filterSyncListener = observable -> {
RootFilterState rootFilter = filterProperty().get();
rootFilter.getDataSourcesFilterState().getFilter().getSubFilters().add(dataSourceFilter);
syncFilters(rootFilter);
requestedFilter.set(rootFilter.copyOf());
});
hashSets.addListener((SetChangeListener.Change< ? extends String> change) -> {
HashSetFilter hashSetFilter = new HashSetFilter(change.getElementAdded());
RootFilterState rootFilter = filterProperty().get();
rootFilter.getHashHitsFilterState().getFilter().getSubFilters().add(hashSetFilter);
requestedFilter.set(rootFilter.copyOf());
});
tagNames.addListener((ListChangeListener.Change<? extends TagName> change) -> {
RootFilterState rootFilter = filterProperty().get();
syncTagsFilter(rootFilter);
requestedFilter.set(rootFilter.copyOf());
});
};
datasourcesMap.addListener(filterSyncListener);
hashSets.addListener(filterSyncListener);
tagNames.addListener(filterSyncListener);
requestedFilter.set(getDefaultFilter());
requestedZoomState.addListener((Observable observable) -> {
requestedZoomState.addListener(observable -> {
final ZoomState zoomState = requestedZoomState.get();
if (zoomState != null) {
@ -244,18 +238,22 @@ public final class FilteredEventsModel {
}
/**
* Use the given SleuthkitCase to update the data used to determine the
* available filters.
*
* @param skCase
* Update the data used to determine the available filters.
*/
synchronized private void populateFilterData() throws TskCoreException {
SleuthkitCase skCase = autoCase.getSleuthkitCase();
hashSets.addAll(eventManager.getHashSetNames());
Set<Long> dataSourceIDs = eventManager.getDataSourceIDs();
//because there is no way to remove a datasource we only add to this map.
for (Long id : eventManager.getDataSourceIDs()) {
datasourcesMap.putIfAbsent(id, skCase.getContentById(id).getDataSource().getName());
for (Long id : dataSourceIDs) {
try {
if (datasourcesMap.get(id) == null) {
datasourcesMap.put(id, skCase.getDataSource(id));
}
} catch (TskDataException ex) {
throw new TskCoreException("Error looking up datasource for id " + id, ex);
}
}
//should this only be tags applied to files or event bearing artifacts?
@ -263,21 +261,32 @@ public final class FilteredEventsModel {
}
/**
* "sync" the given tags filter with the tagnames in use: Disable filters
* for tags that are not in use in the case, and add new filters for tags
* that don't have them. New filters are selected by default.
* "sync" the given root filter with the state of the case: Disable filters
* for tags that are not in use in the case, and add new filters for tags,
* hashsets, and datasources that don't have them. New filters are selected
* by default.
*
* @param rootFilterState the filter state to modify so it is consistent
* with the tags in use in the case
*/
public void syncTagsFilter(RootFilterState rootFilterState) {
tagNames.forEach((tagName) -> {
rootFilterState.getTagsFilterState().getFilter().addSubFilter(new TagNameFilter(tagName));
});
for (FilterState<? extends TagNameFilter> filterState : rootFilterState.getTagsFilterState().getSubFilterStates()) {
filterState.setDisabled(tagNames.contains(filterState.getFilter().getTagName()) == false);
public void syncFilters(RootFilterState rootFilterState) {
TagsFilterState tagsFilterState = rootFilterState.getTagsFilterState();
for (TagName tagName : tagNames) {
tagsFilterState.getFilter().addSubFilter(new TagNameFilter(tagName));
}
for (FilterState<? extends TagNameFilter> tagFilterState : rootFilterState.getTagsFilterState().getSubFilterStates()) {
tagFilterState.setDisabled(tagNames.contains(tagFilterState.getFilter().getTagName()) == false);
}
DataSourcesFilter dataSourcesFilter = rootFilterState.getDataSourcesFilterState().getFilter();
for (Map.Entry<Long, DataSource> entry : datasourcesMap.entrySet()) {
dataSourcesFilter.addSubFilter(new DataSourceFilter(entry.getValue().getName(), entry.getKey()));
}
HashHitsFilter hashSetsFilter = rootFilterState.getHashHitsFilterState().getFilter();
for (String hashSet : hashSets) {
hashSetsFilter.addSubFilter(new HashSetFilter(hashSet));
}
}
/**
@ -340,7 +349,7 @@ public final class FilteredEventsModel {
public synchronized RootFilterState getDefaultFilter() {
DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();
datasourcesMap.entrySet().forEach(dataSourceEntry
-> dataSourcesFilter.addSubFilter(new DataSourceFilter(dataSourceEntry.getValue(), dataSourceEntry.getKey()))
-> dataSourcesFilter.addSubFilter(new DataSourceFilter(dataSourceEntry.getValue().getName(), dataSourceEntry.getKey()))
);
HashHitsFilter hashHitsFilter = new HashHitsFilter();
@ -501,30 +510,22 @@ public final class FilteredEventsModel {
return postTagsAdded(updatedEventIDs);
}
synchronized public boolean handleContentTagDeleted(ContentTagDeletedEvent evt) {
synchronized public boolean handleContentTagDeleted(ContentTagDeletedEvent evt) throws TskCoreException {
DeletedContentTagInfo deletedTagInfo = evt.getDeletedTagInfo();
try {
Content content = autoCase.getSleuthkitCase().getContentById(deletedTagInfo.getContentID());
boolean tagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false;
Set<Long> updatedEventIDs = deleteTag(content.getId(), null, deletedTagInfo.getTagID(), tagged);
return postTagsDeleted(updatedEventIDs);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "unable to determine tagged status of content.", ex); //NON-NLS
}
return false;
Content content = autoCase.getSleuthkitCase().getContentById(deletedTagInfo.getContentID());
boolean tagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false;
Set<Long> updatedEventIDs = deleteTag(content.getId(), null, deletedTagInfo.getTagID(), tagged);
return postTagsDeleted(updatedEventIDs);
}
synchronized public boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt) {
synchronized public boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt) throws TskCoreException {
DeletedBlackboardArtifactTagInfo deletedTagInfo = evt.getDeletedTagInfo();
try {
BlackboardArtifact artifact = autoCase.getSleuthkitCase().getBlackboardArtifact(deletedTagInfo.getArtifactID());
boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false;
Set<Long> updatedEventIDs = deleteTag(artifact.getObjectID(), artifact.getArtifactID(), deletedTagInfo.getTagID(), tagged);
return postTagsDeleted(updatedEventIDs);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "unable to determine tagged status of artifact.", ex); //NON-NLS
}
return false;
BlackboardArtifact artifact = autoCase.getSleuthkitCase().getBlackboardArtifact(deletedTagInfo.getArtifactID());
boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false;
Set<Long> updatedEventIDs = deleteTag(artifact.getObjectID(), artifact.getArtifactID(), deletedTagInfo.getTagID(), tagged);
return postTagsDeleted(updatedEventIDs);
}
/**
@ -664,21 +665,26 @@ public final class FilteredEventsModel {
return updatedEventIDs;
}
synchronized void invalidateAllCaches() {
/**
* Invalidate the timeline caches for the given event IDs. Also forces the
* filter values to be updated with any new values from the case data (data
* sources, tags, etc.).
*
* @param updatedEventIDs A collection of the event IDs whose cached event
* objects should be invalidated. Can be null or an
* empty set to invalidate the general caches, such
* as min/max time, or the counts per event type.
*
* @throws TskCoreException
*/
public synchronized void invalidateCaches(Collection<Long> updatedEventIDs) throws TskCoreException {
minCache.invalidateAll();
maxCache.invalidateAll();
idToEventCache.invalidateAll();
invalidateCaches(Collections.emptyList());
}
idToEventCache.invalidateAll(emptyIfNull(updatedEventIDs));
eventCountsCache.invalidateAll();
populateFilterData();
synchronized private void invalidateCaches(Collection<Long> updatedEventIDs) {
idToEventCache.invalidateAll(updatedEventIDs);
eventCountsCache.invalidateAll();
try {
populateFilterData();
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Failed topopulate filter data.", ex); //NON-NLS
}
eventbus.post(new CacheInvalidatedEvent());
}

View File

@ -19,14 +19,18 @@
package org.sleuthkit.autopsy.timeline;
import com.google.common.eventbus.EventBus;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import java.beans.PropertyChangeEvent;
import java.time.ZoneId;
import java.util.Collection;
import java.util.Collections;
import static java.util.Collections.singleton;
import java.util.Optional;
import java.util.TimeZone;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import javafx.application.Platform;
@ -40,7 +44,6 @@ import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.property.ReadOnlyObjectWrapper;
import javafx.beans.property.ReadOnlyStringProperty;
import javafx.beans.property.ReadOnlyStringWrapper;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.collections.ObservableSet;
@ -68,14 +71,18 @@ import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;
import org.sleuthkit.autopsy.coreutils.History;
import org.sleuthkit.autopsy.coreutils.LoggedTask;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.coreutils.ThreadConfined;
import org.sleuthkit.autopsy.events.AutopsyEvent;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.timeline.events.EventAddedEvent;
import org.sleuthkit.autopsy.timeline.events.ViewInTimelineRequestedEvent;
import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.DetailViewEvent;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
import org.sleuthkit.autopsy.timeline.utils.IntervalUtils;
import org.sleuthkit.autopsy.timeline.zooming.TimeUnits;
import org.sleuthkit.autopsy.timeline.zooming.ZoomState;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
@ -83,24 +90,21 @@ import org.sleuthkit.datamodel.DescriptionLoD;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.timeline.EventType;
import org.sleuthkit.datamodel.timeline.EventTypeZoomLevel;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
import org.sleuthkit.autopsy.timeline.zooming.TimeUnits;
import org.sleuthkit.datamodel.timeline.TimelineFilter.DescriptionFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.EventTypeFilter;
/**
* Controller in the MVC design along with FilteredEventsModel TimeLineView.
* Forwards interpreted user gestures from views to model. Provides model to
* view. Is entry point for timeline module.
* view.
*
* Concurrency Policy:<ul>
* <li>Since filteredEvents is internally synchronized, only compound access to
* it needs external synchronization</li>
* <li>Since eventsRepository is internally synchronized, only compound
* access to it needs external synchronization</li>
* <li>Other state including mainFrame, viewMode, and the
* listeners should only be accessed with this object's intrinsic lock held, or
* on the EDT as indicated.
*
* <li>Other state including topComponent, viewMode, and the listeners should
* only be accessed with this object's intrinsic lock held, or on the EDT as
* indicated.
* </li>
* </ul>
*/
@ -112,6 +116,15 @@ public class TimeLineController {
private static final ReadOnlyObjectWrapper<TimeZone> timeZone = new ReadOnlyObjectWrapper<>(TimeZone.getDefault());
private final ListeningExecutorService executor = MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor());
private final ReadOnlyListWrapper<Task<?>> tasks = new ReadOnlyListWrapper<>(FXCollections.observableArrayList());
private final ReadOnlyDoubleWrapper taskProgress = new ReadOnlyDoubleWrapper(-1);
private final ReadOnlyStringWrapper taskMessage = new ReadOnlyStringWrapper();
private final ReadOnlyStringWrapper taskTitle = new ReadOnlyStringWrapper();
private final ReadOnlyStringWrapper statusMessage = new ReadOnlyStringWrapper();
private final EventBus eventbus = new EventBus("TimeLineController_EventBus");
public static ZoneId getTimeZoneID() {
return timeZone.get().toZoneId();
}
@ -128,19 +141,6 @@ public class TimeLineController {
return timeZone.getReadOnlyProperty();
}
private final ExecutorService executor = Executors.newSingleThreadExecutor();
private final ReadOnlyListWrapper<Task<?>> tasks = new ReadOnlyListWrapper<>(FXCollections.observableArrayList());
private final ReadOnlyDoubleWrapper taskProgress = new ReadOnlyDoubleWrapper(-1);
private final ReadOnlyStringWrapper taskMessage = new ReadOnlyStringWrapper();
private final ReadOnlyStringWrapper taskTitle = new ReadOnlyStringWrapper();
private final ReadOnlyStringWrapper statusMessage = new ReadOnlyStringWrapper();
private final EventBus eventbus = new EventBus("TimeLineController_EventBus");
/**
* Status is a string that will be displayed in the status bar as a kind of
* user hint/information when it is not empty
@ -189,7 +189,6 @@ public class TimeLineController {
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
private TimeLineTopComponent topComponent;
@GuardedBy("this")
private final ReadOnlyObjectWrapper<ViewMode> viewMode = new ReadOnlyObjectWrapper<>(ViewMode.COUNTS);
@ -282,10 +281,11 @@ public class TimeLineController {
* TimeLineController. Do we need to do this with datasource or hash hit
* filters?
*/
historyManager.currentState().addListener((ObservableValue<? extends ZoomState> observable, ZoomState oldValue, ZoomState newValue) -> {
ZoomState historyManagerParams = newValue;
filteredEvents.syncTagsFilter(historyManagerParams.getFilterState());
currentParams.set(historyManagerParams);
historyManager.currentState().addListener((observable, oldState, newState) -> {
ZoomState historyManagerState = newState;
filteredEvents.syncFilters(historyManagerState.getFilterState());
currentParams.set(historyManagerState);
});
try {
@ -571,9 +571,9 @@ public class TimeLineController {
synchronized public void pushFilters(RootFilterState filter) {
ZoomState currentZoom = filteredEvents.zoomStateProperty().get();
if (currentZoom == null) {
advance(InitialZoomState.withFilterState(filter.copyOf()));
advance(InitialZoomState.withFilterState(filter));
} else if (currentZoom.hasFilterState(filter) == false) {
advance(currentZoom.withFilterState(filter.copyOf()));
advance(currentZoom.withFilterState(filter));
}
}
@ -665,6 +665,7 @@ public class TimeLineController {
taskTitle.bind(task.titleProperty());
switch (task.getState()) {
case READY:
//TODO: Check future result for errors....
executor.submit(task);
break;
case SCHEDULED:
@ -709,76 +710,94 @@ public class TimeLineController {
}
void handleIngestModuleEvent(PropertyChangeEvent evt) {
/**
* Checking for a current case is a stop gap measure until a
* different way of handling the closing of cases is worked out.
* Currently, remote events may be received for a case that is
* already closed.
*/
try {
Case.getCurrentCaseThrows();
} catch (NoCurrentCaseException notUsed) {
// Case is closed, do nothing.
return;
}
// ignore remote events. The node running the ingest should update the Case DB
// @@@ We should signal though that there is more data and flush caches...
if (((AutopsyEvent) evt).getSourceType() == AutopsyEvent.SourceType.REMOTE) {
return;
}
switch (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName())) {
case CONTENT_CHANGED:
// new files were already added to the events table from SleuthkitCase.
break;
case DATA_ADDED:
ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue();
if (null != eventData && eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID()) {
executor.submit(() -> filteredEvents.setHashHit(eventData.getArtifacts(), true));
}
break;
case FILE_DONE:
/*
* Since the known state or hash hit state may have changed
* invalidate caches.
*/
//@@@ This causes HUGE slow downs during ingest when TL is open.
// executor.submit(filteredEvents::invalidateAllCaches);
// known state should have been updated automatically via SleuthkitCase.setKnown();
// hashes should have been updated from event
}
/**
* Checking for a current case is a stop gap measure until a different
* way of handling the closing of cases is worked out. Currently, remote
* events may be received for a case that is already closed.
*/
try {
Case.getCurrentCaseThrows();
} catch (NoCurrentCaseException notUsed) {
// Case is closed, do nothing.
return;
}
// ignore remote events. The node running the ingest should update the Case DB
// @@@ We should signal though that there is more data and flush caches...
if (((AutopsyEvent) evt).getSourceType() == AutopsyEvent.SourceType.REMOTE) {
return;
}
void handleCaseEvent(PropertyChangeEvent evt) {
switch (Case.Events.valueOf(evt.getPropertyName())) {
case BLACKBOARD_ARTIFACT_TAG_ADDED:
executor.submit(() -> filteredEvents.handleArtifactTagAdded((BlackBoardArtifactTagAddedEvent) evt));
break;
case BLACKBOARD_ARTIFACT_TAG_DELETED:
executor.submit(() -> filteredEvents.handleArtifactTagDeleted((BlackBoardArtifactTagDeletedEvent) evt));
break;
case CONTENT_TAG_ADDED:
executor.submit(() -> filteredEvents.handleContentTagAdded((ContentTagAddedEvent) evt));
break;
case CONTENT_TAG_DELETED:
executor.submit(() -> filteredEvents.handleContentTagDeleted((ContentTagDeletedEvent) evt));
break;
case DATA_SOURCE_ADDED:
executor.submit(() -> filteredEvents.postAutopsyEventLocally((AutopsyEvent) evt));
break;
case CURRENT_CASE:
//close timeline on case changes.
SwingUtilities.invokeLater(TimeLineController.this::shutDownTimeLine);
break;
case TIMELINE_EVENT_ADDED:
executor.submit(filteredEvents::invalidateAllCaches);
break;
switch (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName())) {
case CONTENT_CHANGED:
// new files were already added to the events table from SleuthkitCase.
break;
case DATA_ADDED:
ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue();
if (null != eventData && eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID()) {
logFutureException(executor.submit(() -> filteredEvents.setHashHit(eventData.getArtifacts(), true)),
"Error executing task in response to DATA_ADDED event.",
"Error executing response to new data.");
}
break;
case FILE_DONE:
/*
* Since the known state or hash hit state may have changed
* invalidate caches.
*/
//@@@ This causes HUGE slow downs during ingest when TL is open.
// executor.submit(filteredEvents::invalidateAllCaches);
// known state should have been updated automatically via SleuthkitCase.setKnown();
// hashes should have been updated from event
}
}
}
void handleCaseEvent(PropertyChangeEvent evt) {
ListenableFuture<?> future = Futures.immediateFuture(null);
switch (Case.Events.valueOf(evt.getPropertyName())) {
case BLACKBOARD_ARTIFACT_TAG_ADDED:
future = executor.submit(() -> filteredEvents.handleArtifactTagAdded((BlackBoardArtifactTagAddedEvent) evt));
break;
case BLACKBOARD_ARTIFACT_TAG_DELETED:
future = executor.submit(() -> filteredEvents.handleArtifactTagDeleted((BlackBoardArtifactTagDeletedEvent) evt));
break;
case CONTENT_TAG_ADDED:
future = executor.submit(() -> filteredEvents.handleContentTagAdded((ContentTagAddedEvent) evt));
break;
case CONTENT_TAG_DELETED:
future = executor.submit(() -> filteredEvents.handleContentTagDeleted((ContentTagDeletedEvent) evt));
break;
case CURRENT_CASE:
//close timeline on case changes.
SwingUtilities.invokeLater(TimeLineController.this::shutDownTimeLine);
break;
case DATA_SOURCE_ADDED:
future = executor.submit(() -> {
filteredEvents.invalidateCaches(null);
return null;
});
break;
case TIMELINE_EVENT_ADDED:
future = executor.submit(() -> {
filteredEvents.invalidateCaches(singleton(((EventAddedEvent) evt).getAddedEventID()));
return null;
});
break;
}
logFutureException(future,
"Error executing task in response to " + evt.getPropertyName() + " event.",
"Error executing task in response to case event.");
}
private void logFutureException(ListenableFuture<?> future, String errorLogMessage, String errorUserMessage) {
future.addListener(() -> {
try {
future.get();
} catch (InterruptedException | ExecutionException ex) {
logger.log(Level.SEVERE, errorLogMessage, ex);
MessageNotifyUtil.Message.error(errorUserMessage);
}
}, MoreExecutors.directExecutor());
}
}

View File

@ -20,18 +20,20 @@ package org.sleuthkit.autopsy.timeline;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.logging.Level;
import javafx.application.Platform;
import javax.swing.SwingUtilities;
import org.sleuthkit.autopsy.casemodule.Case;
import static org.sleuthkit.autopsy.casemodule.Case.Events.CURRENT_CASE;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Manages listeners and the controller.
*
*
*/
public class TimeLineModule {
@ -48,17 +50,18 @@ public class TimeLineModule {
/**
* Get instance of the controller for the current case
* @return
* @throws NoCurrentCaseException
*
* @return the controller for the current case.
*
* @throws NoCurrentCaseException If there is no case open.
* @throws TskCoreException If there was a problem accessing the case
* database.
*
*/
public static TimeLineController getController() throws NoCurrentCaseException {
public static TimeLineController getController() throws NoCurrentCaseException, TskCoreException {
synchronized (controllerLock) {
if (controller == null) {
try {
controller = new TimeLineController(Case.getCurrentCaseThrows());
} catch (NoCurrentCaseException | TskCoreException ex) {
throw new NoCurrentCaseException("Error getting TimeLineController for the current case.", ex);
}
controller = new TimeLineController(Case.getCurrentCaseThrows());
}
return controller;
}
@ -84,19 +87,21 @@ public class TimeLineModule {
@Override
public void propertyChange(PropertyChangeEvent evt) {
try {
TimeLineController tlController = getController();
tlController.handleCaseEvent(evt);
getController().handleCaseEvent(evt);
} catch (NoCurrentCaseException ex) {
// ignore
return;
} catch (TskCoreException ex) {
MessageNotifyUtil.Message.error("Error creating timeline controller.");
logger.log(Level.SEVERE, "Error creating timeline controller", ex);
}
if (Case.Events.valueOf(evt.getPropertyName()).equals(CURRENT_CASE)) {
// we care only about case closing here
if (evt.getNewValue() == null) {
if (evt.getNewValue() == null) {
synchronized (controllerLock) {
if (controller != null) {
controller.shutDownTimeLine();
SwingUtilities.invokeLater(controller::shutDownTimeLine);
}
controller = null;
}
@ -113,11 +118,13 @@ public class TimeLineModule {
@Override
public void propertyChange(PropertyChangeEvent evt) {
try {
TimeLineController tlController = getController();
tlController.handleIngestModuleEvent(evt);
getController().handleIngestModuleEvent(evt);
} catch (NoCurrentCaseException ex) {
// ignore
return;
} catch (TskCoreException ex) {
MessageNotifyUtil.Message.error("Error creating timeline controller.");
logger.log(Level.SEVERE, "Error creating timeline controller", ex);
}
}
}

View File

@ -80,4 +80,13 @@ public class TimelineEventAddedEvent extends AutopsyEvent {
public TimelineEvent getAddedEvent() {
return getNewValue();
}
/**
* Gets the Id of the event that was added.
*
* @return The Id of the event that was added.
*/
public long getAddedEventID() {
return (long) super.getNewValue();
}
}

View File

@ -511,13 +511,13 @@ final public class ViewFrame extends BorderPane {
* NOTE: This ViewFrame must be registered with the filteredEventsModel's
* EventBus in order for this handler to be invoked.
*
* @param event The DataSourceAnalysisCompletedEvent to handle.
* @param event The CacheInvalidatedEvent to handle.
*/
@Subscribe
@NbBundle.Messages({
"# {0} - datasource name",
"ViewFrame.notification.analysisComplete=The event data has changed, the visualization may be out of date."})
public void handleEventAdded(FilteredEventsModel.CacheInvalidatedEvent event) {
public void handleCacheInvalidated(FilteredEventsModel.CacheInvalidatedEvent event) {
Platform.runLater(() -> {
if (hostedView.needsRefresh() == false) {
hostedView.setNeedsRefresh();

View File

@ -20,7 +20,7 @@ package org.sleuthkit.autopsy.timeline.ui.filtering;
import java.util.Arrays;
import javafx.application.Platform;
import javafx.beans.Observable;
import javafx.beans.InvalidationListener;
import javafx.beans.binding.Bindings;
import javafx.collections.FXCollections;
import javafx.collections.ObservableMap;
@ -116,14 +116,17 @@ final public class FilterSetPanel extends BorderPane {
legendColumn.setCellFactory(col -> new LegendCell(this.controller));
//type is the only filter expanded initially
expansionMap.put(controller.getEventsModel().getFilterState().getFilter(), true);
expansionMap.put(controller.getEventsModel().getFilterState().getEventTypeFilterState().getFilter(), true);
expansionMap.put(filteredEvents.getFilterState().getFilter(), true);
expansionMap.put(filteredEvents.getFilterState().getEventTypeFilterState().getFilter(), true);
this.filteredEvents.eventTypeZoomProperty().addListener((Observable observable) -> applyFilters());
this.filteredEvents.descriptionLODProperty().addListener((Observable observable1) -> applyFilters());
this.filteredEvents.timeRangeProperty().addListener((Observable observable2) -> applyFilters());
InvalidationListener applyFiltersListener = observable -> applyFilters();
this.filteredEvents.filterProperty().addListener((observable, oldValue, newValue) -> refresh());
filteredEvents.eventTypeZoomProperty().addListener(applyFiltersListener);
filteredEvents.descriptionLODProperty().addListener(applyFiltersListener);
filteredEvents.timeRangeProperty().addListener(applyFiltersListener);
filteredEvents.filterProperty().addListener(observable -> refresh());
refresh();
hiddenDescriptionsListView.setItems(controller.getQuickHideFilters());
@ -164,13 +167,13 @@ final public class FilterSetPanel extends BorderPane {
private void refresh() {
Platform.runLater(() -> {
filterTreeTable.setRoot(new FilterTreeItem(filteredEvents.getFilterState().copyOf(), expansionMap));
filterTreeTable.setRoot(new FilterTreeItem(filteredEvents.filterProperty().get().copyOf(), expansionMap));
});
}
private void applyFilters() {
Platform.runLater(() -> {
controller.pushFilters(((RootFilterState) filterTreeTable.getRoot().getValue()));
controller.pushFilters(((RootFilterState) filterTreeTable.getRoot().getValue().copyOf()));
});
}

View File

@ -18,8 +18,6 @@
*/
package org.sleuthkit.autopsy.timeline.ui.filtering;
import javafx.beans.InvalidationListener;
import javafx.beans.Observable;
import javafx.collections.ListChangeListener;
import javafx.collections.ObservableMap;
import javafx.scene.control.TreeItem;
@ -74,12 +72,9 @@ final public class FilterTreeItem extends TreeItem<FilterState<?>> {
}
});
compoundFilter.selectedProperty().addListener(new InvalidationListener() {
@Override
public void invalidated(Observable observable) {
if (compoundFilter.isSelected()) {
setExpanded(true);
}
compoundFilter.selectedProperty().addListener( observable -> {
if (compoundFilter.isSelected()) {
setExpanded(true);
}
});
}

View File

@ -18,6 +18,7 @@
*/
package org.sleuthkit.autopsy.timeline.ui.filtering.datamodel;
import com.google.common.collect.Lists;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
@ -27,9 +28,17 @@ import javafx.collections.ObservableList;
import org.sleuthkit.datamodel.timeline.TimelineFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.CompoundFilter;
class CompoundFilterStateImpl<SubFilterType extends TimelineFilter, C extends CompoundFilter<SubFilterType>>
extends DefaultFilterState<C>
implements CompoundFilterState<SubFilterType, C> {
/**
*
* Defualt implementation of CompoundFilterState
*
* @param <SubFilterType> The type of the subfilters in the underlying
* CompoundFilter
* @param <FilterType> The type of the underlying CompoundFilter
*/
class CompoundFilterStateImpl<SubFilterType extends TimelineFilter, FilterType extends CompoundFilter<SubFilterType>>
extends DefaultFilterState<FilterType>
implements CompoundFilterState<SubFilterType, FilterType> {
private final ObservableList<FilterState<SubFilterType>> subFilterStates = FXCollections.observableArrayList();
@ -39,14 +48,9 @@ class CompoundFilterStateImpl<SubFilterType extends TimelineFilter, C extends Co
*
* @param filter The CompoundFilter this will represent the state of.
*/
CompoundFilterStateImpl(C filter) {
CompoundFilterStateImpl(FilterType filter) {
super(filter);
filter.getSubFilters().forEach(this::addSubFilterState);
filter.getSubFilters().addListener((ListChangeListener.Change<? extends SubFilterType> change) -> {
while (change.next()) {
change.getAddedSubList().forEach(CompoundFilterStateImpl.this::addSubFilterState);
}
});
filter.getSubFilters().forEach(this::addStateForSubFilter);
configureListeners();
}
@ -60,7 +64,7 @@ class CompoundFilterStateImpl<SubFilterType extends TimelineFilter, C extends Co
* of.
* @param subFilterStates The filter states to use as the sub filter states.
*/
CompoundFilterStateImpl(C filter, Collection<FilterState<SubFilterType>> subFilterStates) {
CompoundFilterStateImpl(FilterType filter, Collection<FilterState<SubFilterType>> subFilterStates) {
super(filter);
subFilterStates.forEach(this::addSubFilterState);
@ -68,6 +72,13 @@ class CompoundFilterStateImpl<SubFilterType extends TimelineFilter, C extends Co
}
private void configureListeners() {
//Add a new subfilterstate whenever the underlying subfilters change.
getFilter().getSubFilters().addListener((ListChangeListener.Change<? extends SubFilterType> change) -> {
while (change.next()) {
change.getAddedSubList().forEach(this::addStateForSubFilter);
}
});
/*
* enforce the following relationship between a compound filter and its
* subfilters: if a compound filter's active property changes, disable
@ -77,10 +88,9 @@ class CompoundFilterStateImpl<SubFilterType extends TimelineFilter, C extends Co
disableSubFiltersIfNotActive();
selectedProperty().addListener(selectedProperty -> {
if (isSelected() && getSubFilterStates().stream().noneMatch(FilterState::isSelected)) {
getSubFilterStates().forEach(subFilterState -> subFilterState.setSelected(true));
subFilterStates.forEach(subFilterState -> subFilterState.setSelected(true));
}
});
}
/**
@ -91,28 +101,25 @@ class CompoundFilterStateImpl<SubFilterType extends TimelineFilter, C extends Co
private void disableSubFiltersIfNotActive() {
boolean inactive = isActive() == false;
for (FilterState<SubFilterType> subFilter : getSubFilterStates()) {
subFilter.setDisabled(inactive);
}
subFilterStates.forEach(subFilterState -> subFilterState.setDisabled(inactive));
}
@SuppressWarnings("unchecked")
private <X extends TimelineFilter, S extends CompoundFilter<X>> void addSubFilterState(SubFilterType subFilter) {
private void addStateForSubFilter(SubFilterType subFilter) {
if (subFilter instanceof CompoundFilter<?>) {
addSubFilterState((FilterState<SubFilterType>) new CompoundFilterStateImpl<>((S) subFilter));
addSubFilterState((FilterState<SubFilterType>) new CompoundFilterStateImpl<>((CompoundFilter<?>) subFilter));
} else {
addSubFilterState(new DefaultFilterState<>(subFilter));
}
}
private void addSubFilterState(FilterState<SubFilterType> newFilterModel) {
getSubFilterStates().add(newFilterModel);
newFilterModel.selectedProperty().addListener(selectedProperty -> {
//set this compound filter model selected af any of the subfilters are selected.
setSelected(getSubFilterStates().stream().anyMatch(FilterState::isSelected));
private void addSubFilterState(FilterState<SubFilterType> newSubFilterState) {
subFilterStates.add(newSubFilterState);
newSubFilterState.selectedProperty().addListener(selectedProperty -> {
//set this compound filter state selected af any of the subfilters are selected.
setSelected(subFilterStates.stream().anyMatch(FilterState::isSelected));
});
newSubFilterState.setDisabled(isActive() ==false);
}
@Override
@ -121,12 +128,11 @@ class CompoundFilterStateImpl<SubFilterType extends TimelineFilter, C extends Co
}
@Override
public CompoundFilterStateImpl<SubFilterType, C> copyOf() {
public CompoundFilterStateImpl<SubFilterType, FilterType> copyOf() {
@SuppressWarnings("unchecked")
CompoundFilterStateImpl<SubFilterType, C> copy = new CompoundFilterStateImpl<>((C) getFilter().copyOf(),
getSubFilterStates().stream().map(FilterState::copyOf).collect(Collectors.toList())
);
CompoundFilterStateImpl<SubFilterType, FilterType> copy
= new CompoundFilterStateImpl<>((FilterType) getFilter().copyOf(),
Lists.transform(subFilterStates, FilterState::copyOf));
copy.setSelected(isSelected());
copy.setDisabled(isDisabled());
@ -135,19 +141,19 @@ class CompoundFilterStateImpl<SubFilterType extends TimelineFilter, C extends Co
@Override
@SuppressWarnings("unchecked")
public C getActiveFilter() {
public FilterType getActiveFilter() {
if (isActive() == false) {
return null;
}
List<SubFilterType> activeSubFilters = getSubFilterStates().stream()
List<SubFilterType> activeSubFilters = subFilterStates.stream()
.filter(FilterState::isActive)
.map(FilterState::getActiveFilter)
.collect(Collectors.toList());
C copy = (C) getFilter().copyOf();
FilterType copy = (FilterType) getFilter().copyOf();
copy.getSubFilters().clear();
copy.getSubFilters().addAll(activeSubFilters);
return copy;
}
}
}

View File

@ -18,11 +18,12 @@
*/
package org.sleuthkit.autopsy.timeline.ui.filtering.datamodel;
import com.google.common.collect.Lists;
import javafx.beans.property.ReadOnlyBooleanProperty;
import javafx.beans.property.ReadOnlyBooleanWrapper;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import org.python.google.common.collect.Lists;
import static org.apache.commons.lang3.ObjectUtils.notEqual;
import org.sleuthkit.datamodel.timeline.TimelineFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.DataSourceFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.DataSourcesFilter;
@ -37,7 +38,7 @@ import org.sleuthkit.datamodel.timeline.TimelineFilter.TextFilter;
/**
*/
public class RootFilterState implements FilterState<RootFilter>, CompoundFilterState< TimelineFilter, RootFilter> {
public class RootFilterState implements CompoundFilterState< TimelineFilter, RootFilter> {
private final CompoundFilterState<EventTypeFilter, EventTypeFilter> eventTypeFilterState;
private final DefaultFilterState<HideKnownFilter> knownFilterState;
@ -194,17 +195,25 @@ public class RootFilterState implements FilterState<RootFilter>, CompoundFilterS
if (getClass() != obj.getClass()) {
return false;
}
RootFilter activeFilter = getActiveFilter();
RootFilter activeFilter1 = ((RootFilterState) obj).getActiveFilter();
return activeFilter.equals(activeFilter1);
RootFilterState otherFilterState = (RootFilterState) obj;
RootFilter activeFilter = getActiveFilter();
RootFilter activeFilter1 = otherFilterState.getActiveFilter();
if (notEqual(activeFilter, activeFilter1)) {
return false;
}
RootFilter filter = getFilter();
RootFilter filter1 = otherFilterState.getFilter();
return filter.equals(filter1);
}
@Override
public int hashCode() {
int hash = 7;
return hash;
return 7;
}
@Override
@ -261,4 +270,4 @@ public class RootFilterState implements FilterState<RootFilter>, CompoundFilterS
public void setSelected(Boolean act) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
}
}

View File

@ -18,6 +18,7 @@
*/
package org.sleuthkit.autopsy.timeline.ui.filtering.datamodel;
import com.google.common.collect.Lists;
import java.util.Collection;
import java.util.Set;
import java.util.stream.Collectors;
@ -51,10 +52,8 @@ public class TagsFilterState extends CompoundFilterStateImpl<TagNameFilter, Tags
@Override
public TagsFilterState copyOf() {
@SuppressWarnings("unchecked")
TagsFilterState copy = new TagsFilterState(getFilter().copyOf(),
getSubFilterStates().stream().map(FilterState::copyOf).collect(Collectors.toList())
);
Lists.transform(getSubFilterStates(), FilterState::copyOf));
copy.setSelected(isSelected());
copy.setDisabled(isDisabled());

View File

@ -55,7 +55,7 @@ public final class FilterUtils {
"application/x-msdos-program"//NON-NLS
);
private static final Set<String> DOCUMENT_MIME_TYPES = ImmutableSet.of(
private static final Set<String> DOCUMENT_MIME_TYPES =ImmutableSet.of(
"text/*", //NON-NLS
"application/rtf", //NON-NLS
"application/pdf", //NON-NLS