Merge remote-tracking branch 'upstream/timeline-event-mgr' into 1126-timeline_event_publishing_names

# Conflicts:
#	Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java
#	Core/src/org/sleuthkit/autopsy/timeline/utils/FilterUtils.java
millmanorama 2018-11-20 20:49:57 +01:00
commit ad64be1d0f
11 changed files with 303 additions and 250 deletions

FilteredEventsModel.java

@@ -32,16 +32,14 @@ import java.util.Set;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 import java.util.logging.Level;
-import javafx.beans.Observable;
+import javafx.beans.InvalidationListener;
 import javafx.beans.property.ReadOnlyObjectProperty;
 import javafx.beans.property.ReadOnlyObjectWrapper;
 import javafx.collections.FXCollections;
-import javafx.collections.ListChangeListener;
-import javafx.collections.MapChangeListener;
 import javafx.collections.ObservableList;
 import javafx.collections.ObservableMap;
 import javafx.collections.ObservableSet;
-import javafx.collections.SetChangeListener;
+import static org.apache.commons.collections4.CollectionUtils.emptyIfNull;
 import static org.apache.commons.lang3.StringUtils.isBlank;
 import org.joda.time.DateTimeZone;
 import org.joda.time.Interval;
@@ -61,6 +59,7 @@ import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent;
 import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent;
 import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
 import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
+import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.TagsFilterState;
 import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl;
 import org.sleuthkit.autopsy.timeline.utils.CheckedFunction;
 import org.sleuthkit.autopsy.timeline.utils.FilterUtils;
@@ -70,12 +69,14 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
 import org.sleuthkit.datamodel.BlackboardArtifactTag;
 import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.datamodel.ContentTag;
+import org.sleuthkit.datamodel.DataSource;
 import org.sleuthkit.datamodel.DescriptionLoD;
 import org.sleuthkit.datamodel.SleuthkitCase;
 import org.sleuthkit.datamodel.Tag;
 import org.sleuthkit.datamodel.TagName;
 import org.sleuthkit.datamodel.TimelineManager;
 import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.datamodel.TskDataException;
 import org.sleuthkit.datamodel.timeline.EventType;
 import org.sleuthkit.datamodel.timeline.EventTypeZoomLevel;
 import org.sleuthkit.datamodel.timeline.TimelineEvent;
@@ -132,7 +133,7 @@ public final class FilteredEventsModel {
     private final LoadingCache<Long, TimelineEvent> idToEventCache;
     private final LoadingCache<ZoomState, Map<EventType, Long>> eventCountsCache;
     /** Map from datasource id to datasource name. */
-    private final ObservableMap<Long, String> datasourcesMap = FXCollections.observableHashMap();
+    private final ObservableMap<Long, DataSource> datasourcesMap = FXCollections.observableHashMap();
     private final ObservableSet< String> hashSets = FXCollections.observableSet();
     private final ObservableList<TagName> tagNames = FXCollections.observableArrayList();
     // end caches
@@ -157,26 +158,19 @@ public final class FilteredEventsModel {
         minCache = CacheBuilder.newBuilder()
                 .build(new CacheLoaderImpl<>(ignored -> eventManager.getMinTime()));
-        datasourcesMap.addListener((MapChangeListener.Change<? extends Long, ? extends String> change) -> {
-            DataSourceFilter dataSourceFilter = new DataSourceFilter(change.getValueAdded(), change.getKey());
-            RootFilterState rootFilter = filterProperty().get();
-            rootFilter.getDataSourcesFilterState().getFilter().getSubFilters().add(dataSourceFilter);
-            requestedFilter.set(rootFilter.copyOf());
-        });
-        hashSets.addListener((SetChangeListener.Change< ? extends String> change) -> {
-            HashSetFilter hashSetFilter = new HashSetFilter(change.getElementAdded());
-            RootFilterState rootFilter = filterProperty().get();
-            rootFilter.getHashHitsFilterState().getFilter().getSubFilters().add(hashSetFilter);
-            requestedFilter.set(rootFilter.copyOf());
-        });
-        tagNames.addListener((ListChangeListener.Change<? extends TagName> change) -> {
-            RootFilterState rootFilter = filterProperty().get();
-            syncTagsFilter(rootFilter);
-            requestedFilter.set(rootFilter.copyOf());
-        });
+        InvalidationListener filterSyncListener = observable -> {
+            RootFilterState rootFilter = filterProperty().get();
+            syncFilters(rootFilter);
+            requestedFilter.set(rootFilter.copyOf());
+        };
+        datasourcesMap.addListener(filterSyncListener);
+        hashSets.addListener(filterSyncListener);
+        tagNames.addListener(filterSyncListener);
         requestedFilter.set(getDefaultFilter());
-        requestedZoomState.addListener((Observable observable) -> {
+        requestedZoomState.addListener(observable -> {
             final ZoomState zoomState = requestedZoomState.get();
             if (zoomState != null) {
@@ -244,18 +238,22 @@ public final class FilteredEventsModel {
     }
     /**
-     * Use the given SleuthkitCase to update the data used to determine the
-     * available filters.
-     *
-     * @param skCase
+     * Update the data used to determine the available filters.
      */
     synchronized private void populateFilterData() throws TskCoreException {
         SleuthkitCase skCase = autoCase.getSleuthkitCase();
         hashSets.addAll(eventManager.getHashSetNames());
+        Set<Long> dataSourceIDs = eventManager.getDataSourceIDs();
         //because there is no way to remove a datasource we only add to this map.
-        for (Long id : eventManager.getDataSourceIDs()) {
-            datasourcesMap.putIfAbsent(id, skCase.getContentById(id).getDataSource().getName());
+        for (Long id : dataSourceIDs) {
+            try {
+                if (datasourcesMap.get(id) == null) {
+                    datasourcesMap.put(id, skCase.getDataSource(id));
+                }
+            } catch (TskDataException ex) {
+                throw new TskCoreException("Error looking up datasource for id " + id, ex);
+            }
         }
         //should this only be tags applied to files or event bearing artifacts?
@@ -263,21 +261,32 @@
     }
     /**
-     * "sync" the given tags filter with the tagnames in use: Disable filters
-     * for tags that are not in use in the case, and add new filters for tags
-     * that don't have them. New filters are selected by default.
+     * "sync" the given root filter with the state of the casee: Disable filters
+     * for tags that are not in use in the case, and add new filters for tags,
+     * hashsets, and datasources. that don't have them. New filters are selected
+     * by default.
      *
     * @param rootFilterState the filter state to modify so it is consistent
    *                        with the tags in use in the case
      */
-    public void syncTagsFilter(RootFilterState rootFilterState) {
-        tagNames.forEach((tagName) -> {
-            rootFilterState.getTagsFilterState().getFilter().addSubFilter(new TagNameFilter(tagName));
-        });
-        for (FilterState<? extends TagNameFilter> filterState : rootFilterState.getTagsFilterState().getSubFilterStates()) {
-            filterState.setDisabled(tagNames.contains(filterState.getFilter().getTagName()) == false);
+    public void syncFilters(RootFilterState rootFilterState) {
+        TagsFilterState tagsFilterState = rootFilterState.getTagsFilterState();
+        for (TagName tagName : tagNames) {
+            tagsFilterState.getFilter().addSubFilter(new TagNameFilter(tagName));
+        }
+        for (FilterState<? extends TagNameFilter> tagFilterState : rootFilterState.getTagsFilterState().getSubFilterStates()) {
+            tagFilterState.setDisabled(tagNames.contains(tagFilterState.getFilter().getTagName()) == false);
+        }
+        DataSourcesFilter dataSourcesFilter = rootFilterState.getDataSourcesFilterState().getFilter();
+        for (Map.Entry<Long, DataSource> entry : datasourcesMap.entrySet()) {
+            dataSourcesFilter.addSubFilter(new DataSourceFilter(entry.getValue().getName(), entry.getKey()));
+        }
+        HashHitsFilter hashSetsFilter = rootFilterState.getHashHitsFilterState().getFilter();
+        for (String hashSet : hashSets) {
+            hashSetsFilter.addSubFilter(new HashSetFilter(hashSet));
         }
     }
@@ -340,7 +349,7 @@ public final class FilteredEventsModel {
     public synchronized RootFilterState getDefaultFilter() {
         DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();
         datasourcesMap.entrySet().forEach(dataSourceEntry
-                -> dataSourcesFilter.addSubFilter(new DataSourceFilter(dataSourceEntry.getValue(), dataSourceEntry.getKey()))
+                -> dataSourcesFilter.addSubFilter(new DataSourceFilter(dataSourceEntry.getValue().getName(), dataSourceEntry.getKey()))
         );
         HashHitsFilter hashHitsFilter = new HashHitsFilter();
@@ -501,30 +510,22 @@ public final class FilteredEventsModel {
         return postTagsAdded(updatedEventIDs);
     }
-    synchronized public boolean handleContentTagDeleted(ContentTagDeletedEvent evt) {
+    synchronized public boolean handleContentTagDeleted(ContentTagDeletedEvent evt) throws TskCoreException {
         DeletedContentTagInfo deletedTagInfo = evt.getDeletedTagInfo();
-        try {
         Content content = autoCase.getSleuthkitCase().getContentById(deletedTagInfo.getContentID());
         boolean tagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false;
         Set<Long> updatedEventIDs = deleteTag(content.getId(), null, deletedTagInfo.getTagID(), tagged);
         return postTagsDeleted(updatedEventIDs);
-        } catch (TskCoreException ex) {
-            logger.log(Level.SEVERE, "unable to determine tagged status of content.", ex); //NON-NLS
-        }
-        return false;
     }
-    synchronized public boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt) {
+    synchronized public boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt) throws TskCoreException {
         DeletedBlackboardArtifactTagInfo deletedTagInfo = evt.getDeletedTagInfo();
-        try {
         BlackboardArtifact artifact = autoCase.getSleuthkitCase().getBlackboardArtifact(deletedTagInfo.getArtifactID());
         boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false;
        Set<Long> updatedEventIDs = deleteTag(artifact.getObjectID(), artifact.getArtifactID(), deletedTagInfo.getTagID(), tagged);
        return postTagsDeleted(updatedEventIDs);
-        } catch (TskCoreException ex) {
-            logger.log(Level.SEVERE, "unable to determine tagged status of artifact.", ex); //NON-NLS
-        }
-        return false;
     }
     /**
@@ -664,21 +665,26 @@ public final class FilteredEventsModel {
         return updatedEventIDs;
     }
-    synchronized void invalidateAllCaches() {
+    /**
+     * Invalidate the timeline caches for the given event IDs. Also forces the
+     * filter values to be updated with any new values from the case data.( data
+     * sources, tags, etc)
+     *
+     * @param updatedEventIDs A collection of the event IDs whose cached event
+     *                        objects should be invalidated. Can be null or an
+     *                        empty sett to invalidate the general caches, such
+     *                        as min/max time, or the counts per event type.
+     *
+     * @throws TskCoreException
+     */
+    public synchronized void invalidateCaches(Collection<Long> updatedEventIDs) throws TskCoreException {
         minCache.invalidateAll();
         maxCache.invalidateAll();
-        idToEventCache.invalidateAll();
-        invalidateCaches(Collections.emptyList());
-    }
-    synchronized private void invalidateCaches(Collection<Long> updatedEventIDs) {
-        idToEventCache.invalidateAll(updatedEventIDs);
-        eventCountsCache.invalidateAll();
-        try {
-            populateFilterData();
-        } catch (TskCoreException ex) {
-            logger.log(Level.SEVERE, "Failed topopulate filter data.", ex); //NON-NLS
-        }
+        idToEventCache.invalidateAll(emptyIfNull(updatedEventIDs));
+        eventCountsCache.invalidateAll();
+        populateFilterData();
         eventbus.post(new CacheInvalidatedEvent());
     }
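The change above hinges on the fact that a single javafx.beans.InvalidationListener can be attached to any JavaFX Observable, so the three per-collection listeners collapse into one re-sync callback. A minimal, self-contained sketch of that pattern is below; the class and collection names are illustrative only and do not come from the patch.

import javafx.beans.InvalidationListener;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.collections.ObservableMap;
import javafx.collections.ObservableSet;

public class FilterSyncSketch {

    public static void main(String[] args) {
        ObservableMap<Long, String> dataSources = FXCollections.observableHashMap();
        ObservableSet<String> hashSets = FXCollections.observableSet();
        ObservableList<String> tagNames = FXCollections.observableArrayList();

        // One listener re-syncs everything no matter which collection changed;
        // unlike the old per-collection listeners, it never inspects the change itself.
        InvalidationListener syncListener = observable -> System.out.println("re-sync filters");

        dataSources.addListener(syncListener);
        hashSets.addListener(syncListener);
        tagNames.addListener(syncListener);

        dataSources.put(1L, "image.dd"); // prints "re-sync filters"
        hashSets.add("NSRL");            // prints "re-sync filters"
        tagNames.add("Follow Up");       // prints "re-sync filters"
    }
}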

TimeLineController.java

@@ -19,14 +19,18 @@
 package org.sleuthkit.autopsy.timeline;
 import com.google.common.eventbus.EventBus;
+import com.google.common.util.concurrent.Futures;
+import com.google.common.util.concurrent.ListenableFuture;
+import com.google.common.util.concurrent.ListeningExecutorService;
+import com.google.common.util.concurrent.MoreExecutors;
 import java.beans.PropertyChangeEvent;
 import java.time.ZoneId;
 import java.util.Collection;
 import java.util.Collections;
+import static java.util.Collections.singleton;
 import java.util.Optional;
 import java.util.TimeZone;
 import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.logging.Level;
 import javafx.application.Platform;
@@ -40,7 +44,6 @@ import javafx.beans.property.ReadOnlyObjectProperty;
 import javafx.beans.property.ReadOnlyObjectWrapper;
 import javafx.beans.property.ReadOnlyStringProperty;
 import javafx.beans.property.ReadOnlyStringWrapper;
-import javafx.beans.value.ObservableValue;
 import javafx.collections.FXCollections;
 import javafx.collections.ObservableList;
 import javafx.collections.ObservableSet;
@@ -68,14 +71,18 @@ import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;
 import org.sleuthkit.autopsy.coreutils.History;
 import org.sleuthkit.autopsy.coreutils.LoggedTask;
 import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
 import org.sleuthkit.autopsy.coreutils.ThreadConfined;
 import org.sleuthkit.autopsy.events.AutopsyEvent;
 import org.sleuthkit.autopsy.ingest.IngestManager;
 import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
+import org.sleuthkit.autopsy.timeline.events.EventAddedEvent;
 import org.sleuthkit.autopsy.timeline.events.ViewInTimelineRequestedEvent;
 import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.DetailViewEvent;
+import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
 import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
 import org.sleuthkit.autopsy.timeline.utils.IntervalUtils;
+import org.sleuthkit.autopsy.timeline.zooming.TimeUnits;
 import org.sleuthkit.autopsy.timeline.zooming.ZoomState;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.BlackboardArtifact;
@@ -83,24 +90,21 @@ import org.sleuthkit.datamodel.DescriptionLoD;
 import org.sleuthkit.datamodel.TskCoreException;
 import org.sleuthkit.datamodel.timeline.EventType;
 import org.sleuthkit.datamodel.timeline.EventTypeZoomLevel;
-import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
-import org.sleuthkit.autopsy.timeline.zooming.TimeUnits;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.DescriptionFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.EventTypeFilter;
 /**
  * Controller in the MVC design along with FilteredEventsModel TimeLineView.
  * Forwards interpreted user gestures form views to model. Provides model to
- * view. Is entry point for timeline module.
+ * view.
  *
  * Concurrency Policy:<ul>
  * <li>Since filteredEvents is internally synchronized, only compound access to
 * it needs external synchronization</li>
- * * <li>Since eventsRepository is internally synchronized, only compound
- * access to it needs external synchronization <li>
- * <li>Other state including mainFrame, viewMode, and the
- * listeners should only be accessed with this object's intrinsic lock held, or
- * on the EDT as indicated.
+ *
+ * <li>Other state including topComponent, viewMode, and the listeners should
+ * only be accessed with this object's intrinsic lock held, or on the EDT as
+ * indicated.
  * </li>
 * </ul>
 */
@@ -112,6 +116,15 @@ public class TimeLineController {
     private static final ReadOnlyObjectWrapper<TimeZone> timeZone = new ReadOnlyObjectWrapper<>(TimeZone.getDefault());
+    private final ListeningExecutorService executor = MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor());
+    private final ReadOnlyListWrapper<Task<?>> tasks = new ReadOnlyListWrapper<>(FXCollections.observableArrayList());
+    private final ReadOnlyDoubleWrapper taskProgress = new ReadOnlyDoubleWrapper(-1);
+    private final ReadOnlyStringWrapper taskMessage = new ReadOnlyStringWrapper();
+    private final ReadOnlyStringWrapper taskTitle = new ReadOnlyStringWrapper();
+    private final ReadOnlyStringWrapper statusMessage = new ReadOnlyStringWrapper();
+    private final EventBus eventbus = new EventBus("TimeLineController_EventBus");
     public static ZoneId getTimeZoneID() {
         return timeZone.get().toZoneId();
     }
@@ -128,19 +141,6 @@ public class TimeLineController {
         return timeZone.getReadOnlyProperty();
     }
-    private final ExecutorService executor = Executors.newSingleThreadExecutor();
-    private final ReadOnlyListWrapper<Task<?>> tasks = new ReadOnlyListWrapper<>(FXCollections.observableArrayList());
-    private final ReadOnlyDoubleWrapper taskProgress = new ReadOnlyDoubleWrapper(-1);
-    private final ReadOnlyStringWrapper taskMessage = new ReadOnlyStringWrapper();
-    private final ReadOnlyStringWrapper taskTitle = new ReadOnlyStringWrapper();
-    private final ReadOnlyStringWrapper statusMessage = new ReadOnlyStringWrapper();
-    private final EventBus eventbus = new EventBus("TimeLineController_EventBus");
     /**
      * Status is a string that will be displayed in the status bar as a kind of
      * user hint/information when it is not empty
@@ -189,7 +189,6 @@ public class TimeLineController {
     @ThreadConfined(type = ThreadConfined.ThreadType.AWT)
     private TimeLineTopComponent topComponent;
-    @GuardedBy("this")
     private final ReadOnlyObjectWrapper<ViewMode> viewMode = new ReadOnlyObjectWrapper<>(ViewMode.COUNTS);
@@ -282,10 +281,11 @@ public class TimeLineController {
          * TimeLineController. Do we need to do this with datasource or hash hit
          * filters?
          */
-        historyManager.currentState().addListener((ObservableValue<? extends ZoomState> observable, ZoomState oldValue, ZoomState newValue) -> {
-            ZoomState historyManagerParams = newValue;
-            filteredEvents.syncTagsFilter(historyManagerParams.getFilterState());
-            currentParams.set(historyManagerParams);
+        historyManager.currentState().addListener((observable, oldState, newState) -> {
+            ZoomState historyManagerState = newState;
+            filteredEvents.syncFilters(historyManagerState.getFilterState());
+            currentParams.set(historyManagerState);
         });
         try {
@@ -571,9 +571,9 @@ public class TimeLineController {
     synchronized public void pushFilters(RootFilterState filter) {
         ZoomState currentZoom = filteredEvents.zoomStateProperty().get();
         if (currentZoom == null) {
-            advance(InitialZoomState.withFilterState(filter.copyOf()));
+            advance(InitialZoomState.withFilterState(filter));
         } else if (currentZoom.hasFilterState(filter) == false) {
-            advance(currentZoom.withFilterState(filter.copyOf()));
+            advance(currentZoom.withFilterState(filter));
         }
     }
@@ -665,6 +665,7 @@ public class TimeLineController {
             taskTitle.bind(task.titleProperty());
             switch (task.getState()) {
                 case READY:
+                    //TODO: Check future result for errors....
                     executor.submit(task);
                     break;
                 case SCHEDULED:
@@ -709,76 +710,94 @@
     }
     void handleIngestModuleEvent(PropertyChangeEvent evt) {
         /**
-         * Checking for a current case is a stop gap measure until a
-         * different way of handling the closing of cases is worked out.
-         * Currently, remote events may be received for a case that is
-         * already closed.
+         * Checking for a current case is a stop gap measure until a different
+         * way of handling the closing of cases is worked out. Currently, remote
+         * events may be received for a case that is already closed.
         */
        try {
            Case.getCurrentCaseThrows();
        } catch (NoCurrentCaseException notUsed) {
            // Case is closed, do nothing.
            return;
        }
        // ignore remote events. The node running the ingest should update the Case DB
        // @@@ We should signal though that there is more data and flush caches...
        if (((AutopsyEvent) evt).getSourceType() == AutopsyEvent.SourceType.REMOTE) {
            return;
        }
        switch (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName())) {
            case CONTENT_CHANGED:
                // new files were already added to the events table from SleuthkitCase.
                break;
            case DATA_ADDED:
                ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue();
                if (null != eventData && eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID()) {
-                    executor.submit(() -> filteredEvents.setHashHit(eventData.getArtifacts(), true));
+                    logFutureException(executor.submit(() -> filteredEvents.setHashHit(eventData.getArtifacts(), true)),
+                            "Error executing task in response to DATA_ADDED event.",
+                            "Error executing response to new data.");
                }
                break;
            case FILE_DONE:
                /*
                 * Since the known state or hash hit state may have changed
                 * invalidate caches.
                 */
                //@@@ This causes HUGE slow downs during ingest when TL is open.
                // executor.submit(filteredEvents::invalidateAllCaches);
                // known state should have been udpated automatically via SleuthkitCase.setKnown();
                // hashes should have been updated from event
        }
     }
     void handleCaseEvent(PropertyChangeEvent evt) {
+        ListenableFuture<?> future = Futures.immediateFuture(null);
        switch (Case.Events.valueOf(evt.getPropertyName())) {
            case BLACKBOARD_ARTIFACT_TAG_ADDED:
-                executor.submit(() -> filteredEvents.handleArtifactTagAdded((BlackBoardArtifactTagAddedEvent) evt));
+                future = executor.submit(() -> filteredEvents.handleArtifactTagAdded((BlackBoardArtifactTagAddedEvent) evt));
                break;
            case BLACKBOARD_ARTIFACT_TAG_DELETED:
-                executor.submit(() -> filteredEvents.handleArtifactTagDeleted((BlackBoardArtifactTagDeletedEvent) evt));
+                future = executor.submit(() -> filteredEvents.handleArtifactTagDeleted((BlackBoardArtifactTagDeletedEvent) evt));
                break;
            case CONTENT_TAG_ADDED:
-                executor.submit(() -> filteredEvents.handleContentTagAdded((ContentTagAddedEvent) evt));
+                future = executor.submit(() -> filteredEvents.handleContentTagAdded((ContentTagAddedEvent) evt));
                break;
            case CONTENT_TAG_DELETED:
-                executor.submit(() -> filteredEvents.handleContentTagDeleted((ContentTagDeletedEvent) evt));
+                future = executor.submit(() -> filteredEvents.handleContentTagDeleted((ContentTagDeletedEvent) evt));
                break;
-            case DATA_SOURCE_ADDED:
-                executor.submit(() -> filteredEvents.postAutopsyEventLocally((AutopsyEvent) evt));
-                break;
            case CURRENT_CASE:
                //close timeline on case changes.
                SwingUtilities.invokeLater(TimeLineController.this::shutDownTimeLine);
                break;
+            case DATA_SOURCE_ADDED:
+                future = executor.submit(() -> {
+                    filteredEvents.invalidateCaches(null);
+                    return null;
+                });
+                break;
            case TIMELINE_EVENT_ADDED:
-                executor.submit(filteredEvents::invalidateAllCaches);
+                future = executor.submit(() -> {
+                    filteredEvents.invalidateCaches(singleton(((EventAddedEvent) evt).getAddedEventID()));
+                    return null;
+                });
                break;
        }
+        logFutureException(future,
+                "Error executing task in response to " + evt.getPropertyName() + " event.",
+                "Error executing task in response to case event.");
+    }
+
+    private void logFutureException(ListenableFuture<?> future, String errorLogMessage, String errorUserMessage) {
+        future.addListener(() -> {
+            try {
+                future.get();
+            } catch (InterruptedException | ExecutionException ex) {
+                logger.log(Level.SEVERE, errorLogMessage, ex);
+                MessageNotifyUtil.Message.error(errorUserMessage);
+            }
+        }, MoreExecutors.directExecutor());
     }
 }
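The logFutureException helper introduced above relies on Guava's ListenableFuture: a completion listener runs on a direct executor and calls get() to surface any exception the background task threw. A minimal, runnable sketch of that pattern follows; the class name and logger setup are illustrative, not taken from Autopsy.

import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import java.util.logging.Logger;

public class FutureLoggingSketch {

    private static final Logger LOGGER = Logger.getLogger(FutureLoggingSketch.class.getName());

    public static void main(String[] args) {
        // Decorating a plain single-thread executor yields futures that accept listeners.
        ListeningExecutorService executor
                = MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor());

        Runnable failingTask = () -> {
            throw new IllegalStateException("simulated task failure");
        };
        ListenableFuture<?> future = executor.submit(failingTask);

        // Same shape as logFutureException(...): the listener fires on completion,
        // and future.get() rethrows the failure wrapped in an ExecutionException.
        future.addListener(() -> {
            try {
                future.get();
            } catch (InterruptedException | ExecutionException ex) {
                LOGGER.log(Level.SEVERE, "Task failed", ex);
            }
        }, MoreExecutors.directExecutor());

        executor.shutdown();
    }
}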

TimeLineModule.java

@@ -20,18 +20,20 @@ package org.sleuthkit.autopsy.timeline;
 import java.beans.PropertyChangeEvent;
 import java.beans.PropertyChangeListener;
+import java.util.logging.Level;
 import javafx.application.Platform;
+import javax.swing.SwingUtilities;
 import org.sleuthkit.autopsy.casemodule.Case;
 import static org.sleuthkit.autopsy.casemodule.Case.Events.CURRENT_CASE;
 import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
 import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
 import org.sleuthkit.autopsy.ingest.IngestManager;
 import org.sleuthkit.datamodel.TskCoreException;
 /**
  * Manages listeners and the controller.
  *
 */
 public class TimeLineModule {
@@ -48,17 +50,18 @@ public class TimeLineModule {
     /**
      * Get instance of the controller for the current case
-     * @return
-     * @throws NoCurrentCaseException
+     *
+     * @return the controller for the current case.
+     *
+     * @throws NoCurrentCaseException If there is no case open.
+     * @throws TskCoreException       If there was a problem accessing the case
+     *                                database.
+     *
      */
-    public static TimeLineController getController() throws NoCurrentCaseException {
+    public static TimeLineController getController() throws NoCurrentCaseException, TskCoreException {
         synchronized (controllerLock) {
             if (controller == null) {
-                try {
-                    controller = new TimeLineController(Case.getCurrentCaseThrows());
-                } catch (NoCurrentCaseException | TskCoreException ex) {
-                    throw new NoCurrentCaseException("Error getting TimeLineController for the current case.", ex);
-                }
+                controller = new TimeLineController(Case.getCurrentCaseThrows());
             }
             return controller;
         }
@@ -84,19 +87,21 @@ public class TimeLineModule {
         @Override
         public void propertyChange(PropertyChangeEvent evt) {
             try {
-                TimeLineController tlController = getController();
-                tlController.handleCaseEvent(evt);
+                getController().handleCaseEvent(evt);
             } catch (NoCurrentCaseException ex) {
                 // ignore
                 return;
+            } catch (TskCoreException ex) {
+                MessageNotifyUtil.Message.error("Error creating timeline controller.");
+                logger.log(Level.SEVERE, "Error creating timeline controller", ex);
             }
             if (Case.Events.valueOf(evt.getPropertyName()).equals(CURRENT_CASE)) {
                 // we care only about case closing here
                 if (evt.getNewValue() == null) {
                     synchronized (controllerLock) {
                         if (controller != null) {
-                            controller.shutDownTimeLine();
+                            SwingUtilities.invokeLater(controller::shutDownTimeLine);
                         }
                         controller = null;
                     }
@@ -113,11 +118,13 @@ public class TimeLineModule {
         @Override
         public void propertyChange(PropertyChangeEvent evt) {
             try {
-                TimeLineController tlController = getController();
-                tlController.handleIngestModuleEvent(evt);
+                getController().handleIngestModuleEvent(evt);
             } catch (NoCurrentCaseException ex) {
                 // ignore
                 return;
+            } catch (TskCoreException ex) {
+                MessageNotifyUtil.Message.error("Error creating timeline controller.");
+                logger.log(Level.SEVERE, "Error creating timeline controller", ex);
             }
         }
     }

TimelineEventAddedEvent.java

@@ -80,4 +80,13 @@ public class TimelineEventAddedEvent extends AutopsyEvent {
     public TimelineEvent getAddedEvent() {
         return getNewValue();
     }
+
+    /**
+     * Gets the Id of the event that was added.
+     *
+     * @return The Id of the event that was added.
+     */
+    public long getAddedEventID() {
+        return (long) super.getNewValue();
+    }
 }

ViewFrame.java

@@ -511,13 +511,13 @@ final public class ViewFrame extends BorderPane {
      * NOTE: This ViewFrame must be registered with the filteredEventsModel's
      * EventBus in order for this handler to be invoked.
      *
-     * @param event The DataSourceAnalysisCompletedEvent to handle.
+     * @param event The CacheInvalidatedEvent to handle.
      */
     @Subscribe
     @NbBundle.Messages({
         "# {0} - datasource name",
         "ViewFrame.notification.analysisComplete=The event data has changed, the visualization may be out of date."})
-    public void handleEventAdded(FilteredEventsModel.CacheInvalidatedEvent event) {
+    public void handleCacheInvalidated(FilteredEventsModel.CacheInvalidatedEvent event) {
         Platform.runLater(() -> {
             if (hostedView.needsRefresh() == false) {
                 hostedView.setNeedsRefresh();
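The NOTE in the Javadoc above refers to Guava's EventBus dispatch: a @Subscribe method only fires if its owning object is registered on the bus that posts the event. The sketch below illustrates that mechanism in isolation; the CacheInvalidatedEvent stand-in and class names are assumptions for the example, not Autopsy code.

import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;

public class EventBusSketch {

    // Stand-in for FilteredEventsModel.CacheInvalidatedEvent.
    static class CacheInvalidatedEvent {
    }

    static class Frame {

        @Subscribe
        public void handleCacheInvalidated(CacheInvalidatedEvent event) {
            System.out.println("mark view as needing refresh");
        }
    }

    public static void main(String[] args) {
        EventBus eventbus = new EventBus("sketch");
        Frame frame = new Frame();
        eventbus.register(frame);                   // handlers fire only for registered objects
        eventbus.post(new CacheInvalidatedEvent()); // dispatched by type to handleCacheInvalidated()
    }
}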

FilterSetPanel.java

@@ -20,7 +20,7 @@ package org.sleuthkit.autopsy.timeline.ui.filtering;
 import java.util.Arrays;
 import javafx.application.Platform;
-import javafx.beans.Observable;
+import javafx.beans.InvalidationListener;
 import javafx.beans.binding.Bindings;
 import javafx.collections.FXCollections;
 import javafx.collections.ObservableMap;
@@ -116,14 +116,17 @@ final public class FilterSetPanel extends BorderPane {
         legendColumn.setCellFactory(col -> new LegendCell(this.controller));
         //type is the only filter expanded initialy
-        expansionMap.put(controller.getEventsModel().getFilterState().getFilter(), true);
-        expansionMap.put(controller.getEventsModel().getFilterState().getEventTypeFilterState().getFilter(), true);
-        this.filteredEvents.eventTypeZoomProperty().addListener((Observable observable) -> applyFilters());
-        this.filteredEvents.descriptionLODProperty().addListener((Observable observable1) -> applyFilters());
-        this.filteredEvents.timeRangeProperty().addListener((Observable observable2) -> applyFilters());
-        this.filteredEvents.filterProperty().addListener((observable, oldValue, newValue) -> refresh());
+        expansionMap.put(filteredEvents.getFilterState().getFilter(), true);
+        expansionMap.put(filteredEvents.getFilterState().getEventTypeFilterState().getFilter(), true);
+        InvalidationListener applyFiltersListener = observable -> applyFilters();
+        filteredEvents.eventTypeZoomProperty().addListener(applyFiltersListener);
+        filteredEvents.descriptionLODProperty().addListener(applyFiltersListener);
+        filteredEvents.timeRangeProperty().addListener(applyFiltersListener);
+        filteredEvents.filterProperty().addListener(observable -> refresh());
         refresh();
         hiddenDescriptionsListView.setItems(controller.getQuickHideFilters());
@@ -164,13 +167,13 @@ final public class FilterSetPanel extends BorderPane {
     private void refresh() {
         Platform.runLater(() -> {
-            filterTreeTable.setRoot(new FilterTreeItem(filteredEvents.getFilterState().copyOf(), expansionMap));
+            filterTreeTable.setRoot(new FilterTreeItem(filteredEvents.filterProperty().get().copyOf(), expansionMap));
         });
     }
     private void applyFilters() {
         Platform.runLater(() -> {
-            controller.pushFilters(((RootFilterState) filterTreeTable.getRoot().getValue()));
+            controller.pushFilters(((RootFilterState) filterTreeTable.getRoot().getValue().copyOf()));
         });
     }

FilterTreeItem.java

@@ -18,8 +18,6 @@
  */
 package org.sleuthkit.autopsy.timeline.ui.filtering;
-import javafx.beans.InvalidationListener;
-import javafx.beans.Observable;
 import javafx.collections.ListChangeListener;
 import javafx.collections.ObservableMap;
 import javafx.scene.control.TreeItem;
@@ -74,12 +72,9 @@ final public class FilterTreeItem extends TreeItem<FilterState<?>> {
             }
         });
-        compoundFilter.selectedProperty().addListener(new InvalidationListener() {
-            @Override
-            public void invalidated(Observable observable) {
-                if (compoundFilter.isSelected()) {
-                    setExpanded(true);
-                }
+        compoundFilter.selectedProperty().addListener( observable -> {
+            if (compoundFilter.isSelected()) {
+                setExpanded(true);
             }
         });
     }

CompoundFilterStateImpl.java

@@ -18,6 +18,7 @@
  */
 package org.sleuthkit.autopsy.timeline.ui.filtering.datamodel;
+import com.google.common.collect.Lists;
 import java.util.Collection;
 import java.util.List;
 import java.util.stream.Collectors;
@@ -27,9 +28,17 @@ import javafx.collections.ObservableList;
 import org.sleuthkit.datamodel.timeline.TimelineFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.CompoundFilter;
-class CompoundFilterStateImpl<SubFilterType extends TimelineFilter, C extends CompoundFilter<SubFilterType>>
-        extends DefaultFilterState<C>
-        implements CompoundFilterState<SubFilterType, C> {
+/**
+ *
+ * Defualt implementation of CompoundFilterState
+ *
+ * @param <SubFilterType> The type of the subfilters in the underlying
+ *                        CompoundFilter
+ * @param <FilterType>    The type of the underlying CompoundFilter
+ */
+class CompoundFilterStateImpl<SubFilterType extends TimelineFilter, FilterType extends CompoundFilter<SubFilterType>>
+        extends DefaultFilterState<FilterType>
+        implements CompoundFilterState<SubFilterType, FilterType> {
     private final ObservableList<FilterState<SubFilterType>> subFilterStates = FXCollections.observableArrayList();
@@ -39,14 +48,9 @@ class CompoundFilterStateImpl<SubFilterType extends TimelineFilter, C extends Co
      *
      * @param filter The CompoundFilter this will represent the state of.
      */
-    CompoundFilterStateImpl(C filter) {
+    CompoundFilterStateImpl(FilterType filter) {
         super(filter);
-        filter.getSubFilters().forEach(this::addSubFilterState);
-        filter.getSubFilters().addListener((ListChangeListener.Change<? extends SubFilterType> change) -> {
-            while (change.next()) {
-                change.getAddedSubList().forEach(CompoundFilterStateImpl.this::addSubFilterState);
-            }
-        });
+        filter.getSubFilters().forEach(this::addStateForSubFilter);
         configureListeners();
     }
@@ -60,7 +64,7 @@ class CompoundFilterStateImpl<SubFilterType extends TimelineFilter, C extends Co
      * of.
     * @param subFilterStates The filter states to use as the sub filter states.
     */
-    CompoundFilterStateImpl(C filter, Collection<FilterState<SubFilterType>> subFilterStates) {
+    CompoundFilterStateImpl(FilterType filter, Collection<FilterState<SubFilterType>> subFilterStates) {
         super(filter);
         subFilterStates.forEach(this::addSubFilterState);
@@ -68,6 +72,13 @@ class CompoundFilterStateImpl<SubFilterType extends TimelineFilter, C extends Co
     }
     private void configureListeners() {
+        //Add a new subfilterstate whenever the underlying subfilters change.
+        getFilter().getSubFilters().addListener((ListChangeListener.Change<? extends SubFilterType> change) -> {
+            while (change.next()) {
+                change.getAddedSubList().forEach(this::addStateForSubFilter);
+            }
+        });
         /*
          * enforce the following relationship between a compound filter and its
         * subfilters: if a compound filter's active property changes, disable
@@ -77,10 +88,9 @@ class CompoundFilterStateImpl<SubFilterType extends TimelineFilter, C extends Co
         disableSubFiltersIfNotActive();
         selectedProperty().addListener(selectedProperty -> {
             if (isSelected() && getSubFilterStates().stream().noneMatch(FilterState::isSelected)) {
-                getSubFilterStates().forEach(subFilterState -> subFilterState.setSelected(true));
+                subFilterStates.forEach(subFilterState -> subFilterState.setSelected(true));
             }
         });
     }
@@ -91,28 +101,25 @@ class CompoundFilterStateImpl<SubFilterType extends TimelineFilter, C extends Co
     private void disableSubFiltersIfNotActive() {
         boolean inactive = isActive() == false;
-        for (FilterState<SubFilterType> subFilter : getSubFilterStates()) {
-            subFilter.setDisabled(inactive);
-        }
+        subFilterStates.forEach(subFilterState -> subFilterState.setDisabled(inactive));
     }
     @SuppressWarnings("unchecked")
-    private <X extends TimelineFilter, S extends CompoundFilter<X>> void addSubFilterState(SubFilterType subFilter) {
+    private void addStateForSubFilter(SubFilterType subFilter) {
         if (subFilter instanceof CompoundFilter<?>) {
-            addSubFilterState((FilterState<SubFilterType>) new CompoundFilterStateImpl<>((S) subFilter));
+            addSubFilterState((FilterState<SubFilterType>) new CompoundFilterStateImpl<>((CompoundFilter<?>) subFilter));
         } else {
             addSubFilterState(new DefaultFilterState<>(subFilter));
         }
     }
-    private void addSubFilterState(FilterState<SubFilterType> newFilterModel) {
-        getSubFilterStates().add(newFilterModel);
-        newFilterModel.selectedProperty().addListener(selectedProperty -> {
-            //set this compound filter model selected af any of the subfilters are selected.
-            setSelected(getSubFilterStates().stream().anyMatch(FilterState::isSelected));
+    private void addSubFilterState(FilterState<SubFilterType> newSubFilterState) {
+        subFilterStates.add(newSubFilterState);
+        newSubFilterState.selectedProperty().addListener(selectedProperty -> {
+            //set this compound filter state selected af any of the subfilters are selected.
+            setSelected(subFilterStates.stream().anyMatch(FilterState::isSelected));
         });
+        newSubFilterState.setDisabled(isActive() ==false);
     }
     @Override
@@ -121,12 +128,11 @@ class CompoundFilterStateImpl<SubFilterType extends TimelineFilter, C extends Co
     }
     @Override
-    public CompoundFilterStateImpl<SubFilterType, C> copyOf() {
+    public CompoundFilterStateImpl<SubFilterType, FilterType> copyOf() {
         @SuppressWarnings("unchecked")
-        CompoundFilterStateImpl<SubFilterType, C> copy = new CompoundFilterStateImpl<>((C) getFilter().copyOf(),
-                getSubFilterStates().stream().map(FilterState::copyOf).collect(Collectors.toList())
-        );
+        CompoundFilterStateImpl<SubFilterType, FilterType> copy
+                = new CompoundFilterStateImpl<>((FilterType) getFilter().copyOf(),
+                        Lists.transform(subFilterStates, FilterState::copyOf));
         copy.setSelected(isSelected());
         copy.setDisabled(isDisabled());
@@ -135,19 +141,19 @@ class CompoundFilterStateImpl<SubFilterType extends TimelineFilter, C extends Co
     @Override
     @SuppressWarnings("unchecked")
-    public C getActiveFilter() {
+    public FilterType getActiveFilter() {
         if (isActive() == false) {
             return null;
         }
-        List<SubFilterType> activeSubFilters = getSubFilterStates().stream()
+        List<SubFilterType> activeSubFilters = subFilterStates.stream()
                 .filter(FilterState::isActive)
                 .map(FilterState::getActiveFilter)
                 .collect(Collectors.toList());
-        C copy = (C) getFilter().copyOf();
+        FilterType copy = (FilterType) getFilter().copyOf();
         copy.getSubFilters().clear();
         copy.getSubFilters().addAll(activeSubFilters);
         return copy;
     }
 }
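One behavioral detail worth noting: copyOf() now builds the sub-filter copies with Guava's Lists.transform instead of a stream collect. Lists.transform returns a lazy view rather than an eager copy, which is fine here because the copy constructor iterates it once, but it is a real semantic difference. A small illustration, under the assumption that Guava is on the classpath (the class and variable names are invented for the example):

import com.google.common.collect.Lists;
import java.util.ArrayList;
import java.util.List;

public class TransformSketch {

    public static void main(String[] args) {
        List<String> names = new ArrayList<>(List.of("alpha", "beta"));

        // Lists.transform returns a lazy *view*: elements are converted on access,
        // and later changes to the backing list show through, unlike
        // stream().map(...).collect(toList()), which snapshots the input.
        List<Integer> lengths = Lists.transform(names, String::length);

        System.out.println(lengths); // [5, 4]
        names.add("gamma");
        System.out.println(lengths); // [5, 4, 5] - reflects the later addition
    }
}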

RootFilterState.java

@@ -18,11 +18,12 @@
  */
 package org.sleuthkit.autopsy.timeline.ui.filtering.datamodel;
+import com.google.common.collect.Lists;
 import javafx.beans.property.ReadOnlyBooleanProperty;
 import javafx.beans.property.ReadOnlyBooleanWrapper;
 import javafx.collections.FXCollections;
 import javafx.collections.ObservableList;
-import org.python.google.common.collect.Lists;
+import static org.apache.commons.lang3.ObjectUtils.notEqual;
 import org.sleuthkit.datamodel.timeline.TimelineFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.DataSourceFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.DataSourcesFilter;
@@ -37,7 +38,7 @@ import org.sleuthkit.datamodel.timeline.TimelineFilter.TextFilter;
 /**
 */
-public class RootFilterState implements FilterState<RootFilter>, CompoundFilterState< TimelineFilter, RootFilter> {
+public class RootFilterState implements CompoundFilterState< TimelineFilter, RootFilter> {
     private final CompoundFilterState<EventTypeFilter, EventTypeFilter> eventTypeFilterState;
     private final DefaultFilterState<HideKnownFilter> knownFilterState;
@@ -194,17 +195,25 @@ public class RootFilterState implements FilterState<RootFilter>, CompoundFilterS
         if (getClass() != obj.getClass()) {
             return false;
         }
-        RootFilter activeFilter = getActiveFilter();
-        RootFilter activeFilter1 = ((RootFilterState) obj).getActiveFilter();
-        return activeFilter.equals(activeFilter1);
+        RootFilterState otherFilterState = (RootFilterState) obj;
+        RootFilter activeFilter = getActiveFilter();
+        RootFilter activeFilter1 = otherFilterState.getActiveFilter();
+        if (notEqual(activeFilter, activeFilter1)) {
+            return false;
+        }
+        RootFilter filter = getFilter();
+        RootFilter filter1 = otherFilterState.getFilter();
+        return filter.equals(filter1);
     }
     @Override
     public int hashCode() {
-        int hash = 7;
-        return hash;
+        return 7;
     }
     @Override
@@ -261,4 +270,4 @@ public class RootFilterState implements FilterState<RootFilter>, CompoundFilterS
     public void setSelected(Boolean act) {
         throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
     }
 }
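The rewritten equals() above leans on commons-lang3's ObjectUtils.notEqual, which is null-safe, so a null active filter on either side no longer risks a NullPointerException the way the old activeFilter.equals(...) call did. A tiny sketch of that behavior (class name invented for the example):

import static org.apache.commons.lang3.ObjectUtils.notEqual;

public class NotEqualSketch {

    public static void main(String[] args) {
        // notEqual tolerates nulls on either side, unlike calling equals() on a
        // possibly-null reference.
        System.out.println(notEqual(null, "a"));  // true
        System.out.println(notEqual(null, null)); // false
        System.out.println(notEqual("a", "a"));   // false
    }
}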

TagsFilterState.java

@@ -18,6 +18,7 @@
  */
 package org.sleuthkit.autopsy.timeline.ui.filtering.datamodel;
+import com.google.common.collect.Lists;
 import java.util.Collection;
 import java.util.Set;
 import java.util.stream.Collectors;
@@ -51,10 +52,8 @@ public class TagsFilterState extends CompoundFilterStateImpl<TagNameFilter, Tags
     @Override
     public TagsFilterState copyOf() {
-        @SuppressWarnings("unchecked")
         TagsFilterState copy = new TagsFilterState(getFilter().copyOf(),
-                getSubFilterStates().stream().map(FilterState::copyOf).collect(Collectors.toList())
-        );
+                Lists.transform(getSubFilterStates(), FilterState::copyOf));
         copy.setSelected(isSelected());
         copy.setDisabled(isDisabled());

FilterUtils.java

@@ -55,7 +55,7 @@ public final class FilterUtils {
             "application/x-msdos-program"//NON-NLS
     );
-    private static final Set<String> DOCUMENT_MIME_TYPES = ImmutableSet.of(
+    private static final Set<String> DOCUMENT_MIME_TYPES =ImmutableSet.of(
             "text/*", //NON-NLS
             "application/rtf", //NON-NLS
             "application/pdf", //NON-NLS