diff --git a/Core/src/org/sleuthkit/autopsy/timeline/EventsModel.java b/Core/src/org/sleuthkit/autopsy/timeline/EventsModel.java new file mode 100755 index 0000000000..03d48a1711 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/timeline/EventsModel.java @@ -0,0 +1,819 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2011-2019 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.timeline; + +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.LoadingCache; +import com.google.common.collect.ImmutableList; +import com.google.common.eventbus.EventBus; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.logging.Level; +import javafx.beans.InvalidationListener; +import javafx.beans.property.ReadOnlyObjectProperty; +import javafx.beans.property.ReadOnlyObjectWrapper; +import javafx.collections.FXCollections; +import javafx.collections.ObservableMap; +import static org.apache.commons.collections4.CollectionUtils.emptyIfNull; +import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; +import org.joda.time.DateTimeZone; +import org.joda.time.Interval; +import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagAddedEvent; +import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent; +import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo; +import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent; +import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent; +import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent.DeletedContentTagInfo; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; +import org.sleuthkit.autopsy.timeline.events.RefreshRequestedEvent; +import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent; +import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent; +import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState; +import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState; +import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl; +import org.sleuthkit.autopsy.timeline.utils.FilterUtils; +import org.sleuthkit.autopsy.timeline.zooming.EventsModelParams; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardArtifactTag; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.ContentTag; +import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TimelineManager; +import 
org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.TimelineEvent; +import org.sleuthkit.datamodel.TimelineEventType; +import org.sleuthkit.datamodel.TimelineFilter; +import org.sleuthkit.datamodel.TimelineFilter.DataSourceFilter; +import org.sleuthkit.datamodel.TimelineFilter.DataSourcesFilter; +import org.sleuthkit.datamodel.TimelineFilter.EventTypeFilter; +import org.sleuthkit.datamodel.TimelineFilter.FileTypesFilter; +import org.sleuthkit.datamodel.TimelineFilter.HashHitsFilter; +import org.sleuthkit.datamodel.TimelineFilter.HideKnownFilter; +import org.sleuthkit.datamodel.TimelineFilter.RootFilter; +import org.sleuthkit.datamodel.TimelineFilter.TagsFilter; +import org.sleuthkit.datamodel.TimelineFilter.TextFilter; +import org.sleuthkit.datamodel.TimelineLevelOfDetail; + +/** + * In the timeline implementation of the MVC pattern, this class acts as the + * model. The views are the event counts view, the event details view and the + * events list view. + * + * Concurrency Policy: TimelineManager is internally synchronized, so methods + * that only access the TimelineManager atomically do not need further + * synchronization. All other member state variables should only be accessed + * with intrinsic lock of the containing FilteredEventsModel held. + * + */ +public final class EventsModel { + + private static final Logger logger = Logger.getLogger(EventsModel.class.getName()); + private final EventBus eventbus = new EventBus("EventsModel_EventBus"); //NON-NLS + private final Case currentCase; + private final TimelineManager caseDbEventManager; + + /* + * User-specified parameters for the model exposed as JFX properties. These + * parameters apply across all of the views of the model and are set using + * GUI elements such the event filters panel. + * + * IMPORTANT: Note that the parameters are exposed both as a set and + * individually. + */ + private final ReadOnlyObjectWrapper modelParamsProperty = new ReadOnlyObjectWrapper<>(); + private final ReadOnlyObjectWrapper filterStateProperty = new ReadOnlyObjectWrapper<>(); + private final ReadOnlyObjectWrapper timeRangeProperty = new ReadOnlyObjectWrapper<>(); + private final ReadOnlyObjectWrapper eventTypesHierarchyLevelProperty = new ReadOnlyObjectWrapper<>(TimelineEventType.HierarchyLevel.CATEGORY); + private final ReadOnlyObjectWrapper timelineLODProperty = new ReadOnlyObjectWrapper<>(TimelineLevelOfDetail.LOW); + + /* + * Caches of model data from the case database. + */ + private final ObservableMap datasourceIDsToNamesMap = FXCollections.observableHashMap(); + private final LoadingCache maxEventTimeCache; + private final LoadingCache minEventTimeCache; + private final LoadingCache idsToEventsCache; + private final LoadingCache> eventCountsCache; + + /** + * Makes a new data source filter from a given entry in the cache of data + * source object IDs to data source names. + * + * @param dataSourceEntry The cache entry. + * + * @return A new DataSourceFilter. + */ + private static DataSourceFilter newDataSourceFilter(Map.Entry dataSourceEntry) { + return new DataSourceFilter(dataSourceEntry.getValue(), dataSourceEntry.getKey()); + } + + /** + * Constructs the model in the timeline implementation of the MVC pattern. + * + * @param currentCase The current case. + * @param modelParams The initial state of the model parameters. + * + * @throws TskCoreException If there is an error reading model data from the + * case database. 
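Editor's aside, not part of this patch: the sketch below illustrates, in isolation, the JavaFX read-only property pattern the model uses to publish its parameters to the views (a private ReadOnlyObjectWrapper, with only the ReadOnlyObjectProperty view handed out). The class, field, and values here are invented for the example.

// Minimal sketch of the read-only property pattern used by EventsModel (assumed names).
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.property.ReadOnlyObjectWrapper;

public class ReadOnlyPropertySketch {

    // The owner keeps the writable wrapper private...
    private final ReadOnlyObjectWrapper<String> param = new ReadOnlyObjectWrapper<>("initial");

    // ...and exposes only the read-only view for observation.
    public ReadOnlyObjectProperty<String> paramProperty() {
        return param.getReadOnlyProperty();
    }

    void update(String newValue) {
        param.set(newValue);
    }

    public static void main(String[] args) {
        ReadOnlyPropertySketch model = new ReadOnlyPropertySketch();
        // A "view" reacts to parameter changes via an InvalidationListener.
        model.paramProperty().addListener(observable
                -> System.out.println("changed to " + model.paramProperty().get()));
        model.update("new value");
    }
}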
+ */ + public EventsModel(Case currentCase, ReadOnlyObjectProperty modelParams) throws TskCoreException { + this.currentCase = currentCase; + this.caseDbEventManager = currentCase.getSleuthkitCase().getTimelineManager(); + + /* + * Set up the caches of model data from the case database. Note that the + * build() method calls specify the methods used to create default cache + * entries when a call to get() would otherwise return a cache miss. + */ + populateDataSourcesCache(); + idsToEventsCache = CacheBuilder.newBuilder() + .maximumSize(5000L) + .expireAfterAccess(10, TimeUnit.MINUTES) + .build(new CacheLoaderImpl<>(caseDbEventManager::getEventById)); + eventCountsCache = CacheBuilder.newBuilder() + .maximumSize(1000L) + .expireAfterAccess(10, TimeUnit.MINUTES) + .build(new CacheLoaderImpl<>(this::countEventsByType)); + maxEventTimeCache = CacheBuilder.newBuilder() + .build(new CacheLoaderImpl<>(ignored -> caseDbEventManager.getMaxEventTime())); + minEventTimeCache = CacheBuilder.newBuilder() + .build(new CacheLoaderImpl<>(ignored -> caseDbEventManager.getMinEventTime())); + + /* + * Add a listener to the data sources cache that adds a data source + * filter to the event filter state model parameter when a data source + * is added to the cache. + */ + InvalidationListener dataSourcesMapListener = observable -> { + RootFilterState rootFilter = filterStateProperty.getReadOnlyProperty().get(); + addDataSourceFilters(rootFilter); + filterStateProperty.set(rootFilter.copyOf()); + }; + datasourceIDsToNamesMap.addListener(dataSourcesMapListener); + + /* + * Initialize the events filter state model parameter with the default + * events filter. + * + * RJCTODO: Why isn't the event filter state of the initialModelParams + * used here? + */ + filterStateProperty.set(getDefaultEventFilter()); + + /* + * Add a listener to the model parameters property that updates the + * properties that expose the individual model parameters when they are + * changed through the model parameters property. + */ + modelParamsProperty.addListener(observable -> { + final EventsModelParams params = modelParamsProperty.get(); + if (params != null) { + synchronized (EventsModel.this) { + eventTypesHierarchyLevelProperty.set(params.getEventTypesHierarchyLevel()); + filterStateProperty.set(params.getEventFilterState()); + timeRangeProperty.set(params.getTimeRange()); + timelineLODProperty.set(params.getTimelineLOD()); + } + } + }); + + modelParamsProperty.bind(modelParams); + } + + /** + * Populates the map of data source object IDs to data source names from the + * data source data in the case database. + */ + synchronized private void populateDataSourcesCache() throws TskCoreException { + datasourceIDsToNamesMap.clear(); + SleuthkitCase skCase = currentCase.getSleuthkitCase(); + for (DataSource ds : skCase.getDataSources()) { + datasourceIDsToNamesMap.putIfAbsent(ds.getId(), ds.getName()); + } + } + + /** + * Adds a data source filter for each data source in the data sources cache + * to a given root filter state object. + * + * RJCTODO: This method should be synchronized! RJCTODO: This seems like an + * unusual method. + * + * @param rootFilterState A root filter state object. 
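Editor's aside, not part of this patch: the constructor above builds its caches with Autopsy's CacheLoaderImpl wrapper around TimelineManager calls. The standalone sketch below shows the underlying Guava LoadingCache pattern with a plain CacheLoader; the key and value types and the loader body are placeholders, not the real database reads.

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

public class EventCacheSketch {

    public static void main(String[] args) throws ExecutionException {
        LoadingCache<Long, String> idsToEvents = CacheBuilder.newBuilder()
                .maximumSize(5000L)                      // bound memory use
                .expireAfterAccess(10, TimeUnit.MINUTES) // evict entries that go unused
                .build(new CacheLoader<Long, String>() {
                    @Override
                    public String load(Long eventId) {
                        // Stand-in for a loader such as caseDbEventManager.getEventById(eventId).
                        return "event-" + eventId;
                    }
                });

        // get() invokes the loader on a miss and caches the result for later calls.
        System.out.println(idsToEvents.get(42L));
        idsToEvents.invalidateAll(); // what invalidateCaches() does when the case data changes
    }
}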
+ */ + void addDataSourceFilters(RootFilterState rootFilterState) { + DataSourcesFilter dataSourcesFilter = rootFilterState.getDataSourcesFilterState().getFilter(); + datasourceIDsToNamesMap.entrySet().forEach(entry -> dataSourcesFilter.addSubFilter(newDataSourceFilter(entry))); + } + + /** + * Gets the count of all events that fit the given model parameters. The + * counts are organized by event type for the given event types hierarchy + * level. + * + * RJCTODO: Where does the argument for this method come from when called by + * the cache builder? + * + * @param modelParams The model parameters. + * + * @return A mapping of event types to event counts at the given event types + * hierarchy level. + * + * @throws TskCoreException If there is an error reading model data from the + * case database. + */ + private Map countEventsByType(EventsModelParams modelParams) throws TskCoreException { + if (modelParams.getTimeRange() == null) { + return Collections.emptyMap(); + } else { + return caseDbEventManager.countEventsByType(modelParams.getTimeRange().getStartMillis() / 1000, + modelParams.getTimeRange().getEndMillis() / 1000, + modelParams.getEventFilterState().getActiveFilter(), + modelParams.getEventTypesHierarchyLevel()); + } + } + + /** + * Gets the case database events manager. + * + * RJCTODO: Clients should probably get their own reference. + * + * @return The case database events manager. + */ + public TimelineManager getEventManager() { + return caseDbEventManager; + } + + /** + * Gets the case database. + * + * RJCTODO: Clients should probably get their own reference. + * + * @return The case database. + */ + public SleuthkitCase getSleuthkitCase() { + return currentCase.getSleuthkitCase(); + } + + /** + * Gets the model parameters property. + * + * @return A read only, observable property for the current model + * parameters. + */ + synchronized public ReadOnlyObjectProperty modelParamsProperty() { + return modelParamsProperty.getReadOnlyProperty(); + } + + /** + * Gets a read only, observable property for the time range model parameter. + * + * @return The time range model parameter property. + */ + @NbBundle.Messages({ + "FilteredEventsModel.timeRangeProperty.errorTitle=Timeline", + "FilteredEventsModel.timeRangeProperty.errorMessage=Error getting spanning interval."}) + synchronized public ReadOnlyObjectProperty timeRangeProperty() { + if (timeRangeProperty.get() == null) { + try { + timeRangeProperty.set(EventsModel.this.getSpanningInterval()); + } catch (TskCoreException timelineCacheException) { + MessageNotifyUtil.Notify.error(Bundle.FilteredEventsModel_timeRangeProperty_errorTitle(), + Bundle.FilteredEventsModel_timeRangeProperty_errorMessage()); + logger.log(Level.SEVERE, "Error getting spanning interval.", timelineCacheException); + } + } + return timeRangeProperty.getReadOnlyProperty(); + } + + /** + * Gets a read only, observable property for the timeline level of detail + * model parameter. + * + * @return The timeline level of detail model parameter property. + */ + synchronized public ReadOnlyObjectProperty descriptionLODProperty() { + return timelineLODProperty.getReadOnlyProperty(); + } + + /** + * Gets a read only, observable property for the event filter model + * parameter. + * + * @return The event filter model parameter property. 
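Editor's aside, not part of this patch: countEventsByType() above hands epoch seconds to the case database, while Joda-Time intervals carry milliseconds. A minimal sketch of that conversion, with an arbitrary example range:

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;

public class IntervalSecondsSketch {

    public static void main(String[] args) {
        Interval timeRange = new Interval(
                new DateTime(2019, 1, 1, 0, 0, DateTimeZone.UTC),
                new DateTime(2019, 2, 1, 0, 0, DateTimeZone.UTC));

        // Joda-Time intervals are in milliseconds; the timeline queries take seconds.
        long startSeconds = timeRange.getStartMillis() / 1000;
        long endSeconds = timeRange.getEndMillis() / 1000;
        System.out.println(startSeconds + " .. " + endSeconds);
    }
}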
+ */ + synchronized public ReadOnlyObjectProperty eventFilterProperty() { + return filterStateProperty.getReadOnlyProperty(); + } + + /** + * Gets a read only, observable property for the event types hierarchy level + * model parameter. + * + * @return The event types hierarchy level model parameter property. + */ + synchronized public ReadOnlyObjectProperty eventTypesHierarchyLevelProperty() { + return eventTypesHierarchyLevelProperty.getReadOnlyProperty(); + } + + /** + * Gets the current model parameters. + * + * RJCTODO: This breaks encapsulation. Is it really necessary? + * + * @return The current model parameters. + */ + synchronized public EventsModelParams getModelParams() { + return modelParamsProperty.get(); + } + + /** + * Gets the time range model parameter. + * + * RJCTODO: This breaks encapsulation. Is it really necessary? + * + * @return The time range model parameter. + */ + synchronized public Interval getTimeRange() { + return getModelParams().getTimeRange(); + } + + /** + * Gets the time range model parameter. + * + * RJCTODO: This breaks encapsulation. Is it really necessary? + * + * @return The time range model parameter. + */ + synchronized public TimelineLevelOfDetail getDescriptionLOD() { + return getModelParams().getTimelineLOD(); + } + + /** + * Gets the event filter model parameter. + * + * RJCTODO: This breaks encapsulation. Is it really necessary? + * + * @return The event filter model parameter. + */ + synchronized public RootFilterState getEventFilterState() { + return getModelParams().getEventFilterState(); + } + + /** + * Gets the event types hierarchy level model model parameter. + * + * RJCTODO: This breaks encapsulation. Is it really necessary? + * + * @return The event types hierarchy level model model parameter. + */ + synchronized public TimelineEventType.HierarchyLevel getEventTypeZoom() { + return getModelParams().getEventTypesHierarchyLevel(); + } + + /** + * Gets a new instance of the default event filter state model parameter, + * with data source filters for every data source currently in the data + * sopurces cache. + * + * @return An instance of the default filter state model parameter. + */ + public synchronized RootFilterState getDefaultEventFilter() { + DataSourcesFilter dataSourcesFilter = new DataSourcesFilter(); + datasourceIDsToNamesMap.entrySet().forEach(dataSourceEntry + -> dataSourcesFilter.addSubFilter(newDataSourceFilter(dataSourceEntry))); + return new RootFilterState(new RootFilter(new HideKnownFilter(), + new TagsFilter(), + new HashHitsFilter(), + new TextFilter(), + new EventTypeFilter(TimelineEventType.ROOT_EVENT_TYPE), + dataSourcesFilter, + FilterUtils.createDefaultFileTypesFilter(), + Collections.emptySet())); + } + + /** + * Gets an event given its event ID. + * + * @param eventID The event ID. + * + * @return The event. + * + * @throws TskCoreException If there is an error reading model data from the + * case database. + */ + public TimelineEvent getEventById(Long eventID) throws TskCoreException { + try { + return idsToEventsCache.get(eventID); + } catch (ExecutionException ex) { + throw new TskCoreException("Error getting cached event from ID", ex); + } + } + + /** + * Gets a set of events given their event IDs. + * + * @param eventIDs The event IDs. + * + * @return THe events. + * + * @throws TskCoreException If there is an error reading model data from the + * case database. 
+ */ + public Set getEventsById(Collection eventIDs) throws TskCoreException { + Set events = new HashSet<>(); + for (Long id : eventIDs) { + events.add(getEventById(id)); + } + return events; + } + + /** + * Gets a list of event IDs for a given time range and a given events + * filter. + * + * @param timeRange The time range. + * @param filterState A filter state object for the events filter. + * + * @return The events. + * + * @throws TskCoreException If there is an error reading model data from the + * case database. + */ + public List getEventIDs(Interval timeRange, FilterState filterState) throws TskCoreException { + final Interval overlap; + RootFilter intersection; + synchronized (this) { + overlap = EventsModel.this.getSpanningInterval().overlap(timeRange); + intersection = getEventFilterState().intersect(filterState).getActiveFilter(); + } + return caseDbEventManager.getEventIDs(overlap, intersection); + } + + /** + * Gets a set of event IDs associated with a given file. + * + * @param file The file. + * @param includeDerivedArtifacts If true, also gets the event IDs of events + * associated with artifacts for which the + * file is the source file. + * + * @return The event IDs. + * + * @throws TskCoreException If there is an error reading model data from the + * case database. + */ + public Set getEventIDsForFile(AbstractFile file, boolean includeDerivedArtifacts) throws TskCoreException { + return caseDbEventManager.getEventIDsForContent(file, includeDerivedArtifacts); + } + + /** + * Gets a set of event IDs associated with a given artifact. + * + * @param artifact The artifact. + * + * @return The event IDs. + * + * @throws TskCoreException If there is an error reading model data from the + * case database. + */ + public List getEventIDsForArtifact(BlackboardArtifact artifact) throws TskCoreException { + return caseDbEventManager.getEventIDsForArtifact(artifact); + } + + /** + * Gets counts by event type of the events within a given time range. + * + * @param timeRange The time range. + * + * @return The event counts by type. + * + * @throws TskCoreException If there is an error reading model data from the + * case database. + */ + public Map getEventCounts(Interval timeRange) throws TskCoreException { + final RootFilterState filter; + final TimelineEventType.HierarchyLevel typeZoom; + synchronized (this) { + filter = getEventFilterState(); + typeZoom = getEventTypeZoom(); + } + try { + return eventCountsCache.get(new EventsModelParams(timeRange, typeZoom, filter, null)); + } catch (ExecutionException executionException) { + throw new TskCoreException("Error getting cached event counts.`1", executionException); + } + } + + /** + * Gets the spanning interval for the events that fall within the time range + * and event filter model parameters, in terms of a given time zone. + * + * @param timeZone The time zone. + * + * @return The spanning interval. + * + * @throws TskCoreException If there is an error reading model data from the + * case database. + */ + public Interval getSpanningInterval(DateTimeZone timeZone) throws TskCoreException { + return caseDbEventManager.getSpanningInterval(modelParamsProperty().get().getTimeRange(), getEventFilterState().getActiveFilter(), timeZone); + } + + /** + * Gets the spanning interval for all of the events in the case database. + * + * @return The spanning interval. + * + * @throws TskCoreException If there is an error reading model data from the + * case database. 
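Editor's aside, not part of this patch: getEventIDs() above clamps the requested range to the case's spanning interval with Interval.overlap() before querying. A small standalone sketch of that Joda-Time call, using made-up dates:

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;

public class IntervalOverlapSketch {

    public static void main(String[] args) {
        Interval caseSpan = new Interval(
                new DateTime(2019, 1, 1, 0, 0, DateTimeZone.UTC),
                new DateTime(2019, 6, 1, 0, 0, DateTimeZone.UTC));
        Interval requested = new Interval(
                new DateTime(2019, 5, 1, 0, 0, DateTimeZone.UTC),
                new DateTime(2019, 9, 1, 0, 0, DateTimeZone.UTC));

        // overlap() returns the intersection, or null if the two ranges are disjoint.
        Interval clamped = caseSpan.overlap(requested);
        System.out.println(clamped); // 2019-05-01 .. 2019-06-01
    }
}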
+ */ + public Interval getSpanningInterval() throws TskCoreException { + return new Interval(getMinEventTime() * 1000, 1000 + getMaxEventTime() * 1000); + } + + /** + * Gets the spanning interval for a collection of events. + * + * @param eventIDs The event IDs of the events. + * + * @return The spanning interval. + * + * @throws TskCoreException If there is an error reading model data from the + * case database. + */ + public Interval getSpanningInterval(Collection eventIDs) throws TskCoreException { + return caseDbEventManager.getSpanningInterval(eventIDs); + } + + /** + * Gets the minimum event time in the case database, in seconds since the + * UNIX epoch. + * + * @return The minimum event time. + * + * @throws TskCoreException If there is an error reading model data from the + * case database. + */ + public Long getMinEventTime() throws TskCoreException { + try { + return minEventTimeCache.get("min"); // NON-NLS + } catch (ExecutionException ex) { + throw new TskCoreException("Error getting cached min time.", ex); + } + } + + /** + * Gets the maximum event time in the case database, in seconds since the + * UNIX epoch. + * + * @return The maximum event time. + * + * @throws TskCoreException If there is an error reading model data from the + * case database. + */ + public Long getMaxEventTime() throws TskCoreException { + try { + return maxEventTimeCache.get("max"); // NON-NLS + } catch (ExecutionException ex) { + throw new TskCoreException("Error getting cached max time.", ex); + } + } + + /** + * Updates the events model for a content tag added event and publishes a + * tag added event via the model's event bus. + * + * @param evt The event. + * + * @return If a tags added event was published via the model's event bus. + * + * @throws TskCoreException If there is an error reading model data from the + * case database. + */ + synchronized public boolean handleContentTagAdded(ContentTagAddedEvent evt) throws TskCoreException { + ContentTag contentTag = evt.getAddedTag(); + Content content = contentTag.getContent(); + Set updatedEventIDs = caseDbEventManager.updateEventsForContentTagAdded(content); + if (isNotEmpty(updatedEventIDs)) { + invalidateCaches(updatedEventIDs); + } + return postTagsAdded(updatedEventIDs); + } + + /** + * Updates the events model for an artifact tag added event and publishes a + * tag added event via the model's event bus. + * + * @param evt The event. + * + * @return If a tags added event was published via the model's event bus. + * + * @throws TskCoreException If there is an error reading model data from the + * case database. + */ + synchronized public boolean handleArtifactTagAdded(BlackBoardArtifactTagAddedEvent evt) throws TskCoreException { + BlackboardArtifactTag artifactTag = evt.getAddedTag(); + BlackboardArtifact artifact = artifactTag.getArtifact(); + Set updatedEventIDs = caseDbEventManager.updateEventsForArtifactTagAdded(artifact); + if (isNotEmpty(updatedEventIDs)) { + invalidateCaches(updatedEventIDs); + } + return postTagsAdded(updatedEventIDs); + } + + /** + * Updates the events model for a content tag deleted event and publishes a + * tag deleted event via the model's event bus. + * + * @param evt The event. + * + * @return If a tags deleted event was published via the model's event bus. + * + * @throws TskCoreException If there is an error reading model data from the + * case database. 
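Editor's aside, not part of this patch: getSpanningInterval() above converts the cached minimum and maximum event times (whole seconds) to milliseconds and adds 1000 ms to the end, presumably so that the end-exclusive interval still contains events occurring in the last second. A sketch with made-up times:

import org.joda.time.Interval;

public class SpanningIntervalSketch {

    public static void main(String[] args) {
        long minEventTimeSeconds = 1_546_300_800L; // example: 2019-01-01 00:00:00 UTC
        long maxEventTimeSeconds = 1_559_347_200L; // example: 2019-06-01 00:00:00 UTC

        // Same arithmetic as getSpanningInterval(): seconds to millis, plus one second on the end.
        Interval span = new Interval(minEventTimeSeconds * 1000,
                1000 + maxEventTimeSeconds * 1000);

        // Joda-Time intervals exclude the end instant, so the extra second keeps the
        // latest event inside the span.
        System.out.println(span.contains(maxEventTimeSeconds * 1000)); // true
    }
}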
+ */ + synchronized public boolean handleContentTagDeleted(ContentTagDeletedEvent evt) throws TskCoreException { + DeletedContentTagInfo deletedTagInfo = evt.getDeletedTagInfo(); + Content content = currentCase.getSleuthkitCase().getContentById(deletedTagInfo.getContentID()); + Set updatedEventIDs = caseDbEventManager.updateEventsForContentTagDeleted(content); + if (isNotEmpty(updatedEventIDs)) { + invalidateCaches(updatedEventIDs); + } + return postTagsDeleted(updatedEventIDs); + } + + /** + * Updates the events model for an artifact tag deleted event and publishes + * a tag deleted event via the model's event bus. + * + * @param evt The event. + * + * @return If a tags deleted event was published via the model's event bus. + * + * @throws TskCoreException If there is an error reading model data from the + * case database. + */ + synchronized public boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt) throws TskCoreException { + DeletedBlackboardArtifactTagInfo deletedTagInfo = evt.getDeletedTagInfo(); + BlackboardArtifact artifact = currentCase.getSleuthkitCase().getBlackboardArtifact(deletedTagInfo.getArtifactID()); + Set updatedEventIDs = caseDbEventManager.updateEventsForArtifactTagDeleted(artifact); + if (isNotEmpty(updatedEventIDs)) { + invalidateCaches(updatedEventIDs); + } + return postTagsDeleted(updatedEventIDs); + } + + /** + * Post a TagsAddedEvent to all registered subscribers, if the given set of + * updated event IDs is not empty. + * + * @param updatedEventIDs The set of event ids to be included in the + * TagsAddedEvent. + * + * @return True if an event was posted. + */ + private boolean postTagsAdded(Set updatedEventIDs) { + boolean tagsUpdated = !updatedEventIDs.isEmpty(); + if (tagsUpdated) { + eventbus.post(new TagsAddedEvent(updatedEventIDs)); + } + return tagsUpdated; + } + + /** + * Post a TagsDeletedEvent to all registered subscribers, if the given set + * of updated event IDs is not empty. + * + * @param updatedEventIDs The set of event ids to be included in the + * TagsDeletedEvent. + * + * @return True if an event was posted. + */ + private boolean postTagsDeleted(Set updatedEventIDs) { + boolean tagsUpdated = !updatedEventIDs.isEmpty(); + if (tagsUpdated) { + eventbus.post(new TagsDeletedEvent(updatedEventIDs)); + } + return tagsUpdated; + } + + /** + * Register the given object to receive events. + * + * @param subscriber The object to register. Must implement public methods + * annotated with Subscribe. + */ + synchronized public void registerForEvents(Object subscriber) { + eventbus.register(subscriber); + } + + /** + * Un-register the given object, so it no longer receives events. + * + * @param subscriber The object to un-register. + */ + synchronized public void unRegisterForEvents(Object subscriber) { + eventbus.unregister(subscriber); + } + + /** + * Posts a refresh requested event to all registered subscribers. + */ + public void postRefreshRequest() { + eventbus.post(new RefreshRequestedEvent()); + } + + /** + * Gets a list of the event types from the case database. + * + * @return The list of event types. + */ + public ImmutableList getEventTypes() { + return caseDbEventManager.getEventTypes(); + } + + /** + * Sets the hash set hits flag for the events associated with the source + * files for a collection of hash set hit artifacts. + * + * @param hashSetHitArtifacts The hash set hit artifacts. + * + * @return The event IDs of the updated events. 
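Editor's aside, not part of this patch: postTagsAdded(), postTagsDeleted(), registerForEvents(), and unRegisterForEvents() above are thin wrappers over a Guava EventBus. This sketch shows the register/@Subscribe/post round trip with a stand-in event class rather than the real Autopsy TagsAddedEvent/TagsDeletedEvent types.

import com.google.common.collect.ImmutableSet;
import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;
import java.util.Set;

public class EventBusSketch {

    // Stand-in for TagsAddedEvent/TagsDeletedEvent.
    static class TagsChangedSketchEvent {
        final Set<Long> eventIds;

        TagsChangedSketchEvent(Set<Long> eventIds) {
            this.eventIds = eventIds;
        }
    }

    static class View {
        @Subscribe // public method annotated with Subscribe, as registerForEvents() requires
        public void handleTagsChanged(TagsChangedSketchEvent event) {
            System.out.println("refresh views for events " + event.eventIds);
        }
    }

    public static void main(String[] args) {
        EventBus eventBus = new EventBus("EventsModelSketch_EventBus");
        View view = new View();
        eventBus.register(view);   // registerForEvents(subscriber)
        eventBus.post(new TagsChangedSketchEvent(ImmutableSet.of(1L, 2L, 3L)));
        eventBus.unregister(view); // unRegisterForEvents(subscriber)
    }
}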
+     *
+     * @throws TskCoreException If there is an error reading model data from or
+     *                          writing model data to the case database.
+     */
+    synchronized public Set<Long> updateEventsForHashSetHits(Collection<BlackboardArtifact> hashSetHitArtifacts) throws TskCoreException {
+        Set<Long> updatedEventIDs = new HashSet<>();
+        for (BlackboardArtifact artifact : hashSetHitArtifacts) {
+            Content content = currentCase.getSleuthkitCase().getContentById(artifact.getObjectID());
+            updatedEventIDs.addAll(caseDbEventManager.updateEventsForHashSetHit(content));
+        }
+        if (isNotEmpty(updatedEventIDs)) {
+            invalidateCaches(updatedEventIDs);
+        }
+        return updatedEventIDs;
+    }
+
+    /**
+     * Invalidates all of the model caches and publishes a caches invalidated
+     * event. Optionally, a collection of event IDs may be supplied, in which
+     * case only the corresponding entries in the event IDs cache are
+     * invalidated.
+     *
+     * RJCTODO: What is the use case for passing event IDs? The only place this
+     * is currently done is when handling TIMELINE_EVENT_ADDED events, i.e.,
+     * for entirely new events. Is this some sort of performance optimization?
+     *
+     * @param updatedEventIDs Either null or a collection of the event IDs.
+     *
+     * @throws TskCoreException
+     */
+    public synchronized void invalidateCaches(Collection<Long> updatedEventIDs) throws TskCoreException {
+        populateDataSourcesCache();
+        minEventTimeCache.invalidateAll();
+        maxEventTimeCache.invalidateAll();
+        idsToEventsCache.invalidateAll(emptyIfNull(updatedEventIDs));
+        eventCountsCache.invalidateAll();
+        eventbus.post(new CacheInvalidatedEvent());
+    }
+
+    /**
+     * Event fired when a cache has been invalidated and the views need to be
+     * refreshed.
+     */
+    public static class CacheInvalidatedEvent {
+
+        private CacheInvalidatedEvent() {
+        }
+    }
+
+}
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/FilteredEventsModel.java b/Core/src/org/sleuthkit/autopsy/timeline/FilteredEventsModel.java
deleted file mode 100755
index 483172844f..0000000000
--- a/Core/src/org/sleuthkit/autopsy/timeline/FilteredEventsModel.java
+++ /dev/null
@@ -1,651 +0,0 @@
-/*
- * Autopsy Forensic Browser
- *
- * Copyright 2011-2019 Basis Technology Corp.
- * Contact: carrier sleuthkit org
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */ -package org.sleuthkit.autopsy.timeline; - -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.LoadingCache; -import com.google.common.collect.ImmutableList; -import com.google.common.eventbus.EventBus; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.logging.Level; -import javafx.beans.InvalidationListener; -import javafx.beans.property.ReadOnlyObjectProperty; -import javafx.beans.property.ReadOnlyObjectWrapper; -import javafx.collections.FXCollections; -import javafx.collections.ObservableMap; -import static org.apache.commons.collections4.CollectionUtils.emptyIfNull; -import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; -import org.joda.time.DateTimeZone; -import org.joda.time.Interval; -import org.openide.util.NbBundle; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagAddedEvent; -import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent; -import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo; -import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent; -import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent; -import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent.DeletedContentTagInfo; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; -import org.sleuthkit.autopsy.events.AutopsyEvent; -import org.sleuthkit.autopsy.timeline.events.RefreshRequestedEvent; -import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent; -import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent; -import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState; -import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState; -import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl; -import org.sleuthkit.autopsy.timeline.utils.FilterUtils; -import org.sleuthkit.autopsy.timeline.zooming.ZoomState; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardArtifactTag; -import org.sleuthkit.datamodel.Content; -import org.sleuthkit.datamodel.ContentTag; -import org.sleuthkit.datamodel.DataSource; -import org.sleuthkit.datamodel.SleuthkitCase; -import org.sleuthkit.datamodel.TimelineManager; -import org.sleuthkit.datamodel.TskCoreException; -import org.sleuthkit.datamodel.TimelineEvent; -import org.sleuthkit.datamodel.TimelineEventType; -import org.sleuthkit.datamodel.TimelineFilter; -import org.sleuthkit.datamodel.TimelineFilter.DataSourceFilter; -import org.sleuthkit.datamodel.TimelineFilter.DataSourcesFilter; -import org.sleuthkit.datamodel.TimelineFilter.EventTypeFilter; -import org.sleuthkit.datamodel.TimelineFilter.FileTypesFilter; -import org.sleuthkit.datamodel.TimelineFilter.HashHitsFilter; -import org.sleuthkit.datamodel.TimelineFilter.HideKnownFilter; -import org.sleuthkit.datamodel.TimelineFilter.RootFilter; -import org.sleuthkit.datamodel.TimelineFilter.TagsFilter; -import org.sleuthkit.datamodel.TimelineFilter.TextFilter; -import org.sleuthkit.datamodel.TimelineLevelOfDetail; - -/** - * This class acts as the model for a TimelineView - * - * Views can register listeners on properties 
returned by methods. - * - * This class is implemented as a filtered view into an underlying - * TimelineManager. - * - * Maintainers, NOTE: as many methods as possible should cache their results so - * as to avoid unnecessary db calls through the TimelineManager -jm - * - * Concurrency Policy: TimelineManager is internally synchronized, so methods - * that only access the TimelineManager atomically do not need further - * synchronization. All other member state variables should only be accessed - * with intrinsic lock of containing FilteredEventsModel held. - * - */ -public final class FilteredEventsModel { - - private static final Logger logger = Logger.getLogger(FilteredEventsModel.class.getName()); - - private final TimelineManager eventManager; - - private final Case autoCase; - private final EventBus eventbus = new EventBus("FilteredEventsModel_EventBus"); //NON-NLS - - //Filter and zoome state - private final ReadOnlyObjectWrapper requestedFilter = new ReadOnlyObjectWrapper<>(); - private final ReadOnlyObjectWrapper requestedTimeRange = new ReadOnlyObjectWrapper<>(); - private final ReadOnlyObjectWrapper requestedZoomState = new ReadOnlyObjectWrapper<>(); - private final ReadOnlyObjectWrapper requestedTypeZoom = new ReadOnlyObjectWrapper<>(TimelineEventType.HierarchyLevel.CATEGORY); - private final ReadOnlyObjectWrapper requestedLOD = new ReadOnlyObjectWrapper<>(TimelineLevelOfDetail.LOW); - // end Filter and zoome state - - //caches - private final LoadingCache maxCache; - private final LoadingCache minCache; - private final LoadingCache idToEventCache; - private final LoadingCache> eventCountsCache; - /** - * Map from datasource id to datasource name. - */ - private final ObservableMap datasourcesMap = FXCollections.observableHashMap(); - // end caches - - /** - * Make a DataSourceFilter from an entry from the datasourcesMap. - * - * @param dataSourceEntry A map entry from datasource id to datasource name. - * - * @return A new DataSourceFilter for the given datsourcesMap entry. 
- */ - private static DataSourceFilter newDataSourceFromMapEntry(Map.Entry dataSourceEntry) { - return new DataSourceFilter(dataSourceEntry.getValue(), dataSourceEntry.getKey()); - } - - public FilteredEventsModel(Case autoCase, ReadOnlyObjectProperty currentStateProperty) throws TskCoreException { - this.autoCase = autoCase; - this.eventManager = autoCase.getSleuthkitCase().getTimelineManager(); - populateFilterData(); - - //caches - idToEventCache = CacheBuilder.newBuilder() - .maximumSize(5000L) - .expireAfterAccess(10, TimeUnit.MINUTES) - .build(new CacheLoaderImpl<>(eventManager::getEventById)); - eventCountsCache = CacheBuilder.newBuilder() - .maximumSize(1000L) - .expireAfterAccess(10, TimeUnit.MINUTES) - .build(new CacheLoaderImpl<>(this::countEventsByType)); - - maxCache = CacheBuilder.newBuilder() - .build(new CacheLoaderImpl<>(ignored -> eventManager.getMaxEventTime())); - minCache = CacheBuilder.newBuilder() - .build(new CacheLoaderImpl<>(ignored -> eventManager.getMinEventTime())); - - InvalidationListener filterSyncListener = observable -> { - RootFilterState rootFilter = filterProperty().get(); - syncFilters(rootFilter); - requestedFilter.set(rootFilter.copyOf()); - }; - - datasourcesMap.addListener(filterSyncListener); - - requestedFilter.set(getDefaultFilter()); - - requestedZoomState.addListener(observable -> { - final ZoomState zoomState = requestedZoomState.get(); - - if (zoomState != null) { - synchronized (FilteredEventsModel.this) { - requestedTypeZoom.set(zoomState.getTypeZoomLevel()); - requestedFilter.set(zoomState.getFilterState()); - requestedTimeRange.set(zoomState.getTimeRange()); - requestedLOD.set(zoomState.getDescriptionLOD()); - } - } - }); - - requestedZoomState.bind(currentStateProperty); - } - - /** - * get the count of all events that fit the given zoom params organized by - * the EvenType of the level specified in the zoomState - * - * @param zoomState The params that control what events to count and how to - * organize the returned map - * - * @return a map from event type( of the requested level) to event counts - * - * @throws org.sleuthkit.datamodel.TskCoreException - */ - private Map countEventsByType(ZoomState zoomState) throws TskCoreException { - if (zoomState.getTimeRange() == null) { - return Collections.emptyMap(); - } else { - return eventManager.countEventsByType(zoomState.getTimeRange().getStartMillis() / 1000, - zoomState.getTimeRange().getEndMillis() / 1000, - zoomState.getFilterState().getActiveFilter(), zoomState.getTypeZoomLevel()); - } - } - - public TimelineManager getEventManager() { - return eventManager; - } - - public SleuthkitCase getSleuthkitCase() { - return autoCase.getSleuthkitCase(); - } - - public Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter, DateTimeZone timeZone) throws TskCoreException { - return eventManager.getSpanningInterval(timeRange, filter, timeZone); - } - - /** - * Readonly observable property for the current ZoomState - * - * @return A readonly observable property for the current ZoomState. - */ - synchronized public ReadOnlyObjectProperty zoomStateProperty() { - return requestedZoomState.getReadOnlyProperty(); - } - - /** - * Get the current ZoomState - * - * @return The current ZoomState - */ - synchronized public ZoomState getZoomState() { - return requestedZoomState.get(); - } - - /** - * Update the data used to determine the available filters. 
- */ - synchronized private void populateFilterData() throws TskCoreException { - SleuthkitCase skCase = autoCase.getSleuthkitCase(); - - //because there is no way to remove a datasource we only add to this map. - for (DataSource ds : skCase.getDataSources()) { - datasourcesMap.putIfAbsent(ds.getId(), ds.getName()); - } - } - - /** - * "sync" the given root filter with the state of the casee: Disable filters - * for tags that are not in use in the case, and add new filters for tags, - * hashsets, and datasources, that don't have them. New filters are selected - * by default. - * - * @param rootFilterState the filter state to modify so it is consistent - * with the tags in use in the case - */ - public void syncFilters(RootFilterState rootFilterState) { - DataSourcesFilter dataSourcesFilter = rootFilterState.getDataSourcesFilterState().getFilter(); - datasourcesMap.entrySet().forEach(entry -> dataSourcesFilter.addSubFilter(newDataSourceFromMapEntry(entry))); - } - - /** - * Get a read only view of the time range currently in view. - * - * @return A read only view of the time range currently in view. - */ - @NbBundle.Messages({ - "FilteredEventsModel.timeRangeProperty.errorTitle=Timeline", - "FilteredEventsModel.timeRangeProperty.errorMessage=Error getting spanning interval."}) - synchronized public ReadOnlyObjectProperty timeRangeProperty() { - if (requestedTimeRange.get() == null) { - try { - requestedTimeRange.set(getSpanningInterval()); - } catch (TskCoreException timelineCacheException) { - MessageNotifyUtil.Notify.error(Bundle.FilteredEventsModel_timeRangeProperty_errorTitle(), - Bundle.FilteredEventsModel_timeRangeProperty_errorMessage()); - logger.log(Level.SEVERE, "Error getting spanning interval.", timelineCacheException); - } - } - return requestedTimeRange.getReadOnlyProperty(); - } - - synchronized public ReadOnlyObjectProperty descriptionLODProperty() { - return requestedLOD.getReadOnlyProperty(); - } - - synchronized public ReadOnlyObjectProperty filterProperty() { - return requestedFilter.getReadOnlyProperty(); - } - - synchronized public ReadOnlyObjectProperty eventTypeZoomProperty() { - return requestedTypeZoom.getReadOnlyProperty(); - } - - /** - * The time range currently in view. - * - * @return The time range currently in view. - */ - synchronized public Interval getTimeRange() { - return getZoomState().getTimeRange(); - } - - synchronized public TimelineLevelOfDetail getDescriptionLOD() { - return getZoomState().getDescriptionLOD(); - } - - synchronized public RootFilterState getFilterState() { - return getZoomState().getFilterState(); - } - - synchronized public TimelineEventType.HierarchyLevel getEventTypeZoom() { - return getZoomState().getTypeZoomLevel(); - } - - /** - * Get the default filter used at startup. 
- * - * @return the default filter used at startup - */ - public synchronized RootFilterState getDefaultFilter() { - DataSourcesFilter dataSourcesFilter = new DataSourcesFilter(); - datasourcesMap.entrySet().forEach(dataSourceEntry - -> dataSourcesFilter.addSubFilter(newDataSourceFromMapEntry(dataSourceEntry))); - - HashHitsFilter hashHitsFilter = new HashHitsFilter(); - - TagsFilter tagsFilter = new TagsFilter(); - - FileTypesFilter fileTypesFilter = FilterUtils.createDefaultFileTypesFilter(); - - return new RootFilterState(new RootFilter(new HideKnownFilter(), - tagsFilter, - hashHitsFilter, - new TextFilter(), - new EventTypeFilter(TimelineEventType.ROOT_EVENT_TYPE), - dataSourcesFilter, - fileTypesFilter, - Collections.emptySet())); - } - - public Interval getBoundingEventsInterval(DateTimeZone timeZone) throws TskCoreException { - return eventManager.getSpanningInterval(zoomStateProperty().get().getTimeRange(), getFilterState().getActiveFilter(), timeZone); - } - - public TimelineEvent getEventById(Long eventID) throws TskCoreException { - try { - return idToEventCache.get(eventID); - } catch (ExecutionException ex) { - throw new TskCoreException("Error getting cached event from ID", ex); - } - } - - public Set getEventsById(Collection eventIDs) throws TskCoreException { - Set events = new HashSet<>(); - for (Long id : eventIDs) { - events.add(getEventById(id)); - } - return events; - } - - public List getEventIDs(Interval timeRange, FilterState filter) throws TskCoreException { - - final Interval overlap; - RootFilter intersection; - synchronized (this) { - overlap = getSpanningInterval().overlap(timeRange); - intersection = getFilterState().intersect(filter).getActiveFilter(); - } - - return eventManager.getEventIDs(overlap, intersection); - } - - /** - * Return the number of events that pass the requested filter and are within - * the given time range. - * - * NOTE: this method does not change the requested time range - * - * @param timeRange - * - * @return - * - * @throws org.sleuthkit.datamodel.TskCoreException - */ - public Map getEventCounts(Interval timeRange) throws TskCoreException { - - final RootFilterState filter; - final TimelineEventType.HierarchyLevel typeZoom; - synchronized (this) { - filter = getFilterState(); - typeZoom = getEventTypeZoom(); - } - try { - return eventCountsCache.get(new ZoomState(timeRange, typeZoom, filter, null)); - } catch (ExecutionException executionException) { - throw new TskCoreException("Error getting cached event counts.`1", executionException); - } - } - - /** - * @return The smallest interval spanning all the events from the case, - * ignoring any filters or requested ranges. - * - * @throws org.sleuthkit.datamodel.TskCoreException - */ - public Interval getSpanningInterval() throws TskCoreException { - return new Interval(getMinTime() * 1000, 1000 + getMaxTime() * 1000); - } - - /** - * Get the smallest interval spanning all the given events. - * - * @param eventIDs The IDs of the events to get a spanning interval arround. 
- * - * @return the smallest interval spanning all the given events - * - * @throws org.sleuthkit.datamodel.TskCoreException - */ - public Interval getSpanningInterval(Collection eventIDs) throws TskCoreException { - return eventManager.getSpanningInterval(eventIDs); - } - - /** - * @return the time (in seconds from unix epoch) of the absolutely first - * event available from the repository, ignoring any filters or - * requested ranges - * - * @throws org.sleuthkit.datamodel.TskCoreException - */ - public Long getMinTime() throws TskCoreException { - try { - return minCache.get("min"); // NON-NLS - } catch (ExecutionException ex) { - throw new TskCoreException("Error getting cached min time.", ex); - } - } - - /** - * @return the time (in seconds from unix epoch) of the absolutely last - * event available from the repository, ignoring any filters or - * requested ranges - * - * @throws org.sleuthkit.datamodel.TskCoreException - */ - public Long getMaxTime() throws TskCoreException { - try { - return maxCache.get("max"); // NON-NLS - } catch (ExecutionException ex) { - throw new TskCoreException("Error getting cached max time.", ex); - } - } - - synchronized public boolean handleContentTagAdded(ContentTagAddedEvent evt) throws TskCoreException { - ContentTag contentTag = evt.getAddedTag(); - Content content = contentTag.getContent(); - Set updatedEventIDs = eventManager.updateEventsForContentTagAdded(content); - if (isNotEmpty(updatedEventIDs)) { - invalidateCaches(updatedEventIDs); - } - return postTagsAdded(updatedEventIDs); - } - - synchronized public boolean handleArtifactTagAdded(BlackBoardArtifactTagAddedEvent evt) throws TskCoreException { - BlackboardArtifactTag artifactTag = evt.getAddedTag(); - BlackboardArtifact artifact = artifactTag.getArtifact(); - Set updatedEventIDs = eventManager.updateEventsForArtifactTagAdded(artifact); - if (isNotEmpty(updatedEventIDs)) { - invalidateCaches(updatedEventIDs); - } - return postTagsAdded(updatedEventIDs); - } - - synchronized public boolean handleContentTagDeleted(ContentTagDeletedEvent evt) throws TskCoreException { - DeletedContentTagInfo deletedTagInfo = evt.getDeletedTagInfo(); - Content content = autoCase.getSleuthkitCase().getContentById(deletedTagInfo.getContentID()); - Set updatedEventIDs = eventManager.updateEventsForContentTagDeleted(content); - if (isNotEmpty(updatedEventIDs)) { - invalidateCaches(updatedEventIDs); - } - return postTagsDeleted(updatedEventIDs); - } - - synchronized public boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt) throws TskCoreException { - DeletedBlackboardArtifactTagInfo deletedTagInfo = evt.getDeletedTagInfo(); - BlackboardArtifact artifact = autoCase.getSleuthkitCase().getBlackboardArtifact(deletedTagInfo.getArtifactID()); - Set updatedEventIDs = eventManager.updateEventsForArtifactTagDeleted(artifact); - if (isNotEmpty(updatedEventIDs)) { - invalidateCaches(updatedEventIDs); - } - return postTagsDeleted(updatedEventIDs); - } - - /** - * Get a Set of event IDs for the events that are derived from the given - * file. - * - * @param file The AbstractFile to get derived event IDs - * for. - * @param includeDerivedArtifacts If true, also get event IDs for events - * derived from artifacts derived form this - * file. If false, only gets events derived - * directly from this file (file system - * timestamps). - * - * @return A Set of event IDs for the events that are derived from the given - * file. 
- * - * @throws org.sleuthkit.datamodel.TskCoreException - */ - public Set getEventIDsForFile(AbstractFile file, boolean includeDerivedArtifacts) throws TskCoreException { - return eventManager.getEventIDsForContent(file, includeDerivedArtifacts); - } - - /** - * Get a List of event IDs for the events that are derived from the given - * artifact. - * - * @param artifact The BlackboardArtifact to get derived event IDs for. - * - * @return A List of event IDs for the events that are derived from the - * given artifact. - * - * @throws org.sleuthkit.datamodel.TskCoreException - */ - public List getEventIDsForArtifact(BlackboardArtifact artifact) throws TskCoreException { - return eventManager.getEventIDsForArtifact(artifact); - } - - /** - * Post a TagsAddedEvent to all registered subscribers, if the given set of - * updated event IDs is not empty. - * - * @param updatedEventIDs The set of event ids to be included in the - * TagsAddedEvent. - * - * @return True if an event was posted. - */ - private boolean postTagsAdded(Set updatedEventIDs) { - boolean tagsUpdated = !updatedEventIDs.isEmpty(); - if (tagsUpdated) { - eventbus.post(new TagsAddedEvent(updatedEventIDs)); - } - return tagsUpdated; - } - - /** - * Post a TagsDeletedEvent to all registered subscribers, if the given set - * of updated event IDs is not empty. - * - * @param updatedEventIDs The set of event ids to be included in the - * TagsDeletedEvent. - * - * @return True if an event was posted. - */ - private boolean postTagsDeleted(Set updatedEventIDs) { - boolean tagsUpdated = !updatedEventIDs.isEmpty(); - if (tagsUpdated) { - eventbus.post(new TagsDeletedEvent(updatedEventIDs)); - } - return tagsUpdated; - } - - /** - * Register the given object to receive events. - * - * @param subscriber The object to register. Must implement public methods - * annotated with Subscribe. - */ - synchronized public void registerForEvents(Object subscriber) { - eventbus.register(subscriber); - } - - /** - * Un-register the given object, so it no longer receives events. - * - * @param subscriber The object to un-register. - */ - synchronized public void unRegisterForEvents(Object subscriber) { - eventbus.unregister(subscriber); - } - - /** - * Post a RefreshRequestedEvent to all registered subscribers. - */ - public void postRefreshRequest() { - eventbus.post(new RefreshRequestedEvent()); - } - - /** - * (Re)Post an AutopsyEvent received from another event distribution system - * locally to all registered subscribers. - * - * @param event The event to re-post. - */ - public void postAutopsyEventLocally(AutopsyEvent event) { - eventbus.post(event); - } - - public ImmutableList getEventTypes() { - return eventManager.getEventTypes(); - } - - synchronized public Set setHashHit(Collection artifacts) throws TskCoreException { - Set updatedEventIDs = new HashSet<>(); - for (BlackboardArtifact artifact : artifacts) { - Content content = autoCase.getSleuthkitCase().getContentById(artifact.getObjectID()); - updatedEventIDs.addAll(eventManager.updateEventsForHashSetHit(content)); - } - if (isNotEmpty(updatedEventIDs)) { - invalidateCaches(updatedEventIDs); - } - return updatedEventIDs; - } - - /** - * Invalidate the timeline caches for the given event IDs. Also forces the - * filter values to be updated with any new values from the case data.( data - * sources, tags, etc) - * - * @param updatedEventIDs A collection of the event IDs whose cached event - * objects should be invalidated. 
Can be null or an - * empty sett to invalidate the general caches, such - * as min/max time, or the counts per event type. - * - * @throws TskCoreException - */ - public synchronized void invalidateCaches(Collection updatedEventIDs) throws TskCoreException { - minCache.invalidateAll(); - maxCache.invalidateAll(); - idToEventCache.invalidateAll(emptyIfNull(updatedEventIDs)); - eventCountsCache.invalidateAll(); - - populateFilterData(); - - eventbus.post(new CacheInvalidatedEvent()); - } - - /** - * Event fired when a cache has been invalidated. The UI should make it - * clear that the view is potentially out of date and present an action to - * refresh the view. - */ - public static class CacheInvalidatedEvent { - - private CacheInvalidatedEvent() { - } - } -} diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ShowInTimelineDialog.java b/Core/src/org/sleuthkit/autopsy/timeline/ShowInTimelineDialog.java index 1c1dae4165..abad95b2ff 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/ShowInTimelineDialog.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ShowInTimelineDialog.java @@ -195,7 +195,7 @@ final class ShowInTimelineDialog extends Dialog { //add events to table Set events = new HashSet<>(); - FilteredEventsModel eventsModel = controller.getEventsModel(); + EventsModel eventsModel = controller.getEventsModel(); for (Long eventID : eventIDS) { try { events.add(eventsModel.getEventById(eventID)); diff --git a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java index 8235fb79f3..d30fde7798 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java @@ -85,7 +85,7 @@ import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DescriptionFilterSt import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState; import org.sleuthkit.autopsy.timeline.utils.IntervalUtils; import org.sleuthkit.autopsy.timeline.zooming.TimeUnits; -import org.sleuthkit.autopsy.timeline.zooming.ZoomState; +import org.sleuthkit.autopsy.timeline.zooming.EventsModelParams; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT; @@ -200,16 +200,16 @@ public class TimeLineController { private final ReadOnlyObjectWrapper viewMode = new ReadOnlyObjectWrapper<>(ViewMode.COUNTS); @GuardedBy("filteredEvents") - private final FilteredEventsModel filteredEvents; + private final EventsModel filteredEvents; @GuardedBy("this") - private final ZoomState InitialZoomState; + private final EventsModelParams InitialZoomState; @GuardedBy("this") - private final History historyManager = new History<>(); + private final History historyManager = new History<>(); @GuardedBy("this") - private final ReadOnlyObjectWrapper currentParams = new ReadOnlyObjectWrapper<>(); + private final ReadOnlyObjectWrapper currentParams = new ReadOnlyObjectWrapper<>(); //selected events (ie shown in the result viewer) @GuardedBy("this") @@ -281,7 +281,7 @@ public class TimeLineController { TimeLineController(Case autoCase) throws TskCoreException { this.autoCase = autoCase; - filteredEvents = new FilteredEventsModel(autoCase, currentParams.getReadOnlyProperty()); + filteredEvents = new EventsModel(autoCase, currentParams.getReadOnlyProperty()); /* * as the history manager's current state changes, modify the tags * filter to be in sync, and expose that as 
propery from @@ -289,16 +289,16 @@ public class TimeLineController { * filters? */ historyManager.currentState().addListener((observable, oldState, newState) -> { - ZoomState historyManagerState = newState; - filteredEvents.syncFilters(historyManagerState.getFilterState()); + EventsModelParams historyManagerState = newState; + filteredEvents.addDataSourceFilters(historyManagerState.getEventFilterState()); currentParams.set(historyManagerState); }); try { - InitialZoomState = new ZoomState(filteredEvents.getSpanningInterval(), + InitialZoomState = new EventsModelParams(filteredEvents.getSpanningInterval(), TimelineEventType.HierarchyLevel.CATEGORY, - filteredEvents.filterProperty().get(), + filteredEvents.eventFilterProperty().get(), TimelineLevelOfDetail.LOW); } catch (TskCoreException ex) { throw new TskCoreException("Error getting spanning interval.", ex); @@ -318,17 +318,17 @@ public class TimeLineController { /** * @return a shared events model */ - public FilteredEventsModel getEventsModel() { + public EventsModel getEventsModel() { return filteredEvents; } public void applyDefaultFilters() { - pushFilters(filteredEvents.getDefaultFilter()); + pushFilters(filteredEvents.getDefaultEventFilter()); } public void zoomOutToActivity() throws TskCoreException { - Interval boundingEventsInterval = filteredEvents.getBoundingEventsInterval(getJodaTimeZone()); - advance(filteredEvents.zoomStateProperty().get().withTimeRange(boundingEventsInterval)); + Interval boundingEventsInterval = filteredEvents.getSpanningInterval(getJodaTimeZone()); + advance(filteredEvents.modelParamsProperty().get().withTimeRange(boundingEventsInterval)); } private final ObservableSet pinnedEvents = FXCollections.observableSet(); @@ -494,7 +494,7 @@ public class TimeLineController { } synchronized public void pushEventTypeZoom(TimelineEventType.HierarchyLevel typeZoomeLevel) { - ZoomState currentZoom = filteredEvents.zoomStateProperty().get(); + EventsModelParams currentZoom = filteredEvents.modelParamsProperty().get(); if (currentZoom == null) { advance(InitialZoomState.withTypeZoomLevel(typeZoomeLevel)); } else if (currentZoom.hasTypeZoomLevel(typeZoomeLevel) == false) { @@ -525,7 +525,7 @@ public class TimeLineController { } } - ZoomState currentZoom = filteredEvents.zoomStateProperty().get(); + EventsModelParams currentZoom = filteredEvents.modelParamsProperty().get(); if (currentZoom == null) { advance(InitialZoomState.withTimeRange(clampedTimeRange)); return true; @@ -556,7 +556,7 @@ public class TimeLineController { } synchronized public void pushDescrLOD(TimelineLevelOfDetail newLOD) { - ZoomState currentZoom = filteredEvents.zoomStateProperty().get(); + EventsModelParams currentZoom = filteredEvents.modelParamsProperty().get(); if (currentZoom == null) { advance(InitialZoomState.withDescrLOD(newLOD)); } else if (currentZoom.hasDescrLOD(newLOD) == false) { @@ -567,7 +567,7 @@ public class TimeLineController { @SuppressWarnings("AssignmentToMethodParameter") //clamp timerange to case synchronized public void pushTimeAndType(Interval timeRange, TimelineEventType.HierarchyLevel typeZoom) throws TskCoreException { Interval overlappingTimeRange = this.filteredEvents.getSpanningInterval().overlap(timeRange); - ZoomState currentZoom = filteredEvents.zoomStateProperty().get(); + EventsModelParams currentZoom = filteredEvents.modelParamsProperty().get(); if (currentZoom == null) { advance(InitialZoomState.withTimeAndType(overlappingTimeRange, typeZoom)); } else if (currentZoom.hasTimeRange(overlappingTimeRange) == false 
&& currentZoom.hasTypeZoomLevel(typeZoom) == false) { @@ -580,7 +580,7 @@ public class TimeLineController { } synchronized public void pushFilters(RootFilterState filter) { - ZoomState currentZoom = filteredEvents.zoomStateProperty().get(); + EventsModelParams currentZoom = filteredEvents.modelParamsProperty().get(); if (currentZoom == null) { advance(InitialZoomState.withFilterState(filter)); } else if (currentZoom.hasFilterState(filter) == false) { @@ -596,7 +596,7 @@ public class TimeLineController { historyManager.retreat(); } - synchronized private void advance(ZoomState newState) { + synchronized private void advance(EventsModelParams newState) { historyManager.advance(newState); } @@ -746,7 +746,7 @@ public class TimeLineController { case DATA_ADDED: ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue(); if (null != eventData && eventData.getBlackboardArtifactType().getTypeID() == TSK_HASHSET_HIT.getTypeID()) { - logFutureException(executor.submit(() -> filteredEvents.setHashHit(eventData.getArtifacts())), + logFutureException(executor.submit(() -> filteredEvents.updateEventsForHashSetHits(eventData.getArtifacts())), "Error executing task in response to DATA_ADDED event.", "Error executing response to new data."); } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/actions/ResetFilters.java b/Core/src/org/sleuthkit/autopsy/timeline/actions/ResetFilters.java index 30ed0ebb81..b895d527cb 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/actions/ResetFilters.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/actions/ResetFilters.java @@ -22,7 +22,7 @@ import javafx.beans.binding.BooleanBinding; import javafx.event.ActionEvent; import org.controlsfx.control.action.Action; import org.openide.util.NbBundle; -import org.sleuthkit.autopsy.timeline.FilteredEventsModel; +import org.sleuthkit.autopsy.timeline.EventsModel; import org.sleuthkit.autopsy.timeline.TimeLineController; /** @@ -32,7 +32,7 @@ import org.sleuthkit.autopsy.timeline.TimeLineController; "RestFilters.longText=Reset all filters to their default state."}) public class ResetFilters extends Action { - private FilteredEventsModel eventsModel; + private EventsModel eventsModel; public ResetFilters(final TimeLineController controller) { this(Bundle.ResetFilters_text(), controller); @@ -44,12 +44,12 @@ public class ResetFilters extends Action { eventsModel = controller.getEventsModel(); disabledProperty().bind(new BooleanBinding() { { - bind(eventsModel.zoomStateProperty()); + bind(eventsModel.modelParamsProperty()); } @Override protected boolean computeValue() { - return eventsModel.zoomStateProperty().getValue().getFilterState().equals(eventsModel.getDefaultFilter()); + return eventsModel.modelParamsProperty().getValue().getEventFilterState().equals(eventsModel.getDefaultEventFilter()); } }); setEventHandler((ActionEvent t) -> { diff --git a/Core/src/org/sleuthkit/autopsy/timeline/actions/SaveSnapshotAsReport.java b/Core/src/org/sleuthkit/autopsy/timeline/actions/SaveSnapshotAsReport.java index 6208714e06..b5949b7f01 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/actions/SaveSnapshotAsReport.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/actions/SaveSnapshotAsReport.java @@ -137,7 +137,7 @@ public class SaveSnapshotAsReport extends Action { reportMainFilePath = new SnapShotReportWriter(currentCase, reportFolderPath, reportName, - controller.getEventsModel().getZoomState(), + controller.getEventsModel().getModelParams(), generationDate, snapshot).writeReport(); } catch (IOException ex) { 
LOGGER.log(Level.SEVERE, "Error writing report to disk at " + reportFolderPath, ex); //NON_NLS diff --git a/Core/src/org/sleuthkit/autopsy/timeline/actions/ZoomOut.java b/Core/src/org/sleuthkit/autopsy/timeline/actions/ZoomOut.java index 9550004e1c..5022d3e8da 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/actions/ZoomOut.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/actions/ZoomOut.java @@ -26,7 +26,7 @@ import javafx.scene.image.ImageView; import org.controlsfx.control.action.Action; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.timeline.FilteredEventsModel; +import org.sleuthkit.autopsy.timeline.EventsModel; import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.datamodel.TskCoreException; @@ -58,10 +58,10 @@ public class ZoomOut extends Action { //disable action when the current time range already encompases the entire case. disabledProperty().bind(new BooleanBinding() { - private final FilteredEventsModel eventsModel = controller.getEventsModel(); + private final EventsModel eventsModel = controller.getEventsModel(); { - bind(eventsModel.zoomStateProperty(), eventsModel.timeRangeProperty()); + bind(eventsModel.modelParamsProperty(), eventsModel.timeRangeProperty()); } @Override diff --git a/Core/src/org/sleuthkit/autopsy/timeline/actions/ZoomToEvents.java b/Core/src/org/sleuthkit/autopsy/timeline/actions/ZoomToEvents.java index e85de25b5f..799208bf8e 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/actions/ZoomToEvents.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/actions/ZoomToEvents.java @@ -27,7 +27,7 @@ import org.controlsfx.control.action.Action; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.timeline.TimeLineController; -import org.sleuthkit.autopsy.timeline.FilteredEventsModel; +import org.sleuthkit.autopsy.timeline.EventsModel; import org.sleuthkit.datamodel.TskCoreException; /** @@ -56,10 +56,10 @@ public class ZoomToEvents extends Action { //disable action when the current time range already encompases the entire case. disabledProperty().bind(new BooleanBinding() { - private final FilteredEventsModel eventsModel = controller.getEventsModel(); + private final EventsModel eventsModel = controller.getEventsModel(); { - bind(eventsModel.zoomStateProperty()); + bind(eventsModel.modelParamsProperty()); } @Override diff --git a/Core/src/org/sleuthkit/autopsy/timeline/explorernodes/EventNode.java b/Core/src/org/sleuthkit/autopsy/timeline/explorernodes/EventNode.java index 56dc02daed..018adf1018 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/explorernodes/EventNode.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/explorernodes/EventNode.java @@ -45,7 +45,7 @@ import org.sleuthkit.autopsy.datamodel.DataModelActionsFactory; import org.sleuthkit.autopsy.datamodel.DisplayableItemNode; import org.sleuthkit.autopsy.datamodel.DisplayableItemNodeVisitor; import org.sleuthkit.autopsy.datamodel.NodeProperty; -import org.sleuthkit.autopsy.timeline.FilteredEventsModel; +import org.sleuthkit.autopsy.timeline.EventsModel; import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.actions.ViewFileInTimelineAction; import org.sleuthkit.autopsy.timeline.ui.EventTypeUtils; @@ -262,7 +262,7 @@ public class EventNode extends DisplayableItemNode { * @return An EventNode with the content (and possible artifact) backing * this event in its lookup. 
*/ - public static EventNode createEventNode(final Long eventID, FilteredEventsModel eventsModel) throws TskCoreException { + public static EventNode createEventNode(final Long eventID, EventsModel eventsModel) throws TskCoreException { SleuthkitCase sleuthkitCase = eventsModel.getSleuthkitCase(); diff --git a/Core/src/org/sleuthkit/autopsy/timeline/explorernodes/EventRootNode.java b/Core/src/org/sleuthkit/autopsy/timeline/explorernodes/EventRootNode.java index e7c14d87f9..833d6ccc41 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/explorernodes/EventRootNode.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/explorernodes/EventRootNode.java @@ -32,7 +32,7 @@ import org.openide.util.lookup.Lookups; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.datamodel.DisplayableItemNode; import org.sleuthkit.autopsy.datamodel.DisplayableItemNodeVisitor; -import org.sleuthkit.autopsy.timeline.FilteredEventsModel; +import org.sleuthkit.autopsy.timeline.EventsModel; import org.sleuthkit.datamodel.TskCoreException; /** @@ -49,7 +49,7 @@ public class EventRootNode extends DisplayableItemNode { */ public static final int MAX_EVENTS_TO_DISPLAY = 5000; - public EventRootNode(Collection eventIds, FilteredEventsModel filteredEvents) { + public EventRootNode(Collection eventIds, EventsModel filteredEvents) { super(Children.create(new EventNodeChildFactory(eventIds, filteredEvents), true), Lookups.singleton(eventIds)); } @@ -83,10 +83,10 @@ public class EventRootNode extends DisplayableItemNode { /** * filteredEvents is used to lookup the events from their IDs */ - private final FilteredEventsModel filteredEvents; + private final EventsModel filteredEvents; private final Map nodesMap = new HashMap<>(); - EventNodeChildFactory(Collection eventIds, FilteredEventsModel filteredEvents) { + EventNodeChildFactory(Collection eventIds, EventsModel filteredEvents) { this.eventIDs = eventIds; this.filteredEvents = filteredEvents; } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/snapshot/SnapShotReportWriter.java b/Core/src/org/sleuthkit/autopsy/timeline/snapshot/SnapShotReportWriter.java index c7734dbeea..994b23d845 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/snapshot/SnapShotReportWriter.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/snapshot/SnapShotReportWriter.java @@ -27,14 +27,14 @@ import javax.imageio.ImageIO; import org.joda.time.format.DateTimeFormat; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.report.uisnapshot.UiSnapShotReportWriter; -import org.sleuthkit.autopsy.timeline.zooming.ZoomState; +import org.sleuthkit.autopsy.timeline.zooming.EventsModelParams; /** * Generate and write the Timeline snapshot report to disk. */ public class SnapShotReportWriter extends UiSnapShotReportWriter{ - private final ZoomState zoomState; + private final EventsModelParams zoomState; private final BufferedImage image; /** @@ -49,7 +49,7 @@ public class SnapShotReportWriter extends UiSnapShotReportWriter{ * @param generationDate The generation Date of the report. * @param snapshot A snapshot of the view to include in the report. 
*/ - public SnapShotReportWriter(Case currentCase, Path reportFolderPath, String reportName, ZoomState zoomState, Date generationDate, BufferedImage snapshot) { + public SnapShotReportWriter(Case currentCase, Path reportFolderPath, String reportName, EventsModelParams zoomState, Date generationDate, BufferedImage snapshot) { super(currentCase, reportFolderPath, reportName, generationDate); this.zoomState = zoomState; this.image = snapshot; diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/AbstractTimeLineView.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/AbstractTimeLineView.java index d9a51f2023..7a4fc4db85 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/AbstractTimeLineView.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/AbstractTimeLineView.java @@ -37,7 +37,7 @@ import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.LoggedTask; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.ThreadConfined; -import org.sleuthkit.autopsy.timeline.FilteredEventsModel; +import org.sleuthkit.autopsy.timeline.EventsModel; import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.ViewMode; import org.sleuthkit.autopsy.timeline.events.RefreshRequestedEvent; @@ -75,7 +75,7 @@ public abstract class AbstractTimeLineView extends BorderPane { private Task updateTask; private final TimeLineController controller; - private final FilteredEventsModel filteredEvents; + private final EventsModel filteredEvents; /** * Constructor @@ -86,7 +86,7 @@ public abstract class AbstractTimeLineView extends BorderPane { this.controller = controller; this.filteredEvents = controller.getEventsModel(); this.filteredEvents.registerForEvents(this); - this.filteredEvents.zoomStateProperty().addListener(updateListener); + this.filteredEvents.modelParamsProperty().addListener(updateListener); TimeLineController.timeZoneProperty().addListener(updateListener); } @@ -170,7 +170,7 @@ public abstract class AbstractTimeLineView extends BorderPane { * * @return The FilteredEventsModel for this view. 
*/ - protected FilteredEventsModel getEventsModel() { + protected EventsModel getEventsModel() { return filteredEvents; } @@ -224,7 +224,7 @@ public abstract class AbstractTimeLineView extends BorderPane { updateTask = null; } //remvoe and gc updateListener - this.filteredEvents.zoomStateProperty().removeListener(updateListener); + this.filteredEvents.modelParamsProperty().removeListener(updateListener); TimeLineController.timeZoneProperty().removeListener(updateListener); updateListener = null; filteredEvents.unRegisterForEvents(this); diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/ViewFrame.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/ViewFrame.java index 9e780277f3..39506554ac 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/ViewFrame.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/ViewFrame.java @@ -77,7 +77,7 @@ import org.sleuthkit.autopsy.coreutils.LoggedTask; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.timeline.FXMLConstructor; -import org.sleuthkit.autopsy.timeline.FilteredEventsModel; +import org.sleuthkit.autopsy.timeline.EventsModel; import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.ViewMode; import org.sleuthkit.autopsy.timeline.actions.AddManualEvent; @@ -236,7 +236,7 @@ final public class ViewFrame extends BorderPane { private final NotificationPane notificationPane = new NotificationPane(); private final TimeLineController controller; - private final FilteredEventsModel filteredEvents; + private final EventsModel filteredEvents; /** * Listen to changes in the range slider selection and forward to the @@ -426,7 +426,7 @@ final public class ViewFrame extends BorderPane { //listen for changes in the time range / zoom params TimeLineController.timeZoneProperty().addListener(timeZoneProp -> refreshTimeUI()); filteredEvents.timeRangeProperty().addListener(timeRangeProp -> refreshTimeUI()); - filteredEvents.zoomStateProperty().addListener(zoomListener); + filteredEvents.modelParamsProperty().addListener(zoomListener); refreshTimeUI(); //populate the view refreshHistorgram(); @@ -474,7 +474,7 @@ final public class ViewFrame extends BorderPane { @Subscribe @NbBundle.Messages({ "ViewFrame.notification.cacheInvalidated=The event data has been updated, the visualization may be out of date."}) - public void handleCacheInvalidated(FilteredEventsModel.CacheInvalidatedEvent event) { + public void handleCacheInvalidated(EventsModel.CacheInvalidatedEvent event) { Platform.runLater(() -> { if (hostedView.needsRefresh() == false) { hostedView.setNeedsRefresh(); diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/countsview/CountsViewPane.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/countsview/CountsViewPane.java index 70197ff3e3..53e79210b2 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/countsview/CountsViewPane.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/countsview/CountsViewPane.java @@ -56,7 +56,7 @@ import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.timeline.FXMLConstructor; -import org.sleuthkit.autopsy.timeline.FilteredEventsModel; +import org.sleuthkit.autopsy.timeline.EventsModel; import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.ViewMode; import org.sleuthkit.autopsy.timeline.ui.AbstractTimelineChart; @@ -374,7 +374,7 @@ public class 
CountsViewPane extends AbstractTimelineChart implements private ContextMenu chartContextMenu; private final TimeLineController controller; - private final FilteredEventsModel filteredEvents; + private final EventsModel filteredEvents; private IntervalSelector intervalSelector; diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailViewPane.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailViewPane.java index 8736c46103..004dd21a03 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailViewPane.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailViewPane.java @@ -53,7 +53,7 @@ import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.timeline.FXMLConstructor; -import org.sleuthkit.autopsy.timeline.FilteredEventsModel; +import org.sleuthkit.autopsy.timeline.EventsModel; import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.ViewMode; import org.sleuthkit.autopsy.timeline.ui.AbstractTimelineChart; @@ -61,7 +61,7 @@ import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.DetailViewEvent; import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.DetailsViewModel; import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.EventStripe; import org.sleuthkit.autopsy.timeline.utils.MappedList; -import org.sleuthkit.autopsy.timeline.zooming.ZoomState; +import org.sleuthkit.autopsy.timeline.zooming.EventsModelParams; import org.sleuthkit.datamodel.TimelineLevelOfDetail; import org.sleuthkit.datamodel.TskCoreException; @@ -99,7 +99,7 @@ final public class DetailViewPane extends AbstractTimelineChart { this.pinnedDateAxis = pinnedDateAxis; this.selectedNodes = selectedNodes; - FilteredEventsModel eventsModel = getController().getEventsModel(); + EventsModel eventsModel = getController().getEventsModel(); /* * If the time range is changed, clear the guide line and the interval @@ -147,7 +147,7 @@ final class DetailsChart extends Control implements TimeLineChart { eventsModel.timeRangeProperty().addListener(observable -> clearTimeBasedUIElements()); //if the view paramaters change, clear the selection - eventsModel.zoomStateProperty().addListener(observable -> getSelectedNodes().clear()); + eventsModel.modelParamsProperty().addListener(observable -> getSelectedNodes().clear()); } /** diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventClusterNode.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventClusterNode.java index 402cefe2b6..581b79d7d6 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventClusterNode.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventClusterNode.java @@ -58,7 +58,7 @@ import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.SingleDetailsViewE import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.SqlFilterState; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DescriptionFilter; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState; -import org.sleuthkit.autopsy.timeline.zooming.ZoomState; +import org.sleuthkit.autopsy.timeline.zooming.EventsModelParams; import org.sleuthkit.datamodel.TimelineLevelOfDetail; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TimelineEventType; @@ -177,12 +177,12 @@ final class EventClusterNode extends MultiEventNodeBase( new EventTypeFilter(getEventType()), true)); final 
Interval subClusterSpan = new Interval(getStartMillis(), getEndMillis() + 1000); final TimelineEventType.HierarchyLevel eventTypeZoomLevel = eventsModel.getEventTypeZoom(); - final ZoomState zoom = new ZoomState(subClusterSpan, eventTypeZoomLevel, subClusterFilter, getDescriptionLevel()); + final EventsModelParams zoom = new EventsModelParams(subClusterSpan, eventTypeZoomLevel, subClusterFilter, getDescriptionLevel()); DescriptionFilter descriptionFilter = new DescriptionFilter(getEvent().getDescriptionLevel(), getDescription()); /* diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventNodeBase.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventNodeBase.java index 8caad5f6fb..7c37c274a6 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventNodeBase.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventNodeBase.java @@ -67,7 +67,7 @@ import org.joda.time.DateTime; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.ThreadConfined; -import org.sleuthkit.autopsy.timeline.FilteredEventsModel; +import org.sleuthkit.autopsy.timeline.EventsModel; import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent; import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent; @@ -126,7 +126,7 @@ public abstract class EventNodeBase extends StackP final HBox controlsHBox = new HBox(5); final HBox infoHBox = new HBox(5, eventTypeImageView, hashIV, tagIV, descrLabel, countLabel, controlsHBox); final SleuthkitCase sleuthkitCase; - final FilteredEventsModel eventsModel; + final EventsModel eventsModel; private Timeline timeline; private Button pinButton; private final Border SELECTION_BORDER; diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/DetailsViewModel.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/DetailsViewModel.java index ff0b77d49c..4045c05ffd 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/DetailsViewModel.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/DetailsViewModel.java @@ -45,13 +45,13 @@ import org.joda.time.DateTimeZone; import org.joda.time.Interval; import org.joda.time.Period; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.timeline.FilteredEventsModel; +import org.sleuthkit.autopsy.timeline.EventsModel; import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.UIFilter; import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl; import org.sleuthkit.autopsy.timeline.utils.RangeDivision; import org.sleuthkit.autopsy.timeline.zooming.TimeUnits; -import org.sleuthkit.autopsy.timeline.zooming.ZoomState; +import org.sleuthkit.autopsy.timeline.zooming.EventsModelParams; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TimelineManager; import org.sleuthkit.datamodel.TskCoreException; @@ -68,12 +68,12 @@ final public class DetailsViewModel { private final static Logger logger = Logger.getLogger(DetailsViewModel.class.getName()); - private final FilteredEventsModel eventsModel; - private final LoadingCache> eventCache; + private final EventsModel eventsModel; + private final LoadingCache> eventCache; private final TimelineManager eventManager; private final SleuthkitCase sleuthkitCase; - public DetailsViewModel(FilteredEventsModel eventsModel) { + public DetailsViewModel(EventsModel 
eventsModel) { this.eventsModel = eventsModel; this.eventManager = eventsModel.getEventManager(); this.sleuthkitCase = eventsModel.getSleuthkitCase(); @@ -86,7 +86,7 @@ final public class DetailsViewModel { } @Subscribe - void handleCacheInvalidation(FilteredEventsModel.CacheInvalidatedEvent event) { + void handleCacheInvalidation(EventsModel.CacheInvalidatedEvent event) { eventCache.invalidateAll(); } @@ -99,7 +99,7 @@ final public class DetailsViewModel { * * @throws org.sleuthkit.datamodel.TskCoreException */ - public List getEventStripes(ZoomState zoom) throws TskCoreException { + public List getEventStripes(EventsModelParams zoom) throws TskCoreException { return getEventStripes(UIFilter.getAllPassFilter(), zoom); } @@ -113,11 +113,11 @@ final public class DetailsViewModel { * * @throws org.sleuthkit.datamodel.TskCoreException */ - public List getEventStripes(UIFilter uiFilter, ZoomState zoom) throws TskCoreException { + public List getEventStripes(UIFilter uiFilter, EventsModelParams zoom) throws TskCoreException { DateTimeZone timeZone = TimeLineController.getJodaTimeZone(); //unpack params Interval timeRange = zoom.getTimeRange(); - TimelineLevelOfDetail descriptionLOD = zoom.getDescriptionLOD(); + TimelineLevelOfDetail descriptionLOD = zoom.getTimelineLOD(); //intermediate results Map> eventClusters = new HashMap<>(); @@ -156,10 +156,10 @@ final public class DetailsViewModel { * @throws org.sleuthkit.datamodel.TskCoreException If there is an error * querying the db. */ - private List getEvents(ZoomState zoom, DateTimeZone timeZone) throws TskCoreException { + private List getEvents(EventsModelParams zoom, DateTimeZone timeZone) throws TskCoreException { //unpack params Interval timeRange = zoom.getTimeRange(); - TimelineFilter.RootFilter activeFilter = zoom.getFilterState().getActiveFilter(); + TimelineFilter.RootFilter activeFilter = zoom.getEventFilterState().getActiveFilter(); return eventManager.getEvents(timeRange, activeFilter); } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/FilterSetPanel.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/FilterSetPanel.java index 5d28e24c82..5b9f417b82 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/FilterSetPanel.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/FilterSetPanel.java @@ -40,7 +40,7 @@ import org.controlsfx.control.action.Action; import org.controlsfx.control.action.ActionUtils; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.timeline.FXMLConstructor; -import org.sleuthkit.autopsy.timeline.FilteredEventsModel; +import org.sleuthkit.autopsy.timeline.EventsModel; import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.actions.ResetFilters; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DescriptionFilterState; @@ -79,7 +79,7 @@ final public class FilterSetPanel extends BorderPane { @FXML private SplitPane splitPane; - private final FilteredEventsModel filteredEvents; + private final EventsModel filteredEvents; private final TimeLineController controller; /** @@ -115,16 +115,16 @@ final public class FilterSetPanel extends BorderPane { legendColumn.setCellFactory(col -> new LegendCell(this.controller)); //type is the only filter expanded initialy - expansionMap.put(filteredEvents.getFilterState().getFilter(), true); - expansionMap.put(filteredEvents.getFilterState().getEventTypeFilterState().getFilter(), true); + expansionMap.put(filteredEvents.getEventFilterState().getFilter(), true); + 
expansionMap.put(filteredEvents.getEventFilterState().getEventTypeFilterState().getFilter(), true); InvalidationListener applyFiltersListener = observable -> applyFilters(); - filteredEvents.eventTypeZoomProperty().addListener(applyFiltersListener); + filteredEvents.eventTypesHierarchyLevelProperty().addListener(applyFiltersListener); filteredEvents.descriptionLODProperty().addListener(applyFiltersListener); filteredEvents.timeRangeProperty().addListener(applyFiltersListener); - filteredEvents.filterProperty().addListener(observable -> refreshFilterUI()); + filteredEvents.eventFilterProperty().addListener(observable -> refreshFilterUI()); refreshFilterUI(); hiddenDescriptionsListView.setItems(controller.getQuickHideFilters()); @@ -164,7 +164,7 @@ final public class FilterSetPanel extends BorderPane { private void refreshFilterUI() { Platform.runLater(() - -> filterTreeTable.setRoot(new FilterTreeItem(filteredEvents.filterProperty().get().copyOf(), expansionMap))); + -> filterTreeTable.setRoot(new FilterTreeItem(filteredEvents.eventFilterProperty().get().copyOf(), expansionMap))); } private void applyFilters() { diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/LegendCell.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/LegendCell.java index 53ffec84a6..817effdacc 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/LegendCell.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/LegendCell.java @@ -28,7 +28,7 @@ import javafx.scene.layout.HBox; import javafx.scene.paint.Color; import javafx.scene.shape.Rectangle; import org.openide.util.NbBundle; -import org.sleuthkit.autopsy.timeline.FilteredEventsModel; +import org.sleuthkit.autopsy.timeline.EventsModel; import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.ui.EventTypeUtils; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState; @@ -46,7 +46,7 @@ final class LegendCell extends TreeTableCell, FilterState> { private final TimeLineController controller; - private final FilteredEventsModel filteredEvents; + private final EventsModel filteredEvents; //We need a controller so we can listen to changes in EventTypeZoom to show/hide legends LegendCell(TimeLineController controller) { @@ -75,7 +75,7 @@ final class LegendCell extends TreeTableCell, FilterState> { rect.setArcWidth(5); rect.setStrokeWidth(3); setLegendColor(filter, rect, this.filteredEvents.getEventTypeZoom()); - this.filteredEvents.eventTypeZoomProperty().addListener((obs, oldZoomLevel, newZoomLevel) -> { + this.filteredEvents.eventTypesHierarchyLevelProperty().addListener((obs, oldZoomLevel, newZoomLevel) -> { setLegendColor(filter, rect, newZoomLevel); }); diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/ListViewPane.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/ListViewPane.java index b94e7956bd..6cb7d55160 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/ListViewPane.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/ListViewPane.java @@ -28,7 +28,7 @@ import javafx.concurrent.Task; import javafx.scene.Node; import org.joda.time.Interval; import org.openide.util.NbBundle; -import org.sleuthkit.autopsy.timeline.FilteredEventsModel; +import org.sleuthkit.autopsy.timeline.EventsModel; import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.ViewMode; import org.sleuthkit.autopsy.timeline.events.ViewInTimelineRequestedEvent; @@ -116,7 +116,7 @@ public class ListViewPane extends 
AbstractTimeLineView { return null; } - FilteredEventsModel eventsModel = getEventsModel(); + EventsModel eventsModel = getEventsModel(); Set selectedEventIDs; TimeLineController controller = getController(); diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/datamodel/ListViewModel.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/datamodel/ListViewModel.java index 957b17265c..d51e454247 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/datamodel/ListViewModel.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/datamodel/ListViewModel.java @@ -29,7 +29,7 @@ import java.util.Map.Entry; import java.util.Objects; import static java.util.stream.Collectors.groupingBy; import org.joda.time.Interval; -import org.sleuthkit.autopsy.timeline.FilteredEventsModel; +import org.sleuthkit.autopsy.timeline.EventsModel; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState; import org.sleuthkit.datamodel.TimelineManager; import org.sleuthkit.datamodel.TskCoreException; @@ -44,10 +44,10 @@ import org.sleuthkit.datamodel.TimelineLevelOfDetail; */ public class ListViewModel { - private final FilteredEventsModel eventsModel; + private final EventsModel eventsModel; private final TimelineManager eventManager; - public ListViewModel(FilteredEventsModel eventsModel) { + public ListViewModel(EventsModel eventsModel) { this.eventsModel = eventsModel; this.eventManager = eventsModel.getEventManager(); } @@ -63,7 +63,7 @@ public class ListViewModel { * @throws org.sleuthkit.datamodel.TskCoreException */ public List getCombinedEvents() throws TskCoreException { - return getCombinedEvents(eventsModel.getTimeRange(), eventsModel.getFilterState()); + return getCombinedEvents(eventsModel.getTimeRange(), eventsModel.getEventFilterState()); } /** diff --git a/Core/src/org/sleuthkit/autopsy/timeline/zooming/EventsModelParams.java b/Core/src/org/sleuthkit/autopsy/timeline/zooming/EventsModelParams.java new file mode 100755 index 0000000000..b3010fe64c --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/timeline/zooming/EventsModelParams.java @@ -0,0 +1,135 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2013-2019 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.timeline.zooming; + +import java.util.Objects; +import org.joda.time.Interval; +import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState; +import org.sleuthkit.datamodel.TimelineEventType; +import org.sleuthkit.datamodel.TimelineLevelOfDetail; + +/** + * A container that bundles the user-specified parameters for the events model + * so that they can be passed around and saved as mementos to support a + * navigable (forwards-backwards) history feature for the events model. 
+ */ +final public class EventsModelParams { + + private final Interval timeRange; + private final TimelineEventType.HierarchyLevel eventTypesHierarchyLevel; + private final RootFilterState eventFilterState; + private final TimelineLevelOfDetail timelineLOD; + + public EventsModelParams(Interval timeRange, TimelineEventType.HierarchyLevel eventTypesHierarchyLevel, RootFilterState eventFilterState, TimelineLevelOfDetail timelineLOD) { + this.timeRange = timeRange; + this.eventTypesHierarchyLevel = eventTypesHierarchyLevel; + this.eventFilterState = eventFilterState; + this.timelineLOD = timelineLOD; + } + + public Interval getTimeRange() { + return timeRange; + } + + public TimelineEventType.HierarchyLevel getEventTypesHierarchyLevel() { + return eventTypesHierarchyLevel; + } + + public RootFilterState getEventFilterState() { + return eventFilterState; + } + + public TimelineLevelOfDetail getTimelineLOD() { + return timelineLOD; + } + + public EventsModelParams withTimeAndType(Interval timeRange, TimelineEventType.HierarchyLevel zoomLevel) { + return new EventsModelParams(timeRange, zoomLevel, eventFilterState, timelineLOD); + } + + public EventsModelParams withTypeZoomLevel(TimelineEventType.HierarchyLevel zoomLevel) { + return new EventsModelParams(timeRange, zoomLevel, eventFilterState, timelineLOD); + } + + public EventsModelParams withTimeRange(Interval timeRange) { + return new EventsModelParams(timeRange, eventTypesHierarchyLevel, eventFilterState, timelineLOD); + } + + public EventsModelParams withDescrLOD(TimelineLevelOfDetail descrLOD) { + return new EventsModelParams(timeRange, eventTypesHierarchyLevel, eventFilterState, descrLOD); + } + + public EventsModelParams withFilterState(RootFilterState filter) { + return new EventsModelParams(timeRange, eventTypesHierarchyLevel, filter, timelineLOD); + } + + public boolean hasFilterState(RootFilterState filterSet) { + return this.eventFilterState.equals(filterSet); + } + + public boolean hasTypeZoomLevel(TimelineEventType.HierarchyLevel typeZoom) { + return this.eventTypesHierarchyLevel.equals(typeZoom); + } + + public boolean hasTimeRange(Interval timeRange) { + return this.timeRange != null && this.timeRange.equals(timeRange); + } + + public boolean hasDescrLOD(TimelineLevelOfDetail newLOD) { + return this.timelineLOD.equals(newLOD); + } + + @Override + public int hashCode() { + int hash = 3; + hash = 97 * hash + Objects.hashCode(this.timeRange.getStartMillis()); + hash = 97 * hash + Objects.hashCode(this.timeRange.getEndMillis()); + hash = 97 * hash + Objects.hashCode(this.eventTypesHierarchyLevel); + hash = 97 * hash + Objects.hashCode(this.eventFilterState); + hash = 97 * hash + Objects.hashCode(this.timelineLOD); + + return hash; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final EventsModelParams other = (EventsModelParams) obj; + if (!Objects.equals(this.timeRange, other.getTimeRange())) { + return false; + } + if (this.eventTypesHierarchyLevel != other.getEventTypesHierarchyLevel()) { + return false; + } + if (this.eventFilterState.equals(other.getEventFilterState()) == false) { + return false; + } + return this.timelineLOD == other.getTimelineLOD(); + } + + @Override + public String toString() { + return "ZoomState{" + "timeRange=" + timeRange + ", typeZoomLevel=" + eventTypesHierarchyLevel + ", filter=" + eventFilterState.getActiveFilter().toString() + ", descrLOD=" + timelineLOD + '}'; //NON-NLS + } +} diff --git 
a/Core/src/org/sleuthkit/autopsy/timeline/zooming/ZoomSettingsPane.java b/Core/src/org/sleuthkit/autopsy/timeline/zooming/ZoomSettingsPane.java index 0d832e0108..0f576ee499 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/zooming/ZoomSettingsPane.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/zooming/ZoomSettingsPane.java @@ -34,7 +34,7 @@ import org.controlsfx.control.Notifications; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.timeline.FXMLConstructor; -import org.sleuthkit.autopsy.timeline.FilteredEventsModel; +import org.sleuthkit.autopsy.timeline.EventsModel; import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.ViewMode; import org.sleuthkit.autopsy.timeline.utils.RangeDivision; @@ -72,7 +72,7 @@ public class ZoomSettingsPane extends TitledPane { private Slider timeUnitSlider; private final TimeLineController controller; - private final FilteredEventsModel filteredEvents; + private final EventsModel filteredEvents; /** * Constructor @@ -97,7 +97,7 @@ public class ZoomSettingsPane extends TitledPane { typeZoomSlider.setMax(TimelineEventType.HierarchyLevel.values().length - 1); configureSliderListeners(typeZoomSlider, controller::pushEventTypeZoom, - filteredEvents.eventTypeZoomProperty(), + filteredEvents.eventTypesHierarchyLevelProperty(), TimelineEventType.HierarchyLevel.class, TimelineEventType.HierarchyLevel::ordinal, Function.identity()); diff --git a/Core/src/org/sleuthkit/autopsy/timeline/zooming/ZoomState.java b/Core/src/org/sleuthkit/autopsy/timeline/zooming/ZoomState.java deleted file mode 100755 index 2588a32cf7..0000000000 --- a/Core/src/org/sleuthkit/autopsy/timeline/zooming/ZoomState.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2013-18 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.timeline.zooming; - -import java.util.Objects; -import org.joda.time.Interval; -import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState; -import org.sleuthkit.datamodel.TimelineEvent; -import org.sleuthkit.datamodel.TimelineEventType; -import org.sleuthkit.datamodel.TimelineLevelOfDetail; - -/** - * This class encapsulates all the zoom(and filter) parameters into one object - * for passing around and as a memento of the zoom/filter state. 
- */ -final public class ZoomState { - - private final Interval timeRange; - - private final TimelineEventType.HierarchyLevel typeZoomLevel; - - private final RootFilterState filter; - - private final TimelineLevelOfDetail descrLOD; - - public Interval getTimeRange() { - return timeRange; - } - - public TimelineEventType.HierarchyLevel getTypeZoomLevel() { - return typeZoomLevel; - } - - public RootFilterState getFilterState() { - return filter; - } - - public TimelineLevelOfDetail getDescriptionLOD() { - return descrLOD; - } - - public ZoomState(Interval timeRange, TimelineEventType.HierarchyLevel zoomLevel, RootFilterState filter, TimelineLevelOfDetail descrLOD) { - this.timeRange = timeRange; - this.typeZoomLevel = zoomLevel; - this.filter = filter; - this.descrLOD = descrLOD; - } - - public ZoomState withTimeAndType(Interval timeRange, TimelineEventType.HierarchyLevel zoomLevel) { - return new ZoomState(timeRange, zoomLevel, filter, descrLOD); - } - - public ZoomState withTypeZoomLevel(TimelineEventType.HierarchyLevel zoomLevel) { - return new ZoomState(timeRange, zoomLevel, filter, descrLOD); - } - - public ZoomState withTimeRange(Interval timeRange) { - return new ZoomState(timeRange, typeZoomLevel, filter, descrLOD); - } - - public ZoomState withDescrLOD(TimelineLevelOfDetail descrLOD) { - return new ZoomState(timeRange, typeZoomLevel, filter, descrLOD); - } - - public ZoomState withFilterState(RootFilterState filter) { - return new ZoomState(timeRange, typeZoomLevel, filter, descrLOD); - } - - public boolean hasFilterState(RootFilterState filterSet) { - return this.filter.equals(filterSet); - } - - public boolean hasTypeZoomLevel(TimelineEventType.HierarchyLevel typeZoom) { - return this.typeZoomLevel.equals(typeZoom); - } - - public boolean hasTimeRange(Interval timeRange) { - return this.timeRange != null && this.timeRange.equals(timeRange); - } - - public boolean hasDescrLOD(TimelineLevelOfDetail newLOD) { - return this.descrLOD.equals(newLOD); - } - - @Override - public int hashCode() { - int hash = 3; - hash = 97 * hash + Objects.hashCode(this.timeRange.getStartMillis()); - hash = 97 * hash + Objects.hashCode(this.timeRange.getEndMillis()); - hash = 97 * hash + Objects.hashCode(this.typeZoomLevel); - hash = 97 * hash + Objects.hashCode(this.filter); - hash = 97 * hash + Objects.hashCode(this.descrLOD); - - return hash; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - final ZoomState other = (ZoomState) obj; - if (!Objects.equals(this.timeRange, other.timeRange)) { - return false; - } - if (this.typeZoomLevel != other.typeZoomLevel) { - return false; - } - if (this.filter.equals(other.filter) == false) { - return false; - } - return this.descrLOD == other.descrLOD; - } - - @Override - public String toString() { - return "ZoomState{" + "timeRange=" + timeRange + ", typeZoomLevel=" + typeZoomLevel + ", filter=" + filter.getActiveFilter().toString() + ", descrLOD=" + descrLOD + '}'; //NON-NLS - } -}
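For code outside this patch, the rename is mechanical: FilteredEventsModel becomes EventsModel, ZoomState becomes EventsModelParams, and the accessors follow suit (zoomStateProperty() -> modelParamsProperty(), getZoomState() -> getModelParams(), getFilterState() -> getEventFilterState(), getDefaultFilter() -> getDefaultEventFilter(), eventTypeZoomProperty() -> eventTypesHierarchyLevelProperty()). The sketch below is a minimal illustration of the renamed API from the point of view of a TimeLineController client; the ModelParamsLogger class itself is hypothetical and not part of this change, but every model and controller method it calls appears in the diff above.

import com.google.common.eventbus.Subscribe;
import javafx.application.Platform;
import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.zooming.EventsModelParams;

/*
 * Hypothetical client of the renamed timeline model API; illustrative only.
 */
final class ModelParamsLogger {

    private final EventsModel eventsModel;

    ModelParamsLogger(TimeLineController controller) {
        // EventsModel replaces FilteredEventsModel; the controller accessor is unchanged.
        this.eventsModel = controller.getEventsModel();

        // zoomStateProperty() is now modelParamsProperty(); the diff's views attach
        // InvalidationListeners to it in exactly this way.
        this.eventsModel.modelParamsProperty().addListener(observable -> {
            EventsModelParams params = eventsModel.getModelParams();
            System.out.println("model params changed: " + params);
        });

        // Register with the model's event bus to receive CacheInvalidatedEvent and
        // tag added/deleted events, as AbstractTimeLineView and ViewFrame do.
        this.eventsModel.registerForEvents(this);
    }

    @Subscribe
    void handleCacheInvalidated(EventsModel.CacheInvalidatedEvent event) {
        // The model's caches were invalidated; a real view marks itself stale and
        // offers a refresh action rather than reloading immediately.
        Platform.runLater(() -> System.out.println("event data changed, view may be out of date"));
    }

    void resetFilters(TimeLineController controller) {
        // getDefaultFilter() is now getDefaultEventFilter(); pushFilters() is unchanged.
        controller.pushFilters(eventsModel.getDefaultEventFilter());
    }

    void dispose() {
        eventsModel.unRegisterForEvents(this);
    }
}

Because EventsModelParams is immutable, each with*() call (withTimeRange, withTypeZoomLevel, withDescrLOD, withFilterState) yields a new memento that TimeLineController's History manager can advance to or retreat from, which is what backs the timeline's back/forward navigation.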