Merge branch 'develop' of github.com:sleuthkit/autopsy into solr8_libraries

This commit is contained in:
Eugene Livis 2019-11-27 17:01:31 -05:00
commit b44e556242
27 changed files with 1052 additions and 887 deletions

View File

@ -0,0 +1,819 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.eventbus.EventBus;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import javafx.beans.InvalidationListener;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.property.ReadOnlyObjectWrapper;
import javafx.collections.FXCollections;
import javafx.collections.ObservableMap;
import static org.apache.commons.collections4.CollectionUtils.emptyIfNull;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent;
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo;
import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent.DeletedContentTagInfo;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.timeline.events.RefreshRequestedEvent;
import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent;
import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl;
import org.sleuthkit.autopsy.timeline.utils.FilterUtils;
import org.sleuthkit.autopsy.timeline.zooming.EventsModelParams;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TimelineManager;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TimelineEvent;
import org.sleuthkit.datamodel.TimelineEventType;
import org.sleuthkit.datamodel.TimelineFilter;
import org.sleuthkit.datamodel.TimelineFilter.DataSourceFilter;
import org.sleuthkit.datamodel.TimelineFilter.DataSourcesFilter;
import org.sleuthkit.datamodel.TimelineFilter.EventTypeFilter;
import org.sleuthkit.datamodel.TimelineFilter.FileTypesFilter;
import org.sleuthkit.datamodel.TimelineFilter.HashHitsFilter;
import org.sleuthkit.datamodel.TimelineFilter.HideKnownFilter;
import org.sleuthkit.datamodel.TimelineFilter.RootFilter;
import org.sleuthkit.datamodel.TimelineFilter.TagsFilter;
import org.sleuthkit.datamodel.TimelineFilter.TextFilter;
import org.sleuthkit.datamodel.TimelineLevelOfDetail;
/**
* In the timeline implementation of the MVC pattern, this class acts as the
* model. The views are the event counts view, the event details view and the
* events list view.
*
* Concurrency Policy: TimelineManager is internally synchronized, so methods
* that only access the TimelineManager atomically do not need further
* synchronization. All other member state variables should only be accessed
* with intrinsic lock of the containing FilteredEventsModel held.
*
*/
public final class EventsModel {

    private static final Logger logger = Logger.getLogger(EventsModel.class.getName());

    private final EventBus eventbus = new EventBus("EventsModel_EventBus"); //NON-NLS
    private final Case currentCase;
    private final TimelineManager caseDbEventManager;

    /*
     * User-specified parameters for the model exposed as JFX properties. These
     * parameters apply across all of the views of the model and are set using
     * GUI elements such the event filters panel.
     *
     * IMPORTANT: Note that the parameters are exposed both as a set and
     * individually.
     */
    private final ReadOnlyObjectWrapper<EventsModelParams> modelParamsProperty = new ReadOnlyObjectWrapper<>();
    private final ReadOnlyObjectWrapper<RootFilterState> filterStateProperty = new ReadOnlyObjectWrapper<>();
    private final ReadOnlyObjectWrapper<Interval> timeRangeProperty = new ReadOnlyObjectWrapper<>();
    private final ReadOnlyObjectWrapper<TimelineEventType.HierarchyLevel> eventTypesHierarchyLevelProperty = new ReadOnlyObjectWrapper<>(TimelineEventType.HierarchyLevel.CATEGORY);
    private final ReadOnlyObjectWrapper<TimelineLevelOfDetail> timelineLODProperty = new ReadOnlyObjectWrapper<>(TimelineLevelOfDetail.LOW);

    /*
     * Caches of model data from the case database.
     */
    private final ObservableMap<Long, String> datasourceIDsToNamesMap = FXCollections.observableHashMap();
    private final LoadingCache<Object, Long> maxEventTimeCache;
    private final LoadingCache<Object, Long> minEventTimeCache;
    private final LoadingCache<Long, TimelineEvent> idsToEventsCache;
    private final LoadingCache<EventsModelParams, Map<TimelineEventType, Long>> eventCountsCache;

    /**
     * Makes a new data source filter from a given entry in the cache of data
     * source object IDs to data source names.
     *
     * @param dataSourceEntry The cache entry.
     *
     * @return A new DataSourceFilter.
     */
    private static DataSourceFilter newDataSourceFilter(Map.Entry<Long, String> dataSourceEntry) {
        return new DataSourceFilter(dataSourceEntry.getValue(), dataSourceEntry.getKey());
    }

    /**
     * Constructs the model in the timeline implementation of the MVC pattern.
     *
     * @param currentCase The current case.
     * @param modelParams The initial state of the model parameters.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    public EventsModel(Case currentCase, ReadOnlyObjectProperty<EventsModelParams> modelParams) throws TskCoreException {
        this.currentCase = currentCase;
        this.caseDbEventManager = currentCase.getSleuthkitCase().getTimelineManager();

        /*
         * Set up the caches of model data from the case database. Note that the
         * build() method calls specify the methods used to create default cache
         * entries when a call to get() would otherwise return a cache miss.
         */
        populateDataSourcesCache();
        idsToEventsCache = CacheBuilder.newBuilder()
                .maximumSize(5000L)
                .expireAfterAccess(10, TimeUnit.MINUTES)
                .build(new CacheLoaderImpl<>(caseDbEventManager::getEventById));
        eventCountsCache = CacheBuilder.newBuilder()
                .maximumSize(1000L)
                .expireAfterAccess(10, TimeUnit.MINUTES)
                .build(new CacheLoaderImpl<>(this::countEventsByType));
        maxEventTimeCache = CacheBuilder.newBuilder()
                .build(new CacheLoaderImpl<>(ignored -> caseDbEventManager.getMaxEventTime()));
        minEventTimeCache = CacheBuilder.newBuilder()
                .build(new CacheLoaderImpl<>(ignored -> caseDbEventManager.getMinEventTime()));

        /*
         * Add a listener to the data sources cache that adds a data source
         * filter to the event filter state model parameter when a data source
         * is added to the cache.
         */
        InvalidationListener dataSourcesMapListener = observable -> {
            RootFilterState rootFilter = filterStateProperty.getReadOnlyProperty().get();
            addDataSourceFilters(rootFilter);
            filterStateProperty.set(rootFilter.copyOf());
        };
        datasourceIDsToNamesMap.addListener(dataSourcesMapListener);

        /*
         * Initialize the events filter state model parameter with the default
         * events filter.
         *
         * RJCTODO: Why isn't the event filter state of the initialModelParams
         * used here?
         */
        filterStateProperty.set(getDefaultEventFilter());

        /*
         * Add a listener to the model parameters property that updates the
         * properties that expose the individual model parameters when they are
         * changed through the model parameters property.
         */
        modelParamsProperty.addListener(observable -> {
            final EventsModelParams params = modelParamsProperty.get();
            if (params != null) {
                synchronized (EventsModel.this) {
                    eventTypesHierarchyLevelProperty.set(params.getEventTypesHierarchyLevel());
                    filterStateProperty.set(params.getEventFilterState());
                    timeRangeProperty.set(params.getTimeRange());
                    timelineLODProperty.set(params.getTimelineLOD());
                }
            }
        });

        modelParamsProperty.bind(modelParams);
    }

    /**
     * Populates the map of data source object IDs to data source names from the
     * data source data in the case database.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    private synchronized void populateDataSourcesCache() throws TskCoreException {
        datasourceIDsToNamesMap.clear();
        SleuthkitCase skCase = currentCase.getSleuthkitCase();
        for (DataSource ds : skCase.getDataSources()) {
            datasourceIDsToNamesMap.putIfAbsent(ds.getId(), ds.getName());
        }
    }

    /**
     * Adds a data source filter for each data source in the data sources cache
     * to a given root filter state object.
     *
     * RJCTODO: This seems like an unusual method.
     *
     * @param rootFilterState A root filter state object.
     */
    synchronized void addDataSourceFilters(RootFilterState rootFilterState) {
        DataSourcesFilter dataSourcesFilter = rootFilterState.getDataSourcesFilterState().getFilter();
        datasourceIDsToNamesMap.entrySet().forEach(entry -> dataSourcesFilter.addSubFilter(newDataSourceFilter(entry)));
    }

    /**
     * Gets the count of all events that fit the given model parameters. The
     * counts are organized by event type for the given event types hierarchy
     * level.
     *
     * RJCTODO: Where does the argument for this method come from when called by
     * the cache builder?
     *
     * @param modelParams The model parameters.
     *
     * @return A mapping of event types to event counts at the given event types
     *         hierarchy level.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    private Map<TimelineEventType, Long> countEventsByType(EventsModelParams modelParams) throws TskCoreException {
        if (modelParams.getTimeRange() == null) {
            return Collections.emptyMap();
        } else {
            return caseDbEventManager.countEventsByType(modelParams.getTimeRange().getStartMillis() / 1000,
                    modelParams.getTimeRange().getEndMillis() / 1000,
                    modelParams.getEventFilterState().getActiveFilter(),
                    modelParams.getEventTypesHierarchyLevel());
        }
    }

    /**
     * Gets the case database events manager.
     *
     * RJCTODO: Clients should probably get their own reference.
     *
     * @return The case database events manager.
     */
    public TimelineManager getEventManager() {
        return caseDbEventManager;
    }

    /**
     * Gets the case database.
     *
     * RJCTODO: Clients should probably get their own reference.
     *
     * @return The case database.
     */
    public SleuthkitCase getSleuthkitCase() {
        return currentCase.getSleuthkitCase();
    }

    /**
     * Gets the model parameters property.
     *
     * @return A read only, observable property for the current model
     *         parameters.
     */
    synchronized public ReadOnlyObjectProperty<EventsModelParams> modelParamsProperty() {
        return modelParamsProperty.getReadOnlyProperty();
    }

    /**
     * Gets a read only, observable property for the time range model parameter.
     *
     * @return The time range model parameter property.
     */
    @NbBundle.Messages({
        "FilteredEventsModel.timeRangeProperty.errorTitle=Timeline",
        "FilteredEventsModel.timeRangeProperty.errorMessage=Error getting spanning interval."})
    synchronized public ReadOnlyObjectProperty<Interval> timeRangeProperty() {
        if (timeRangeProperty.get() == null) {
            try {
                timeRangeProperty.set(EventsModel.this.getSpanningInterval());
            } catch (TskCoreException timelineCacheException) {
                MessageNotifyUtil.Notify.error(Bundle.FilteredEventsModel_timeRangeProperty_errorTitle(),
                        Bundle.FilteredEventsModel_timeRangeProperty_errorMessage());
                logger.log(Level.SEVERE, "Error getting spanning interval.", timelineCacheException);
            }
        }
        return timeRangeProperty.getReadOnlyProperty();
    }

    /**
     * Gets a read only, observable property for the timeline level of detail
     * model parameter.
     *
     * @return The timeline level of detail model parameter property.
     */
    synchronized public ReadOnlyObjectProperty<TimelineLevelOfDetail> descriptionLODProperty() {
        return timelineLODProperty.getReadOnlyProperty();
    }

    /**
     * Gets a read only, observable property for the event filter model
     * parameter.
     *
     * @return The event filter model parameter property.
     */
    synchronized public ReadOnlyObjectProperty<RootFilterState> eventFilterProperty() {
        return filterStateProperty.getReadOnlyProperty();
    }

    /**
     * Gets a read only, observable property for the event types hierarchy level
     * model parameter.
     *
     * @return The event types hierarchy level model parameter property.
     */
    synchronized public ReadOnlyObjectProperty<TimelineEventType.HierarchyLevel> eventTypesHierarchyLevelProperty() {
        return eventTypesHierarchyLevelProperty.getReadOnlyProperty();
    }

    /**
     * Gets the current model parameters.
     *
     * RJCTODO: This breaks encapsulation. Is it really necessary?
     *
     * @return The current model parameters.
     */
    synchronized public EventsModelParams getModelParams() {
        return modelParamsProperty.get();
    }

    /**
     * Gets the time range model parameter.
     *
     * RJCTODO: This breaks encapsulation. Is it really necessary?
     *
     * @return The time range model parameter.
     */
    synchronized public Interval getTimeRange() {
        return getModelParams().getTimeRange();
    }

    /**
     * Gets the timeline level of detail model parameter.
     *
     * RJCTODO: This breaks encapsulation. Is it really necessary?
     *
     * @return The timeline level of detail model parameter.
     */
    synchronized public TimelineLevelOfDetail getDescriptionLOD() {
        return getModelParams().getTimelineLOD();
    }

    /**
     * Gets the event filter model parameter.
     *
     * RJCTODO: This breaks encapsulation. Is it really necessary?
     *
     * @return The event filter model parameter.
     */
    synchronized public RootFilterState getEventFilterState() {
        return getModelParams().getEventFilterState();
    }

    /**
     * Gets the event types hierarchy level model parameter.
     *
     * RJCTODO: This breaks encapsulation. Is it really necessary?
     *
     * @return The event types hierarchy level model parameter.
     */
    synchronized public TimelineEventType.HierarchyLevel getEventTypeZoom() {
        return getModelParams().getEventTypesHierarchyLevel();
    }

    /**
     * Gets a new instance of the default event filter state model parameter,
     * with data source filters for every data source currently in the data
     * sources cache.
     *
     * @return An instance of the default filter state model parameter.
     */
    public synchronized RootFilterState getDefaultEventFilter() {
        DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();
        datasourceIDsToNamesMap.entrySet().forEach(dataSourceEntry
                -> dataSourcesFilter.addSubFilter(newDataSourceFilter(dataSourceEntry)));
        return new RootFilterState(new RootFilter(new HideKnownFilter(),
                new TagsFilter(),
                new HashHitsFilter(),
                new TextFilter(),
                new EventTypeFilter(TimelineEventType.ROOT_EVENT_TYPE),
                dataSourcesFilter,
                FilterUtils.createDefaultFileTypesFilter(),
                Collections.emptySet()));
    }

    /**
     * Gets an event given its event ID.
     *
     * @param eventID The event ID.
     *
     * @return The event.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    public TimelineEvent getEventById(Long eventID) throws TskCoreException {
        try {
            return idsToEventsCache.get(eventID);
        } catch (ExecutionException ex) {
            throw new TskCoreException("Error getting cached event from ID", ex);
        }
    }

    /**
     * Gets a set of events given their event IDs.
     *
     * @param eventIDs The event IDs.
     *
     * @return The events.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    public Set<TimelineEvent> getEventsById(Collection<Long> eventIDs) throws TskCoreException {
        Set<TimelineEvent> events = new HashSet<>();
        for (Long id : eventIDs) {
            events.add(getEventById(id));
        }
        return events;
    }

    /**
     * Gets a list of event IDs for a given time range and a given events
     * filter.
     *
     * @param timeRange   The time range.
     * @param filterState A filter state object for the events filter.
     *
     * @return The events.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    public List<Long> getEventIDs(Interval timeRange, FilterState<? extends TimelineFilter> filterState) throws TskCoreException {
        final Interval overlap;
        RootFilter intersection;
        synchronized (this) {
            overlap = EventsModel.this.getSpanningInterval().overlap(timeRange);
            intersection = getEventFilterState().intersect(filterState).getActiveFilter();
        }
        return caseDbEventManager.getEventIDs(overlap, intersection);
    }

    /**
     * Gets a set of event IDs associated with a given file.
     *
     * @param file                    The file.
     * @param includeDerivedArtifacts If true, also gets the event IDs of events
     *                                associated with artifacts for which the
     *                                file is the source file.
     *
     * @return The event IDs.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    public Set<Long> getEventIDsForFile(AbstractFile file, boolean includeDerivedArtifacts) throws TskCoreException {
        return caseDbEventManager.getEventIDsForContent(file, includeDerivedArtifacts);
    }

    /**
     * Gets a set of event IDs associated with a given artifact.
     *
     * @param artifact The artifact.
     *
     * @return The event IDs.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    public List<Long> getEventIDsForArtifact(BlackboardArtifact artifact) throws TskCoreException {
        return caseDbEventManager.getEventIDsForArtifact(artifact);
    }

    /**
     * Gets counts by event type of the events within a given time range.
     *
     * @param timeRange The time range.
     *
     * @return The event counts by type.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    public Map<TimelineEventType, Long> getEventCounts(Interval timeRange) throws TskCoreException {
        final RootFilterState filter;
        final TimelineEventType.HierarchyLevel typeZoom;
        synchronized (this) {
            filter = getEventFilterState();
            typeZoom = getEventTypeZoom();
        }
        try {
            return eventCountsCache.get(new EventsModelParams(timeRange, typeZoom, filter, null));
        } catch (ExecutionException executionException) {
            throw new TskCoreException("Error getting cached event counts.", executionException);
        }
    }

    /**
     * Gets the spanning interval for the events that fall within the time range
     * and event filter model parameters, in terms of a given time zone.
     *
     * @param timeZone The time zone.
     *
     * @return The spanning interval.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    public Interval getSpanningInterval(DateTimeZone timeZone) throws TskCoreException {
        return caseDbEventManager.getSpanningInterval(modelParamsProperty().get().getTimeRange(), getEventFilterState().getActiveFilter(), timeZone);
    }

    /**
     * Gets the spanning interval for all of the events in the case database.
     *
     * @return The spanning interval.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    public Interval getSpanningInterval() throws TskCoreException {
        return new Interval(getMinEventTime() * 1000, 1000 + getMaxEventTime() * 1000);
    }

    /**
     * Gets the spanning interval for a collection of events.
     *
     * @param eventIDs The event IDs of the events.
     *
     * @return The spanning interval.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    public Interval getSpanningInterval(Collection<Long> eventIDs) throws TskCoreException {
        return caseDbEventManager.getSpanningInterval(eventIDs);
    }

    /**
     * Gets the minimum event time in the case database, in seconds since the
     * UNIX epoch.
     *
     * @return The minimum event time.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    public Long getMinEventTime() throws TskCoreException {
        try {
            return minEventTimeCache.get("min"); // NON-NLS
        } catch (ExecutionException ex) {
            throw new TskCoreException("Error getting cached min time.", ex);
        }
    }

    /**
     * Gets the maximum event time in the case database, in seconds since the
     * UNIX epoch.
     *
     * @return The maximum event time.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    public Long getMaxEventTime() throws TskCoreException {
        try {
            return maxEventTimeCache.get("max"); // NON-NLS
        } catch (ExecutionException ex) {
            throw new TskCoreException("Error getting cached max time.", ex);
        }
    }

    /**
     * Updates the events model for a content tag added event and publishes a
     * tag added event via the model's event bus.
     *
     * @param evt The event.
     *
     * @return If a tags added event was published via the model's event bus.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    synchronized public boolean handleContentTagAdded(ContentTagAddedEvent evt) throws TskCoreException {
        ContentTag contentTag = evt.getAddedTag();
        Content content = contentTag.getContent();
        Set<Long> updatedEventIDs = caseDbEventManager.updateEventsForContentTagAdded(content);
        if (isNotEmpty(updatedEventIDs)) {
            invalidateCaches(updatedEventIDs);
        }
        return postTagsAdded(updatedEventIDs);
    }

    /**
     * Updates the events model for an artifact tag added event and publishes a
     * tag added event via the model's event bus.
     *
     * @param evt The event.
     *
     * @return If a tags added event was published via the model's event bus.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    synchronized public boolean handleArtifactTagAdded(BlackBoardArtifactTagAddedEvent evt) throws TskCoreException {
        BlackboardArtifactTag artifactTag = evt.getAddedTag();
        BlackboardArtifact artifact = artifactTag.getArtifact();
        Set<Long> updatedEventIDs = caseDbEventManager.updateEventsForArtifactTagAdded(artifact);
        if (isNotEmpty(updatedEventIDs)) {
            invalidateCaches(updatedEventIDs);
        }
        return postTagsAdded(updatedEventIDs);
    }

    /**
     * Updates the events model for a content tag deleted event and publishes a
     * tag deleted event via the model's event bus.
     *
     * @param evt The event.
     *
     * @return If a tags deleted event was published via the model's event bus.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    synchronized public boolean handleContentTagDeleted(ContentTagDeletedEvent evt) throws TskCoreException {
        DeletedContentTagInfo deletedTagInfo = evt.getDeletedTagInfo();
        Content content = currentCase.getSleuthkitCase().getContentById(deletedTagInfo.getContentID());
        Set<Long> updatedEventIDs = caseDbEventManager.updateEventsForContentTagDeleted(content);
        if (isNotEmpty(updatedEventIDs)) {
            invalidateCaches(updatedEventIDs);
        }
        return postTagsDeleted(updatedEventIDs);
    }

    /**
     * Updates the events model for an artifact tag deleted event and publishes
     * a tag deleted event via the model's event bus.
     *
     * @param evt The event.
     *
     * @return If a tags deleted event was published via the model's event bus.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    synchronized public boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt) throws TskCoreException {
        DeletedBlackboardArtifactTagInfo deletedTagInfo = evt.getDeletedTagInfo();
        BlackboardArtifact artifact = currentCase.getSleuthkitCase().getBlackboardArtifact(deletedTagInfo.getArtifactID());
        Set<Long> updatedEventIDs = caseDbEventManager.updateEventsForArtifactTagDeleted(artifact);
        if (isNotEmpty(updatedEventIDs)) {
            invalidateCaches(updatedEventIDs);
        }
        return postTagsDeleted(updatedEventIDs);
    }

    /**
     * Post a TagsAddedEvent to all registered subscribers, if the given set of
     * updated event IDs is not empty.
     *
     * @param updatedEventIDs The set of event ids to be included in the
     *                        TagsAddedEvent.
     *
     * @return True if an event was posted.
     */
    private boolean postTagsAdded(Set<Long> updatedEventIDs) {
        boolean tagsUpdated = !updatedEventIDs.isEmpty();
        if (tagsUpdated) {
            eventbus.post(new TagsAddedEvent(updatedEventIDs));
        }
        return tagsUpdated;
    }

    /**
     * Post a TagsDeletedEvent to all registered subscribers, if the given set
     * of updated event IDs is not empty.
     *
     * @param updatedEventIDs The set of event ids to be included in the
     *                        TagsDeletedEvent.
     *
     * @return True if an event was posted.
     */
    private boolean postTagsDeleted(Set<Long> updatedEventIDs) {
        boolean tagsUpdated = !updatedEventIDs.isEmpty();
        if (tagsUpdated) {
            eventbus.post(new TagsDeletedEvent(updatedEventIDs));
        }
        return tagsUpdated;
    }

    /**
     * Register the given object to receive events.
     *
     * @param subscriber The object to register. Must implement public methods
     *                   annotated with Subscribe.
     */
    synchronized public void registerForEvents(Object subscriber) {
        eventbus.register(subscriber);
    }

    /**
     * Un-register the given object, so it no longer receives events.
     *
     * @param subscriber The object to un-register.
     */
    synchronized public void unRegisterForEvents(Object subscriber) {
        eventbus.unregister(subscriber);
    }

    /**
     * Posts a refresh requested event to all registered subscribers.
     */
    public void postRefreshRequest() {
        eventbus.post(new RefreshRequestedEvent());
    }

    /**
     * Gets a list of the event types from the case database.
     *
     * @return The list of event types.
     */
    public ImmutableList<TimelineEventType> getEventTypes() {
        return caseDbEventManager.getEventTypes();
    }

    /**
     * Sets the hash set hits flag for the events associated with the source
     * files for a collection of hash set hit artifacts.
     *
     * @param hashSetHitArtifacts The hash set hit artifacts.
     *
     * @return The event IDs of the updated events.
     *
     * @throws TskCoreException If there is an error reading model data from or
     *                          writing model data to the case database.
     */
    synchronized public Set<Long> updateEventsForHashSetHits(Collection<BlackboardArtifact> hashSetHitArtifacts) throws TskCoreException {
        Set<Long> updatedEventIDs = new HashSet<>();
        for (BlackboardArtifact artifact : hashSetHitArtifacts) {
            Content content = currentCase.getSleuthkitCase().getContentById(artifact.getObjectID());
            updatedEventIDs.addAll(caseDbEventManager.updateEventsForHashSetHit(content));
        }
        if (isNotEmpty(updatedEventIDs)) {
            invalidateCaches(updatedEventIDs);
        }
        return updatedEventIDs;
    }

    /**
     * Invalidates all of the the model caches and publishes a caches
     * invalidated event. Optionally, a collection of event IDs may be supplied,
     * in which case only the corresponding entries in the event IDs cache are
     * invalidated.
     *
     * RJCTODO: What is the use case for passing event IDs? The only place this
     * is currently done is when handling TIMELINE_EVENT_ADDED events, i.e., for
     * entirely new events. Is this some sort of performance optimization?
     *
     * @param updatedEventIDs Either null or a collection of the event IDs.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    public synchronized void invalidateCaches(Collection<Long> updatedEventIDs) throws TskCoreException {
        populateDataSourcesCache();
        minEventTimeCache.invalidateAll();
        maxEventTimeCache.invalidateAll();
        idsToEventsCache.invalidateAll(emptyIfNull(updatedEventIDs));
        eventCountsCache.invalidateAll();
        eventbus.post(new CacheInvalidatedEvent());
    }

    /**
     * Event fired when a cache has been invalidated and the views need to be
     * refreshed.
     */
    public static class CacheInvalidatedEvent {

        private CacheInvalidatedEvent() {
        }
    }
}

View File

@ -1,651 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.eventbus.EventBus;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import javafx.beans.InvalidationListener;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.property.ReadOnlyObjectWrapper;
import javafx.collections.FXCollections;
import javafx.collections.ObservableMap;
import static org.apache.commons.collections4.CollectionUtils.emptyIfNull;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent;
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo;
import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent.DeletedContentTagInfo;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.events.AutopsyEvent;
import org.sleuthkit.autopsy.timeline.events.RefreshRequestedEvent;
import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent;
import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl;
import org.sleuthkit.autopsy.timeline.utils.FilterUtils;
import org.sleuthkit.autopsy.timeline.zooming.ZoomState;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TimelineManager;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TimelineEvent;
import org.sleuthkit.datamodel.TimelineEventType;
import org.sleuthkit.datamodel.TimelineFilter;
import org.sleuthkit.datamodel.TimelineFilter.DataSourceFilter;
import org.sleuthkit.datamodel.TimelineFilter.DataSourcesFilter;
import org.sleuthkit.datamodel.TimelineFilter.EventTypeFilter;
import org.sleuthkit.datamodel.TimelineFilter.FileTypesFilter;
import org.sleuthkit.datamodel.TimelineFilter.HashHitsFilter;
import org.sleuthkit.datamodel.TimelineFilter.HideKnownFilter;
import org.sleuthkit.datamodel.TimelineFilter.RootFilter;
import org.sleuthkit.datamodel.TimelineFilter.TagsFilter;
import org.sleuthkit.datamodel.TimelineFilter.TextFilter;
import org.sleuthkit.datamodel.TimelineLevelOfDetail;
/**
 * This class acts as the model for a TimelineView.
 *
 * Views can register listeners on properties returned by methods.
 *
 * This class is implemented as a filtered view into an underlying
 * TimelineManager.
 *
 * Maintainers, NOTE: as many methods as possible should cache their results so
 * as to avoid unnecessary db calls through the TimelineManager -jm
 *
 * Concurrency Policy: TimelineManager is internally synchronized, so methods
 * that only access the TimelineManager atomically do not need further
 * synchronization. All other member state variables should only be accessed
 * with intrinsic lock of containing FilteredEventsModel held.
 *
 */
public final class FilteredEventsModel {

    private static final Logger logger = Logger.getLogger(FilteredEventsModel.class.getName());

    private final TimelineManager eventManager;

    private final Case autoCase;
    private final EventBus eventbus = new EventBus("FilteredEventsModel_EventBus"); //NON-NLS

    //Filter and zoom state
    private final ReadOnlyObjectWrapper<RootFilterState> requestedFilter = new ReadOnlyObjectWrapper<>();
    private final ReadOnlyObjectWrapper<Interval> requestedTimeRange = new ReadOnlyObjectWrapper<>();
    private final ReadOnlyObjectWrapper<ZoomState> requestedZoomState = new ReadOnlyObjectWrapper<>();
    private final ReadOnlyObjectWrapper<TimelineEventType.HierarchyLevel> requestedTypeZoom = new ReadOnlyObjectWrapper<>(TimelineEventType.HierarchyLevel.CATEGORY);
    private final ReadOnlyObjectWrapper<TimelineLevelOfDetail> requestedLOD = new ReadOnlyObjectWrapper<>(TimelineLevelOfDetail.LOW);
    // end Filter and zoom state

    //caches
    private final LoadingCache<Object, Long> maxCache;
    private final LoadingCache<Object, Long> minCache;
    private final LoadingCache<Long, TimelineEvent> idToEventCache;
    private final LoadingCache<ZoomState, Map<TimelineEventType, Long>> eventCountsCache;
    /**
     * Map from datasource id to datasource name.
     */
    private final ObservableMap<Long, String> datasourcesMap = FXCollections.observableHashMap();
    // end caches

    /**
     * Make a DataSourceFilter from an entry from the datasourcesMap.
     *
     * @param dataSourceEntry A map entry from datasource id to datasource name.
     *
     * @return A new DataSourceFilter for the given datasourcesMap entry.
     */
    private static DataSourceFilter newDataSourceFromMapEntry(Map.Entry<Long, String> dataSourceEntry) {
        return new DataSourceFilter(dataSourceEntry.getValue(), dataSourceEntry.getKey());
    }

    /**
     * Constructor.
     *
     * @param autoCase             The current case, used to reach the
     *                             SleuthkitCase and its TimelineManager.
     * @param currentStateProperty A property the model's requested ZoomState is
     *                             bound to; changes propagate into the
     *                             requested filter, time range, type zoom, and
     *                             level of detail.
     *
     * @throws TskCoreException If there is a problem reading the data sources
     *                          from the case database.
     */
    public FilteredEventsModel(Case autoCase, ReadOnlyObjectProperty<ZoomState> currentStateProperty) throws TskCoreException {
        this.autoCase = autoCase;
        this.eventManager = autoCase.getSleuthkitCase().getTimelineManager();
        populateFilterData();

        //caches
        idToEventCache = CacheBuilder.newBuilder()
                .maximumSize(5000L)
                .expireAfterAccess(10, TimeUnit.MINUTES)
                .build(new CacheLoaderImpl<>(eventManager::getEventById));
        eventCountsCache = CacheBuilder.newBuilder()
                .maximumSize(1000L)
                .expireAfterAccess(10, TimeUnit.MINUTES)
                .build(new CacheLoaderImpl<>(this::countEventsByType));
        maxCache = CacheBuilder.newBuilder()
                .build(new CacheLoaderImpl<>(ignored -> eventManager.getMaxEventTime()));
        minCache = CacheBuilder.newBuilder()
                .build(new CacheLoaderImpl<>(ignored -> eventManager.getMinEventTime()));

        // When the known data sources change, sync the requested filter so it
        // includes a sub-filter for every data source, then re-publish a copy.
        InvalidationListener filterSyncListener = observable -> {
            RootFilterState rootFilter = filterProperty().get();
            syncFilters(rootFilter);
            requestedFilter.set(rootFilter.copyOf());
        };

        datasourcesMap.addListener(filterSyncListener);

        requestedFilter.set(getDefaultFilter());

        // Unpack each new ZoomState into the individual requested-* properties
        // atomically with respect to this model's intrinsic lock.
        requestedZoomState.addListener(observable -> {
            final ZoomState zoomState = requestedZoomState.get();

            if (zoomState != null) {
                synchronized (FilteredEventsModel.this) {
                    requestedTypeZoom.set(zoomState.getTypeZoomLevel());
                    requestedFilter.set(zoomState.getFilterState());
                    requestedTimeRange.set(zoomState.getTimeRange());
                    requestedLOD.set(zoomState.getDescriptionLOD());
                }
            }
        });

        requestedZoomState.bind(currentStateProperty);
    }

    /**
     * get the count of all events that fit the given zoom params organized by
     * the EventType of the level specified in the zoomState
     *
     * @param zoomState The params that control what events to count and how to
     *                  organize the returned map
     *
     * @return a map from event type( of the requested level) to event counts
     *
     * @throws org.sleuthkit.datamodel.TskCoreException
     */
    private Map<TimelineEventType, Long> countEventsByType(ZoomState zoomState) throws TskCoreException {
        if (zoomState.getTimeRange() == null) {
            return Collections.emptyMap();
        } else {
            return eventManager.countEventsByType(zoomState.getTimeRange().getStartMillis() / 1000,
                    zoomState.getTimeRange().getEndMillis() / 1000,
                    zoomState.getFilterState().getActiveFilter(), zoomState.getTypeZoomLevel());
        }
    }

    /**
     * Get the underlying TimelineManager this model is a filtered view of.
     *
     * @return The TimelineManager for the current case.
     */
    public TimelineManager getEventManager() {
        return eventManager;
    }

    /**
     * Get the SleuthkitCase of the current case.
     *
     * @return The SleuthkitCase backing this model.
     */
    public SleuthkitCase getSleuthkitCase() {
        return autoCase.getSleuthkitCase();
    }

    /**
     * Get the smallest interval spanning the events in the given time range
     * that pass the given filter.
     *
     * @param timeRange The time range to restrict the events to.
     * @param filter    The filter the events must pass.
     * @param timeZone  The time zone used to interpret the interval.
     *
     * @return The smallest spanning interval.
     *
     * @throws TskCoreException If there is a problem reading the case database.
     */
    public Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter, DateTimeZone timeZone) throws TskCoreException {
        return eventManager.getSpanningInterval(timeRange, filter, timeZone);
    }

    /**
     * Readonly observable property for the current ZoomState
     *
     * @return A readonly observable property for the current ZoomState.
     */
    synchronized public ReadOnlyObjectProperty<ZoomState> zoomStateProperty() {
        return requestedZoomState.getReadOnlyProperty();
    }

    /**
     * Get the current ZoomState
     *
     * @return The current ZoomState
     */
    synchronized public ZoomState getZoomState() {
        return requestedZoomState.get();
    }

    /**
     * Update the data used to determine the available filters.
     */
    synchronized private void populateFilterData() throws TskCoreException {
        SleuthkitCase skCase = autoCase.getSleuthkitCase();
        //because there is no way to remove a datasource we only add to this map.
        for (DataSource ds : skCase.getDataSources()) {
            datasourcesMap.putIfAbsent(ds.getId(), ds.getName());
        }
    }

    /**
     * "sync" the given root filter with the state of the case: add new data
     * source sub-filters for any data sources in the case that don't have
     * them. New filters are selected by default.
     *
     * @param rootFilterState the filter state to modify so it is consistent
     *                        with the data sources in the case
     */
    public void syncFilters(RootFilterState rootFilterState) {
        DataSourcesFilter dataSourcesFilter = rootFilterState.getDataSourcesFilterState().getFilter();
        datasourcesMap.entrySet().forEach(entry -> dataSourcesFilter.addSubFilter(newDataSourceFromMapEntry(entry)));
    }

    /**
     * Get a read only view of the time range currently in view.
     *
     * @return A read only view of the time range currently in view.
     */
    @NbBundle.Messages({
        "FilteredEventsModel.timeRangeProperty.errorTitle=Timeline",
        "FilteredEventsModel.timeRangeProperty.errorMessage=Error getting spanning interval."})
    synchronized public ReadOnlyObjectProperty<Interval> timeRangeProperty() {
        if (requestedTimeRange.get() == null) {
            try {
                requestedTimeRange.set(getSpanningInterval());
            } catch (TskCoreException timelineCacheException) {
                MessageNotifyUtil.Notify.error(Bundle.FilteredEventsModel_timeRangeProperty_errorTitle(),
                        Bundle.FilteredEventsModel_timeRangeProperty_errorMessage());
                logger.log(Level.SEVERE, "Error getting spanning interval.", timelineCacheException);
            }
        }
        return requestedTimeRange.getReadOnlyProperty();
    }

    /**
     * Read only observable property for the current description level of
     * detail.
     *
     * @return A read only observable property for the description level of
     *         detail.
     */
    synchronized public ReadOnlyObjectProperty<TimelineLevelOfDetail> descriptionLODProperty() {
        return requestedLOD.getReadOnlyProperty();
    }

    /**
     * Read only observable property for the current filter state.
     *
     * @return A read only observable property for the current filter state.
     */
    synchronized public ReadOnlyObjectProperty<RootFilterState> filterProperty() {
        return requestedFilter.getReadOnlyProperty();
    }

    /**
     * Read only observable property for the current event type zoom level.
     *
     * @return A read only observable property for the event type zoom level.
     */
    synchronized public ReadOnlyObjectProperty<TimelineEventType.HierarchyLevel> eventTypeZoomProperty() {
        return requestedTypeZoom.getReadOnlyProperty();
    }

    /**
     * The time range currently in view.
     *
     * @return The time range currently in view.
     */
    synchronized public Interval getTimeRange() {
        return getZoomState().getTimeRange();
    }

    /**
     * Get the current description level of detail.
     *
     * @return The current description level of detail.
     */
    synchronized public TimelineLevelOfDetail getDescriptionLOD() {
        return getZoomState().getDescriptionLOD();
    }

    /**
     * Get the current filter state.
     *
     * @return The current filter state.
     */
    synchronized public RootFilterState getFilterState() {
        return getZoomState().getFilterState();
    }

    /**
     * Get the current event type zoom level.
     *
     * @return The current event type zoom level.
     */
    synchronized public TimelineEventType.HierarchyLevel getEventTypeZoom() {
        return getZoomState().getTypeZoomLevel();
    }

    /**
     * Get the default filter used at startup.
     *
     * @return the default filter used at startup
     */
    public synchronized RootFilterState getDefaultFilter() {
        DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();
        datasourcesMap.entrySet().forEach(dataSourceEntry
                -> dataSourcesFilter.addSubFilter(newDataSourceFromMapEntry(dataSourceEntry)));

        HashHitsFilter hashHitsFilter = new HashHitsFilter();
        TagsFilter tagsFilter = new TagsFilter();

        FileTypesFilter fileTypesFilter = FilterUtils.createDefaultFileTypesFilter();

        return new RootFilterState(new RootFilter(new HideKnownFilter(),
                tagsFilter,
                hashHitsFilter,
                new TextFilter(),
                new EventTypeFilter(TimelineEventType.ROOT_EVENT_TYPE),
                dataSourcesFilter,
                fileTypesFilter,
                Collections.emptySet()));
    }

    /**
     * Get the smallest interval spanning the events in the current time range
     * that pass the currently active filter.
     *
     * @param timeZone The time zone used to interpret the interval.
     *
     * @return The smallest spanning interval.
     *
     * @throws TskCoreException If there is a problem reading the case database.
     */
    public Interval getBoundingEventsInterval(DateTimeZone timeZone) throws TskCoreException {
        return eventManager.getSpanningInterval(zoomStateProperty().get().getTimeRange(), getFilterState().getActiveFilter(), timeZone);
    }

    /**
     * Get the event with the given ID, from the cache if possible.
     *
     * @param eventID The ID of the event to get.
     *
     * @return The TimelineEvent with the given ID.
     *
     * @throws TskCoreException If there is a problem loading the event.
     */
    public TimelineEvent getEventById(Long eventID) throws TskCoreException {
        try {
            return idToEventCache.get(eventID);
        } catch (ExecutionException ex) {
            throw new TskCoreException("Error getting cached event from ID", ex);
        }
    }

    /**
     * Get the events with the given IDs, from the cache if possible.
     *
     * @param eventIDs The IDs of the events to get.
     *
     * @return A Set of the TimelineEvents with the given IDs.
     *
     * @throws TskCoreException If there is a problem loading an event.
     */
    public Set<TimelineEvent> getEventsById(Collection<Long> eventIDs) throws TskCoreException {
        Set<TimelineEvent> events = new HashSet<>();
        for (Long id : eventIDs) {
            events.add(getEventById(id));
        }
        return events;
    }

    /**
     * Get the IDs of the events within the given time range that pass the
     * intersection of the current filter and the given filter.
     *
     * @param timeRange The time range of interest; it is clamped to the
     *                  case-wide spanning interval.
     * @param filter    An additional filter intersected with the current one.
     *
     * @return A List of matching event IDs.
     *
     * @throws TskCoreException If there is a problem reading the case database.
     */
    public List<Long> getEventIDs(Interval timeRange, FilterState<? extends TimelineFilter> filter) throws TskCoreException {
        final Interval overlap;
        RootFilter intersection;
        synchronized (this) {
            overlap = getSpanningInterval().overlap(timeRange);
            intersection = getFilterState().intersect(filter).getActiveFilter();
        }
        return eventManager.getEventIDs(overlap, intersection);
    }

    /**
     * Return the number of events that pass the requested filter and are within
     * the given time range.
     *
     * NOTE: this method does not change the requested time range
     *
     * @param timeRange
     *
     * @return
     *
     * @throws org.sleuthkit.datamodel.TskCoreException
     */
    public Map<TimelineEventType, Long> getEventCounts(Interval timeRange) throws TskCoreException {
        final RootFilterState filter;
        final TimelineEventType.HierarchyLevel typeZoom;
        synchronized (this) {
            filter = getFilterState();
            typeZoom = getEventTypeZoom();
        }
        try {
            return eventCountsCache.get(new ZoomState(timeRange, typeZoom, filter, null));
        } catch (ExecutionException executionException) {
            throw new TskCoreException("Error getting cached event counts.", executionException);
        }
    }

    /**
     * @return The smallest interval spanning all the events from the case,
     *         ignoring any filters or requested ranges.
     *
     * @throws org.sleuthkit.datamodel.TskCoreException
     */
    public Interval getSpanningInterval() throws TskCoreException {
        return new Interval(getMinTime() * 1000, 1000 + getMaxTime() * 1000);
    }

    /**
     * Get the smallest interval spanning all the given events.
     *
     * @param eventIDs The IDs of the events to get a spanning interval around.
     *
     * @return the smallest interval spanning all the given events
     *
     * @throws org.sleuthkit.datamodel.TskCoreException
     */
    public Interval getSpanningInterval(Collection<Long> eventIDs) throws TskCoreException {
        return eventManager.getSpanningInterval(eventIDs);
    }

    /**
     * @return the time (in seconds from unix epoch) of the absolutely first
     *         event available from the repository, ignoring any filters or
     *         requested ranges
     *
     * @throws org.sleuthkit.datamodel.TskCoreException
     */
    public Long getMinTime() throws TskCoreException {
        try {
            return minCache.get("min"); // NON-NLS
        } catch (ExecutionException ex) {
            throw new TskCoreException("Error getting cached min time.", ex);
        }
    }

    /**
     * @return the time (in seconds from unix epoch) of the absolutely last
     *         event available from the repository, ignoring any filters or
     *         requested ranges
     *
     * @throws org.sleuthkit.datamodel.TskCoreException
     */
    public Long getMaxTime() throws TskCoreException {
        try {
            return maxCache.get("max"); // NON-NLS
        } catch (ExecutionException ex) {
            throw new TskCoreException("Error getting cached max time.", ex);
        }
    }

    /**
     * Update the events for the content that had a tag added, invalidate the
     * caches for any updated events, and post a TagsAddedEvent if any events
     * were updated.
     *
     * @param evt The ContentTagAddedEvent to handle.
     *
     * @return True if a TagsAddedEvent was posted.
     *
     * @throws TskCoreException If there is a problem updating the events.
     */
    synchronized public boolean handleContentTagAdded(ContentTagAddedEvent evt) throws TskCoreException {
        ContentTag contentTag = evt.getAddedTag();
        Content content = contentTag.getContent();
        Set<Long> updatedEventIDs = eventManager.updateEventsForContentTagAdded(content);
        if (isNotEmpty(updatedEventIDs)) {
            invalidateCaches(updatedEventIDs);
        }
        return postTagsAdded(updatedEventIDs);
    }

    /**
     * Update the events for the artifact that had a tag added, invalidate the
     * caches for any updated events, and post a TagsAddedEvent if any events
     * were updated.
     *
     * @param evt The BlackBoardArtifactTagAddedEvent to handle.
     *
     * @return True if a TagsAddedEvent was posted.
     *
     * @throws TskCoreException If there is a problem updating the events.
     */
    synchronized public boolean handleArtifactTagAdded(BlackBoardArtifactTagAddedEvent evt) throws TskCoreException {
        BlackboardArtifactTag artifactTag = evt.getAddedTag();
        BlackboardArtifact artifact = artifactTag.getArtifact();
        Set<Long> updatedEventIDs = eventManager.updateEventsForArtifactTagAdded(artifact);
        if (isNotEmpty(updatedEventIDs)) {
            invalidateCaches(updatedEventIDs);
        }
        return postTagsAdded(updatedEventIDs);
    }

    /**
     * Update the events for the content that had a tag deleted, invalidate the
     * caches for any updated events, and post a TagsDeletedEvent if any events
     * were updated.
     *
     * @param evt The ContentTagDeletedEvent to handle.
     *
     * @return True if a TagsDeletedEvent was posted.
     *
     * @throws TskCoreException If there is a problem updating the events.
     */
    synchronized public boolean handleContentTagDeleted(ContentTagDeletedEvent evt) throws TskCoreException {
        DeletedContentTagInfo deletedTagInfo = evt.getDeletedTagInfo();
        Content content = autoCase.getSleuthkitCase().getContentById(deletedTagInfo.getContentID());
        Set<Long> updatedEventIDs = eventManager.updateEventsForContentTagDeleted(content);
        if (isNotEmpty(updatedEventIDs)) {
            invalidateCaches(updatedEventIDs);
        }
        return postTagsDeleted(updatedEventIDs);
    }

    /**
     * Update the events for the artifact that had a tag deleted, invalidate
     * the caches for any updated events, and post a TagsDeletedEvent if any
     * events were updated.
     *
     * @param evt The BlackBoardArtifactTagDeletedEvent to handle.
     *
     * @return True if a TagsDeletedEvent was posted.
     *
     * @throws TskCoreException If there is a problem updating the events.
     */
    synchronized public boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt) throws TskCoreException {
        DeletedBlackboardArtifactTagInfo deletedTagInfo = evt.getDeletedTagInfo();
        BlackboardArtifact artifact = autoCase.getSleuthkitCase().getBlackboardArtifact(deletedTagInfo.getArtifactID());
        Set<Long> updatedEventIDs = eventManager.updateEventsForArtifactTagDeleted(artifact);
        if (isNotEmpty(updatedEventIDs)) {
            invalidateCaches(updatedEventIDs);
        }
        return postTagsDeleted(updatedEventIDs);
    }

    /**
     * Get a Set of event IDs for the events that are derived from the given
     * file.
     *
     * @param file                    The AbstractFile to get derived event IDs
     *                                for.
     * @param includeDerivedArtifacts If true, also get event IDs for events
     *                                derived from artifacts derived form this
     *                                file. If false, only gets events derived
     *                                directly from this file (file system
     *                                timestamps).
     *
     * @return A Set of event IDs for the events that are derived from the given
     *         file.
     *
     * @throws org.sleuthkit.datamodel.TskCoreException
     */
    public Set<Long> getEventIDsForFile(AbstractFile file, boolean includeDerivedArtifacts) throws TskCoreException {
        return eventManager.getEventIDsForContent(file, includeDerivedArtifacts);
    }

    /**
     * Get a List of event IDs for the events that are derived from the given
     * artifact.
     *
     * @param artifact The BlackboardArtifact to get derived event IDs for.
     *
     * @return A List of event IDs for the events that are derived from the
     *         given artifact.
     *
     * @throws org.sleuthkit.datamodel.TskCoreException
     */
    public List<Long> getEventIDsForArtifact(BlackboardArtifact artifact) throws TskCoreException {
        return eventManager.getEventIDsForArtifact(artifact);
    }

    /**
     * Post a TagsAddedEvent to all registered subscribers, if the given set of
     * updated event IDs is not empty.
     *
     * @param updatedEventIDs The set of event ids to be included in the
     *                        TagsAddedEvent.
     *
     * @return True if an event was posted.
     */
    private boolean postTagsAdded(Set<Long> updatedEventIDs) {
        boolean tagsUpdated = !updatedEventIDs.isEmpty();
        if (tagsUpdated) {
            eventbus.post(new TagsAddedEvent(updatedEventIDs));
        }
        return tagsUpdated;
    }

    /**
     * Post a TagsDeletedEvent to all registered subscribers, if the given set
     * of updated event IDs is not empty.
     *
     * @param updatedEventIDs The set of event ids to be included in the
     *                        TagsDeletedEvent.
     *
     * @return True if an event was posted.
     */
    private boolean postTagsDeleted(Set<Long> updatedEventIDs) {
        boolean tagsUpdated = !updatedEventIDs.isEmpty();
        if (tagsUpdated) {
            eventbus.post(new TagsDeletedEvent(updatedEventIDs));
        }
        return tagsUpdated;
    }

    /**
     * Register the given object to receive events.
     *
     * @param subscriber The object to register. Must implement public methods
     *                   annotated with Subscribe.
     */
    synchronized public void registerForEvents(Object subscriber) {
        eventbus.register(subscriber);
    }

    /**
     * Un-register the given object, so it no longer receives events.
     *
     * @param subscriber The object to un-register.
     */
    synchronized public void unRegisterForEvents(Object subscriber) {
        eventbus.unregister(subscriber);
    }

    /**
     * Post a RefreshRequestedEvent to all registered subscribers.
     */
    public void postRefreshRequest() {
        eventbus.post(new RefreshRequestedEvent());
    }

    /**
     * (Re)Post an AutopsyEvent received from another event distribution system
     * locally to all registered subscribers.
     *
     * @param event The event to re-post.
     */
    public void postAutopsyEventLocally(AutopsyEvent event) {
        eventbus.post(event);
    }

    /**
     * Get the list of event types known to the TimelineManager.
     *
     * @return An ImmutableList of the TimelineEventTypes.
     */
    public ImmutableList<TimelineEventType> getEventTypes() {
        return eventManager.getEventTypes();
    }

    /**
     * Update the events derived from the content underlying the given hash set
     * hit artifacts, and invalidate the caches for any updated events.
     *
     * @param artifacts The hash set hit artifacts.
     *
     * @return The set of updated event IDs.
     *
     * @throws TskCoreException If there is a problem updating the events.
     */
    synchronized public Set<Long> setHashHit(Collection<BlackboardArtifact> artifacts) throws TskCoreException {
        Set<Long> updatedEventIDs = new HashSet<>();
        for (BlackboardArtifact artifact : artifacts) {
            Content content = autoCase.getSleuthkitCase().getContentById(artifact.getObjectID());
            updatedEventIDs.addAll(eventManager.updateEventsForHashSetHit(content));
        }
        if (isNotEmpty(updatedEventIDs)) {
            invalidateCaches(updatedEventIDs);
        }
        return updatedEventIDs;
    }

    /**
     * Invalidate the timeline caches for the given event IDs. Also forces the
     * filter values to be updated with any new values from the case data (data
     * sources, tags, etc.)
     *
     * @param updatedEventIDs A collection of the event IDs whose cached event
     *                        objects should be invalidated. Can be null or an
     *                        empty set to invalidate the general caches, such
     *                        as min/max time, or the counts per event type.
     *
     * @throws TskCoreException
     */
    public synchronized void invalidateCaches(Collection<Long> updatedEventIDs) throws TskCoreException {
        minCache.invalidateAll();
        maxCache.invalidateAll();
        idToEventCache.invalidateAll(emptyIfNull(updatedEventIDs));
        eventCountsCache.invalidateAll();
        populateFilterData();
        eventbus.post(new CacheInvalidatedEvent());
    }

    /**
     * Event fired when a cache has been invalidated. The UI should make it
     * clear that the view is potentially out of date and present an action to
     * refresh the view.
     */
    public static class CacheInvalidatedEvent {

        private CacheInvalidatedEvent() {
        }
    }
}

View File

@ -195,7 +195,7 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
//add events to table //add events to table
Set<TimelineEvent> events = new HashSet<>(); Set<TimelineEvent> events = new HashSet<>();
FilteredEventsModel eventsModel = controller.getEventsModel(); EventsModel eventsModel = controller.getEventsModel();
for (Long eventID : eventIDS) { for (Long eventID : eventIDS) {
try { try {
events.add(eventsModel.getEventById(eventID)); events.add(eventsModel.getEventById(eventID));

View File

@ -85,7 +85,7 @@ import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DescriptionFilterSt
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
import org.sleuthkit.autopsy.timeline.utils.IntervalUtils; import org.sleuthkit.autopsy.timeline.utils.IntervalUtils;
import org.sleuthkit.autopsy.timeline.zooming.TimeUnits; import org.sleuthkit.autopsy.timeline.zooming.TimeUnits;
import org.sleuthkit.autopsy.timeline.zooming.ZoomState; import org.sleuthkit.autopsy.timeline.zooming.EventsModelParams;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT; import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT;
@ -200,16 +200,16 @@ public class TimeLineController {
private final ReadOnlyObjectWrapper<ViewMode> viewMode = new ReadOnlyObjectWrapper<>(ViewMode.COUNTS); private final ReadOnlyObjectWrapper<ViewMode> viewMode = new ReadOnlyObjectWrapper<>(ViewMode.COUNTS);
@GuardedBy("filteredEvents") @GuardedBy("filteredEvents")
private final FilteredEventsModel filteredEvents; private final EventsModel filteredEvents;
@GuardedBy("this") @GuardedBy("this")
private final ZoomState InitialZoomState; private final EventsModelParams InitialZoomState;
@GuardedBy("this") @GuardedBy("this")
private final History<ZoomState> historyManager = new History<>(); private final History<EventsModelParams> historyManager = new History<>();
@GuardedBy("this") @GuardedBy("this")
private final ReadOnlyObjectWrapper<ZoomState> currentParams = new ReadOnlyObjectWrapper<>(); private final ReadOnlyObjectWrapper<EventsModelParams> currentParams = new ReadOnlyObjectWrapper<>();
//selected events (ie shown in the result viewer) //selected events (ie shown in the result viewer)
@GuardedBy("this") @GuardedBy("this")
@ -281,7 +281,7 @@ public class TimeLineController {
TimeLineController(Case autoCase) throws TskCoreException { TimeLineController(Case autoCase) throws TskCoreException {
this.autoCase = autoCase; this.autoCase = autoCase;
filteredEvents = new FilteredEventsModel(autoCase, currentParams.getReadOnlyProperty()); filteredEvents = new EventsModel(autoCase, currentParams.getReadOnlyProperty());
/* /*
* as the history manager's current state changes, modify the tags * as the history manager's current state changes, modify the tags
* filter to be in sync, and expose that as propery from * filter to be in sync, and expose that as propery from
@ -289,16 +289,16 @@ public class TimeLineController {
* filters? * filters?
*/ */
historyManager.currentState().addListener((observable, oldState, newState) -> { historyManager.currentState().addListener((observable, oldState, newState) -> {
ZoomState historyManagerState = newState; EventsModelParams historyManagerState = newState;
filteredEvents.syncFilters(historyManagerState.getFilterState()); filteredEvents.addDataSourceFilters(historyManagerState.getEventFilterState());
currentParams.set(historyManagerState); currentParams.set(historyManagerState);
}); });
try { try {
InitialZoomState = new ZoomState(filteredEvents.getSpanningInterval(), InitialZoomState = new EventsModelParams(filteredEvents.getSpanningInterval(),
TimelineEventType.HierarchyLevel.CATEGORY, TimelineEventType.HierarchyLevel.CATEGORY,
filteredEvents.filterProperty().get(), filteredEvents.eventFilterProperty().get(),
TimelineLevelOfDetail.LOW); TimelineLevelOfDetail.LOW);
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
throw new TskCoreException("Error getting spanning interval.", ex); throw new TskCoreException("Error getting spanning interval.", ex);
@ -318,17 +318,17 @@ public class TimeLineController {
/** /**
* @return a shared events model * @return a shared events model
*/ */
public FilteredEventsModel getEventsModel() { public EventsModel getEventsModel() {
return filteredEvents; return filteredEvents;
} }
public void applyDefaultFilters() { public void applyDefaultFilters() {
pushFilters(filteredEvents.getDefaultFilter()); pushFilters(filteredEvents.getDefaultEventFilter());
} }
public void zoomOutToActivity() throws TskCoreException { public void zoomOutToActivity() throws TskCoreException {
Interval boundingEventsInterval = filteredEvents.getBoundingEventsInterval(getJodaTimeZone()); Interval boundingEventsInterval = filteredEvents.getSpanningInterval(getJodaTimeZone());
advance(filteredEvents.zoomStateProperty().get().withTimeRange(boundingEventsInterval)); advance(filteredEvents.modelParamsProperty().get().withTimeRange(boundingEventsInterval));
} }
private final ObservableSet<DetailViewEvent> pinnedEvents = FXCollections.observableSet(); private final ObservableSet<DetailViewEvent> pinnedEvents = FXCollections.observableSet();
@ -494,7 +494,7 @@ public class TimeLineController {
} }
synchronized public void pushEventTypeZoom(TimelineEventType.HierarchyLevel typeZoomeLevel) { synchronized public void pushEventTypeZoom(TimelineEventType.HierarchyLevel typeZoomeLevel) {
ZoomState currentZoom = filteredEvents.zoomStateProperty().get(); EventsModelParams currentZoom = filteredEvents.modelParamsProperty().get();
if (currentZoom == null) { if (currentZoom == null) {
advance(InitialZoomState.withTypeZoomLevel(typeZoomeLevel)); advance(InitialZoomState.withTypeZoomLevel(typeZoomeLevel));
} else if (currentZoom.hasTypeZoomLevel(typeZoomeLevel) == false) { } else if (currentZoom.hasTypeZoomLevel(typeZoomeLevel) == false) {
@ -525,7 +525,7 @@ public class TimeLineController {
} }
} }
ZoomState currentZoom = filteredEvents.zoomStateProperty().get(); EventsModelParams currentZoom = filteredEvents.modelParamsProperty().get();
if (currentZoom == null) { if (currentZoom == null) {
advance(InitialZoomState.withTimeRange(clampedTimeRange)); advance(InitialZoomState.withTimeRange(clampedTimeRange));
return true; return true;
@ -556,7 +556,7 @@ public class TimeLineController {
} }
synchronized public void pushDescrLOD(TimelineLevelOfDetail newLOD) { synchronized public void pushDescrLOD(TimelineLevelOfDetail newLOD) {
ZoomState currentZoom = filteredEvents.zoomStateProperty().get(); EventsModelParams currentZoom = filteredEvents.modelParamsProperty().get();
if (currentZoom == null) { if (currentZoom == null) {
advance(InitialZoomState.withDescrLOD(newLOD)); advance(InitialZoomState.withDescrLOD(newLOD));
} else if (currentZoom.hasDescrLOD(newLOD) == false) { } else if (currentZoom.hasDescrLOD(newLOD) == false) {
@ -567,7 +567,7 @@ public class TimeLineController {
@SuppressWarnings("AssignmentToMethodParameter") //clamp timerange to case @SuppressWarnings("AssignmentToMethodParameter") //clamp timerange to case
synchronized public void pushTimeAndType(Interval timeRange, TimelineEventType.HierarchyLevel typeZoom) throws TskCoreException { synchronized public void pushTimeAndType(Interval timeRange, TimelineEventType.HierarchyLevel typeZoom) throws TskCoreException {
Interval overlappingTimeRange = this.filteredEvents.getSpanningInterval().overlap(timeRange); Interval overlappingTimeRange = this.filteredEvents.getSpanningInterval().overlap(timeRange);
ZoomState currentZoom = filteredEvents.zoomStateProperty().get(); EventsModelParams currentZoom = filteredEvents.modelParamsProperty().get();
if (currentZoom == null) { if (currentZoom == null) {
advance(InitialZoomState.withTimeAndType(overlappingTimeRange, typeZoom)); advance(InitialZoomState.withTimeAndType(overlappingTimeRange, typeZoom));
} else if (currentZoom.hasTimeRange(overlappingTimeRange) == false && currentZoom.hasTypeZoomLevel(typeZoom) == false) { } else if (currentZoom.hasTimeRange(overlappingTimeRange) == false && currentZoom.hasTypeZoomLevel(typeZoom) == false) {
@ -580,7 +580,7 @@ public class TimeLineController {
} }
synchronized public void pushFilters(RootFilterState filter) { synchronized public void pushFilters(RootFilterState filter) {
ZoomState currentZoom = filteredEvents.zoomStateProperty().get(); EventsModelParams currentZoom = filteredEvents.modelParamsProperty().get();
if (currentZoom == null) { if (currentZoom == null) {
advance(InitialZoomState.withFilterState(filter)); advance(InitialZoomState.withFilterState(filter));
} else if (currentZoom.hasFilterState(filter) == false) { } else if (currentZoom.hasFilterState(filter) == false) {
@ -596,7 +596,7 @@ public class TimeLineController {
historyManager.retreat(); historyManager.retreat();
} }
synchronized private void advance(ZoomState newState) { synchronized private void advance(EventsModelParams newState) {
historyManager.advance(newState); historyManager.advance(newState);
} }
@ -746,7 +746,7 @@ public class TimeLineController {
case DATA_ADDED: case DATA_ADDED:
ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue(); ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue();
if (null != eventData && eventData.getBlackboardArtifactType().getTypeID() == TSK_HASHSET_HIT.getTypeID()) { if (null != eventData && eventData.getBlackboardArtifactType().getTypeID() == TSK_HASHSET_HIT.getTypeID()) {
logFutureException(executor.submit(() -> filteredEvents.setHashHit(eventData.getArtifacts())), logFutureException(executor.submit(() -> filteredEvents.updateEventsForHashSetHits(eventData.getArtifacts())),
"Error executing task in response to DATA_ADDED event.", "Error executing task in response to DATA_ADDED event.",
"Error executing response to new data."); "Error executing response to new data.");
} }

View File

@ -22,7 +22,7 @@ import javafx.beans.binding.BooleanBinding;
import javafx.event.ActionEvent; import javafx.event.ActionEvent;
import org.controlsfx.control.action.Action; import org.controlsfx.control.action.Action;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
/** /**
@ -32,7 +32,7 @@ import org.sleuthkit.autopsy.timeline.TimeLineController;
"RestFilters.longText=Reset all filters to their default state."}) "RestFilters.longText=Reset all filters to their default state."})
public class ResetFilters extends Action { public class ResetFilters extends Action {
private FilteredEventsModel eventsModel; private EventsModel eventsModel;
public ResetFilters(final TimeLineController controller) { public ResetFilters(final TimeLineController controller) {
this(Bundle.ResetFilters_text(), controller); this(Bundle.ResetFilters_text(), controller);
@ -44,12 +44,12 @@ public class ResetFilters extends Action {
eventsModel = controller.getEventsModel(); eventsModel = controller.getEventsModel();
disabledProperty().bind(new BooleanBinding() { disabledProperty().bind(new BooleanBinding() {
{ {
bind(eventsModel.zoomStateProperty()); bind(eventsModel.modelParamsProperty());
} }
@Override @Override
protected boolean computeValue() { protected boolean computeValue() {
return eventsModel.zoomStateProperty().getValue().getFilterState().equals(eventsModel.getDefaultFilter()); return eventsModel.modelParamsProperty().getValue().getEventFilterState().equals(eventsModel.getDefaultEventFilter());
} }
}); });
setEventHandler((ActionEvent t) -> { setEventHandler((ActionEvent t) -> {

View File

@ -137,7 +137,7 @@ public class SaveSnapshotAsReport extends Action {
reportMainFilePath = new SnapShotReportWriter(currentCase, reportMainFilePath = new SnapShotReportWriter(currentCase,
reportFolderPath, reportFolderPath,
reportName, reportName,
controller.getEventsModel().getZoomState(), controller.getEventsModel().getModelParams(),
generationDate, snapshot).writeReport(); generationDate, snapshot).writeReport();
} catch (IOException ex) { } catch (IOException ex) {
LOGGER.log(Level.SEVERE, "Error writing report to disk at " + reportFolderPath, ex); //NON_NLS LOGGER.log(Level.SEVERE, "Error writing report to disk at " + reportFolderPath, ex); //NON_NLS

View File

@ -26,7 +26,7 @@ import javafx.scene.image.ImageView;
import org.controlsfx.control.action.Action; import org.controlsfx.control.action.Action;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
@ -58,10 +58,10 @@ public class ZoomOut extends Action {
//disable action when the current time range already encompases the entire case. //disable action when the current time range already encompases the entire case.
disabledProperty().bind(new BooleanBinding() { disabledProperty().bind(new BooleanBinding() {
private final FilteredEventsModel eventsModel = controller.getEventsModel(); private final EventsModel eventsModel = controller.getEventsModel();
{ {
bind(eventsModel.zoomStateProperty(), eventsModel.timeRangeProperty()); bind(eventsModel.modelParamsProperty(), eventsModel.timeRangeProperty());
} }
@Override @Override

View File

@ -27,7 +27,7 @@ import org.controlsfx.control.action.Action;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
/** /**
@ -56,10 +56,10 @@ public class ZoomToEvents extends Action {
//disable action when the current time range already encompases the entire case. //disable action when the current time range already encompases the entire case.
disabledProperty().bind(new BooleanBinding() { disabledProperty().bind(new BooleanBinding() {
private final FilteredEventsModel eventsModel = controller.getEventsModel(); private final EventsModel eventsModel = controller.getEventsModel();
{ {
bind(eventsModel.zoomStateProperty()); bind(eventsModel.modelParamsProperty());
} }
@Override @Override

View File

@ -45,7 +45,7 @@ import org.sleuthkit.autopsy.datamodel.DataModelActionsFactory;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNode; import org.sleuthkit.autopsy.datamodel.DisplayableItemNode;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNodeVisitor; import org.sleuthkit.autopsy.datamodel.DisplayableItemNodeVisitor;
import org.sleuthkit.autopsy.datamodel.NodeProperty; import org.sleuthkit.autopsy.datamodel.NodeProperty;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.actions.ViewFileInTimelineAction; import org.sleuthkit.autopsy.timeline.actions.ViewFileInTimelineAction;
import org.sleuthkit.autopsy.timeline.ui.EventTypeUtils; import org.sleuthkit.autopsy.timeline.ui.EventTypeUtils;
@ -262,7 +262,7 @@ public class EventNode extends DisplayableItemNode {
* @return An EventNode with the content (and possible artifact) backing * @return An EventNode with the content (and possible artifact) backing
* this event in its lookup. * this event in its lookup.
*/ */
public static EventNode createEventNode(final Long eventID, FilteredEventsModel eventsModel) throws TskCoreException { public static EventNode createEventNode(final Long eventID, EventsModel eventsModel) throws TskCoreException {
SleuthkitCase sleuthkitCase = eventsModel.getSleuthkitCase(); SleuthkitCase sleuthkitCase = eventsModel.getSleuthkitCase();

View File

@ -32,7 +32,7 @@ import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNode; import org.sleuthkit.autopsy.datamodel.DisplayableItemNode;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNodeVisitor; import org.sleuthkit.autopsy.datamodel.DisplayableItemNodeVisitor;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
/** /**
@ -49,7 +49,7 @@ public class EventRootNode extends DisplayableItemNode {
*/ */
public static final int MAX_EVENTS_TO_DISPLAY = 5000; public static final int MAX_EVENTS_TO_DISPLAY = 5000;
public EventRootNode(Collection<Long> eventIds, FilteredEventsModel filteredEvents) { public EventRootNode(Collection<Long> eventIds, EventsModel filteredEvents) {
super(Children.create(new EventNodeChildFactory(eventIds, filteredEvents), true), Lookups.singleton(eventIds)); super(Children.create(new EventNodeChildFactory(eventIds, filteredEvents), true), Lookups.singleton(eventIds));
} }
@ -83,10 +83,10 @@ public class EventRootNode extends DisplayableItemNode {
/** /**
* filteredEvents is used to lookup the events from their IDs * filteredEvents is used to lookup the events from their IDs
*/ */
private final FilteredEventsModel filteredEvents; private final EventsModel filteredEvents;
private final Map<Long, Node> nodesMap = new HashMap<>(); private final Map<Long, Node> nodesMap = new HashMap<>();
EventNodeChildFactory(Collection<Long> eventIds, FilteredEventsModel filteredEvents) { EventNodeChildFactory(Collection<Long> eventIds, EventsModel filteredEvents) {
this.eventIDs = eventIds; this.eventIDs = eventIds;
this.filteredEvents = filteredEvents; this.filteredEvents = filteredEvents;
} }

View File

@ -27,14 +27,14 @@ import javax.imageio.ImageIO;
import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormat;
import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.report.uisnapshot.UiSnapShotReportWriter; import org.sleuthkit.autopsy.report.uisnapshot.UiSnapShotReportWriter;
import org.sleuthkit.autopsy.timeline.zooming.ZoomState; import org.sleuthkit.autopsy.timeline.zooming.EventsModelParams;
/** /**
* Generate and write the Timeline snapshot report to disk. * Generate and write the Timeline snapshot report to disk.
*/ */
public class SnapShotReportWriter extends UiSnapShotReportWriter{ public class SnapShotReportWriter extends UiSnapShotReportWriter{
private final ZoomState zoomState; private final EventsModelParams zoomState;
private final BufferedImage image; private final BufferedImage image;
/** /**
@ -49,7 +49,7 @@ public class SnapShotReportWriter extends UiSnapShotReportWriter{
* @param generationDate The generation Date of the report. * @param generationDate The generation Date of the report.
* @param snapshot A snapshot of the view to include in the report. * @param snapshot A snapshot of the view to include in the report.
*/ */
public SnapShotReportWriter(Case currentCase, Path reportFolderPath, String reportName, ZoomState zoomState, Date generationDate, BufferedImage snapshot) { public SnapShotReportWriter(Case currentCase, Path reportFolderPath, String reportName, EventsModelParams zoomState, Date generationDate, BufferedImage snapshot) {
super(currentCase, reportFolderPath, reportName, generationDate); super(currentCase, reportFolderPath, reportName, generationDate);
this.zoomState = zoomState; this.zoomState = zoomState;
this.image = snapshot; this.image = snapshot;

View File

@ -37,7 +37,7 @@ import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.LoggedTask; import org.sleuthkit.autopsy.coreutils.LoggedTask;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.coreutils.ThreadConfined;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.ViewMode; import org.sleuthkit.autopsy.timeline.ViewMode;
import org.sleuthkit.autopsy.timeline.events.RefreshRequestedEvent; import org.sleuthkit.autopsy.timeline.events.RefreshRequestedEvent;
@ -75,7 +75,7 @@ public abstract class AbstractTimeLineView extends BorderPane {
private Task<Boolean> updateTask; private Task<Boolean> updateTask;
private final TimeLineController controller; private final TimeLineController controller;
private final FilteredEventsModel filteredEvents; private final EventsModel filteredEvents;
/** /**
* Constructor * Constructor
@ -86,7 +86,7 @@ public abstract class AbstractTimeLineView extends BorderPane {
this.controller = controller; this.controller = controller;
this.filteredEvents = controller.getEventsModel(); this.filteredEvents = controller.getEventsModel();
this.filteredEvents.registerForEvents(this); this.filteredEvents.registerForEvents(this);
this.filteredEvents.zoomStateProperty().addListener(updateListener); this.filteredEvents.modelParamsProperty().addListener(updateListener);
TimeLineController.timeZoneProperty().addListener(updateListener); TimeLineController.timeZoneProperty().addListener(updateListener);
} }
@ -170,7 +170,7 @@ public abstract class AbstractTimeLineView extends BorderPane {
* *
* @return The FilteredEventsModel for this view. * @return The FilteredEventsModel for this view.
*/ */
protected FilteredEventsModel getEventsModel() { protected EventsModel getEventsModel() {
return filteredEvents; return filteredEvents;
} }
@ -224,7 +224,7 @@ public abstract class AbstractTimeLineView extends BorderPane {
updateTask = null; updateTask = null;
} }
//remvoe and gc updateListener //remvoe and gc updateListener
this.filteredEvents.zoomStateProperty().removeListener(updateListener); this.filteredEvents.modelParamsProperty().removeListener(updateListener);
TimeLineController.timeZoneProperty().removeListener(updateListener); TimeLineController.timeZoneProperty().removeListener(updateListener);
updateListener = null; updateListener = null;
filteredEvents.unRegisterForEvents(this); filteredEvents.unRegisterForEvents(this);

View File

@ -77,7 +77,7 @@ import org.sleuthkit.autopsy.coreutils.LoggedTask;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.coreutils.ThreadConfined;
import org.sleuthkit.autopsy.timeline.FXMLConstructor; import org.sleuthkit.autopsy.timeline.FXMLConstructor;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.ViewMode; import org.sleuthkit.autopsy.timeline.ViewMode;
import org.sleuthkit.autopsy.timeline.actions.AddManualEvent; import org.sleuthkit.autopsy.timeline.actions.AddManualEvent;
@ -236,7 +236,7 @@ final public class ViewFrame extends BorderPane {
private final NotificationPane notificationPane = new NotificationPane(); private final NotificationPane notificationPane = new NotificationPane();
private final TimeLineController controller; private final TimeLineController controller;
private final FilteredEventsModel filteredEvents; private final EventsModel filteredEvents;
/** /**
* Listen to changes in the range slider selection and forward to the * Listen to changes in the range slider selection and forward to the
@ -426,7 +426,7 @@ final public class ViewFrame extends BorderPane {
//listen for changes in the time range / zoom params //listen for changes in the time range / zoom params
TimeLineController.timeZoneProperty().addListener(timeZoneProp -> refreshTimeUI()); TimeLineController.timeZoneProperty().addListener(timeZoneProp -> refreshTimeUI());
filteredEvents.timeRangeProperty().addListener(timeRangeProp -> refreshTimeUI()); filteredEvents.timeRangeProperty().addListener(timeRangeProp -> refreshTimeUI());
filteredEvents.zoomStateProperty().addListener(zoomListener); filteredEvents.modelParamsProperty().addListener(zoomListener);
refreshTimeUI(); //populate the view refreshTimeUI(); //populate the view
refreshHistorgram(); refreshHistorgram();
@ -474,7 +474,7 @@ final public class ViewFrame extends BorderPane {
@Subscribe @Subscribe
@NbBundle.Messages({ @NbBundle.Messages({
"ViewFrame.notification.cacheInvalidated=The event data has been updated, the visualization may be out of date."}) "ViewFrame.notification.cacheInvalidated=The event data has been updated, the visualization may be out of date."})
public void handleCacheInvalidated(FilteredEventsModel.CacheInvalidatedEvent event) { public void handleCacheInvalidated(EventsModel.CacheInvalidatedEvent event) {
Platform.runLater(() -> { Platform.runLater(() -> {
if (hostedView.needsRefresh() == false) { if (hostedView.needsRefresh() == false) {
hostedView.setNeedsRefresh(); hostedView.setNeedsRefresh();

View File

@ -56,7 +56,7 @@ import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.coreutils.ThreadConfined;
import org.sleuthkit.autopsy.timeline.FXMLConstructor; import org.sleuthkit.autopsy.timeline.FXMLConstructor;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.ViewMode; import org.sleuthkit.autopsy.timeline.ViewMode;
import org.sleuthkit.autopsy.timeline.ui.AbstractTimelineChart; import org.sleuthkit.autopsy.timeline.ui.AbstractTimelineChart;
@ -374,7 +374,7 @@ public class CountsViewPane extends AbstractTimelineChart<String, Number, Node,
if (isCancelled()) { if (isCancelled()) {
return null; return null;
} }
FilteredEventsModel eventsModel = getEventsModel(); EventsModel eventsModel = getEventsModel();
final RangeDivision rangeInfo = RangeDivision.getRangeDivision(eventsModel.getTimeRange(), TimeLineController.getJodaTimeZone()); final RangeDivision rangeInfo = RangeDivision.getRangeDivision(eventsModel.getTimeRange(), TimeLineController.getJodaTimeZone());
getChart().setRangeInfo(rangeInfo); //do we need this. It seems like a hack. getChart().setRangeInfo(rangeInfo); //do we need this. It seems like a hack.

View File

@ -50,7 +50,7 @@ import org.joda.time.Seconds;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.ColorUtilities; import org.sleuthkit.autopsy.coreutils.ColorUtilities;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.PromptDialogManager; import org.sleuthkit.autopsy.timeline.PromptDialogManager;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.ViewMode; import org.sleuthkit.autopsy.timeline.ViewMode;
@ -73,7 +73,7 @@ final class EventCountsChart extends StackedBarChart<String, Number> implements
private ContextMenu chartContextMenu; private ContextMenu chartContextMenu;
private final TimeLineController controller; private final TimeLineController controller;
private final FilteredEventsModel filteredEvents; private final EventsModel filteredEvents;
private IntervalSelector<? extends String> intervalSelector; private IntervalSelector<? extends String> intervalSelector;

View File

@ -53,7 +53,7 @@ import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.coreutils.ThreadConfined;
import org.sleuthkit.autopsy.timeline.FXMLConstructor; import org.sleuthkit.autopsy.timeline.FXMLConstructor;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.ViewMode; import org.sleuthkit.autopsy.timeline.ViewMode;
import org.sleuthkit.autopsy.timeline.ui.AbstractTimelineChart; import org.sleuthkit.autopsy.timeline.ui.AbstractTimelineChart;
@ -61,7 +61,7 @@ import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.DetailViewEvent;
import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.DetailsViewModel; import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.DetailsViewModel;
import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.EventStripe; import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.EventStripe;
import org.sleuthkit.autopsy.timeline.utils.MappedList; import org.sleuthkit.autopsy.timeline.utils.MappedList;
import org.sleuthkit.autopsy.timeline.zooming.ZoomState; import org.sleuthkit.autopsy.timeline.zooming.EventsModelParams;
import org.sleuthkit.datamodel.TimelineLevelOfDetail; import org.sleuthkit.datamodel.TimelineLevelOfDetail;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
@ -99,7 +99,7 @@ final public class DetailViewPane extends AbstractTimelineChart<DateTime, EventS
* Local copy of the zoomState. Used to backout of a zoomState change * Local copy of the zoomState. Used to backout of a zoomState change
* without needing to requery/redraw the view. * without needing to requery/redraw the view.
*/ */
private ZoomState currentZoom; private EventsModelParams currentZoom;
private final DetailsViewModel detailsViewModel; private final DetailsViewModel detailsViewModel;
/** /**
@ -391,8 +391,8 @@ final public class DetailViewPane extends AbstractTimelineChart<DateTime, EventS
if (isCancelled()) { if (isCancelled()) {
return null; return null;
} }
FilteredEventsModel eventsModel = getEventsModel(); EventsModel eventsModel = getEventsModel();
ZoomState newZoom = eventsModel.getZoomState(); EventsModelParams newZoom = eventsModel.getModelParams();
//If the view doesn't need refreshing or if the ZoomState hasn't actually changed, just bail //If the view doesn't need refreshing or if the ZoomState hasn't actually changed, just bail
if (needsRefresh() == false && Objects.equals(currentZoom, newZoom)) { if (needsRefresh() == false && Objects.equals(currentZoom, newZoom)) {

View File

@ -43,7 +43,7 @@ import org.joda.time.DateTime;
import org.joda.time.Interval; import org.joda.time.Interval;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.coreutils.ThreadConfined;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.actions.AddManualEvent; import org.sleuthkit.autopsy.timeline.actions.AddManualEvent;
import org.sleuthkit.autopsy.timeline.ui.IntervalSelector; import org.sleuthkit.autopsy.timeline.ui.IntervalSelector;
@ -138,7 +138,7 @@ final class DetailsChart extends Control implements TimeLineChart<DateTime> {
this.pinnedDateAxis = pinnedDateAxis; this.pinnedDateAxis = pinnedDateAxis;
this.selectedNodes = selectedNodes; this.selectedNodes = selectedNodes;
FilteredEventsModel eventsModel = getController().getEventsModel(); EventsModel eventsModel = getController().getEventsModel();
/* /*
* If the time range is changed, clear the guide line and the interval * If the time range is changed, clear the guide line and the interval
@ -147,7 +147,7 @@ final class DetailsChart extends Control implements TimeLineChart<DateTime> {
eventsModel.timeRangeProperty().addListener(observable -> clearTimeBasedUIElements()); eventsModel.timeRangeProperty().addListener(observable -> clearTimeBasedUIElements());
//if the view paramaters change, clear the selection //if the view paramaters change, clear the selection
eventsModel.zoomStateProperty().addListener(observable -> getSelectedNodes().clear()); eventsModel.modelParamsProperty().addListener(observable -> getSelectedNodes().clear());
} }
/** /**

View File

@ -58,7 +58,7 @@ import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.SingleDetailsViewE
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.SqlFilterState; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.SqlFilterState;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DescriptionFilter; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DescriptionFilter;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
import org.sleuthkit.autopsy.timeline.zooming.ZoomState; import org.sleuthkit.autopsy.timeline.zooming.EventsModelParams;
import org.sleuthkit.datamodel.TimelineLevelOfDetail; import org.sleuthkit.datamodel.TimelineLevelOfDetail;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TimelineEventType; import org.sleuthkit.datamodel.TimelineEventType;
@ -177,12 +177,12 @@ final class EventClusterNode extends MultiEventNodeBase<EventCluster, EventStrip
* to the type and description of this cluster by intersecting a new * to the type and description of this cluster by intersecting a new
* filter with the existing root filter. * filter with the existing root filter.
*/ */
RootFilterState subClusterFilter = eventsModel.getFilterState() RootFilterState subClusterFilter = eventsModel.getEventFilterState()
.intersect(new SqlFilterState<>( .intersect(new SqlFilterState<>(
new EventTypeFilter(getEventType()), true)); new EventTypeFilter(getEventType()), true));
final Interval subClusterSpan = new Interval(getStartMillis(), getEndMillis() + 1000); final Interval subClusterSpan = new Interval(getStartMillis(), getEndMillis() + 1000);
final TimelineEventType.HierarchyLevel eventTypeZoomLevel = eventsModel.getEventTypeZoom(); final TimelineEventType.HierarchyLevel eventTypeZoomLevel = eventsModel.getEventTypeZoom();
final ZoomState zoom = new ZoomState(subClusterSpan, eventTypeZoomLevel, subClusterFilter, getDescriptionLevel()); final EventsModelParams zoom = new EventsModelParams(subClusterSpan, eventTypeZoomLevel, subClusterFilter, getDescriptionLevel());
DescriptionFilter descriptionFilter = new DescriptionFilter(getEvent().getDescriptionLevel(), getDescription()); DescriptionFilter descriptionFilter = new DescriptionFilter(getEvent().getDescriptionLevel(), getDescription());
/* /*

View File

@ -67,7 +67,7 @@ import org.joda.time.DateTime;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.coreutils.ThreadConfined;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent; import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent;
import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent; import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent;
@ -126,7 +126,7 @@ public abstract class EventNodeBase<Type extends DetailViewEvent> extends StackP
final HBox controlsHBox = new HBox(5); final HBox controlsHBox = new HBox(5);
final HBox infoHBox = new HBox(5, eventTypeImageView, hashIV, tagIV, descrLabel, countLabel, controlsHBox); final HBox infoHBox = new HBox(5, eventTypeImageView, hashIV, tagIV, descrLabel, countLabel, controlsHBox);
final SleuthkitCase sleuthkitCase; final SleuthkitCase sleuthkitCase;
final FilteredEventsModel eventsModel; final EventsModel eventsModel;
private Timeline timeline; private Timeline timeline;
private Button pinButton; private Button pinButton;
private final Border SELECTION_BORDER; private final Border SELECTION_BORDER;

View File

@ -45,13 +45,13 @@ import org.joda.time.DateTimeZone;
import org.joda.time.Interval; import org.joda.time.Interval;
import org.joda.time.Period; import org.joda.time.Period;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.UIFilter; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.UIFilter;
import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl; import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl;
import org.sleuthkit.autopsy.timeline.utils.RangeDivision; import org.sleuthkit.autopsy.timeline.utils.RangeDivision;
import org.sleuthkit.autopsy.timeline.zooming.TimeUnits; import org.sleuthkit.autopsy.timeline.zooming.TimeUnits;
import org.sleuthkit.autopsy.timeline.zooming.ZoomState; import org.sleuthkit.autopsy.timeline.zooming.EventsModelParams;
import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TimelineManager; import org.sleuthkit.datamodel.TimelineManager;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
@ -68,12 +68,12 @@ final public class DetailsViewModel {
private final static Logger logger = Logger.getLogger(DetailsViewModel.class.getName()); private final static Logger logger = Logger.getLogger(DetailsViewModel.class.getName());
private final FilteredEventsModel eventsModel; private final EventsModel eventsModel;
private final LoadingCache<ZoomState, List<TimelineEvent>> eventCache; private final LoadingCache<EventsModelParams, List<TimelineEvent>> eventCache;
private final TimelineManager eventManager; private final TimelineManager eventManager;
private final SleuthkitCase sleuthkitCase; private final SleuthkitCase sleuthkitCase;
public DetailsViewModel(FilteredEventsModel eventsModel) { public DetailsViewModel(EventsModel eventsModel) {
this.eventsModel = eventsModel; this.eventsModel = eventsModel;
this.eventManager = eventsModel.getEventManager(); this.eventManager = eventsModel.getEventManager();
this.sleuthkitCase = eventsModel.getSleuthkitCase(); this.sleuthkitCase = eventsModel.getSleuthkitCase();
@ -86,7 +86,7 @@ final public class DetailsViewModel {
} }
@Subscribe @Subscribe
void handleCacheInvalidation(FilteredEventsModel.CacheInvalidatedEvent event) { void handleCacheInvalidation(EventsModel.CacheInvalidatedEvent event) {
eventCache.invalidateAll(); eventCache.invalidateAll();
} }
@ -99,7 +99,7 @@ final public class DetailsViewModel {
* *
* @throws org.sleuthkit.datamodel.TskCoreException * @throws org.sleuthkit.datamodel.TskCoreException
*/ */
public List<EventStripe> getEventStripes(ZoomState zoom) throws TskCoreException { public List<EventStripe> getEventStripes(EventsModelParams zoom) throws TskCoreException {
return getEventStripes(UIFilter.getAllPassFilter(), zoom); return getEventStripes(UIFilter.getAllPassFilter(), zoom);
} }
@ -113,11 +113,11 @@ final public class DetailsViewModel {
* *
* @throws org.sleuthkit.datamodel.TskCoreException * @throws org.sleuthkit.datamodel.TskCoreException
*/ */
public List<EventStripe> getEventStripes(UIFilter uiFilter, ZoomState zoom) throws TskCoreException { public List<EventStripe> getEventStripes(UIFilter uiFilter, EventsModelParams zoom) throws TskCoreException {
DateTimeZone timeZone = TimeLineController.getJodaTimeZone(); DateTimeZone timeZone = TimeLineController.getJodaTimeZone();
//unpack params //unpack params
Interval timeRange = zoom.getTimeRange(); Interval timeRange = zoom.getTimeRange();
TimelineLevelOfDetail descriptionLOD = zoom.getDescriptionLOD(); TimelineLevelOfDetail descriptionLOD = zoom.getTimelineLOD();
//intermediate results //intermediate results
Map<TimelineEventType, SetMultimap< String, EventCluster>> eventClusters = new HashMap<>(); Map<TimelineEventType, SetMultimap< String, EventCluster>> eventClusters = new HashMap<>();
@ -156,10 +156,10 @@ final public class DetailsViewModel {
* @throws org.sleuthkit.datamodel.TskCoreException If there is an error * @throws org.sleuthkit.datamodel.TskCoreException If there is an error
* querying the db. * querying the db.
*/ */
private List<TimelineEvent> getEvents(ZoomState zoom, DateTimeZone timeZone) throws TskCoreException { private List<TimelineEvent> getEvents(EventsModelParams zoom, DateTimeZone timeZone) throws TskCoreException {
//unpack params //unpack params
Interval timeRange = zoom.getTimeRange(); Interval timeRange = zoom.getTimeRange();
TimelineFilter.RootFilter activeFilter = zoom.getFilterState().getActiveFilter(); TimelineFilter.RootFilter activeFilter = zoom.getEventFilterState().getActiveFilter();
return eventManager.getEvents(timeRange, activeFilter); return eventManager.getEvents(timeRange, activeFilter);
} }

View File

@ -40,7 +40,7 @@ import org.controlsfx.control.action.Action;
import org.controlsfx.control.action.ActionUtils; import org.controlsfx.control.action.ActionUtils;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.FXMLConstructor; import org.sleuthkit.autopsy.timeline.FXMLConstructor;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.actions.ResetFilters; import org.sleuthkit.autopsy.timeline.actions.ResetFilters;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DescriptionFilterState; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DescriptionFilterState;
@ -79,7 +79,7 @@ final public class FilterSetPanel extends BorderPane {
@FXML @FXML
private SplitPane splitPane; private SplitPane splitPane;
private final FilteredEventsModel filteredEvents; private final EventsModel filteredEvents;
private final TimeLineController controller; private final TimeLineController controller;
/** /**
@ -115,16 +115,16 @@ final public class FilterSetPanel extends BorderPane {
legendColumn.setCellFactory(col -> new LegendCell(this.controller)); legendColumn.setCellFactory(col -> new LegendCell(this.controller));
//type is the only filter expanded initialy //type is the only filter expanded initialy
expansionMap.put(filteredEvents.getFilterState().getFilter(), true); expansionMap.put(filteredEvents.getEventFilterState().getFilter(), true);
expansionMap.put(filteredEvents.getFilterState().getEventTypeFilterState().getFilter(), true); expansionMap.put(filteredEvents.getEventFilterState().getEventTypeFilterState().getFilter(), true);
InvalidationListener applyFiltersListener = observable -> applyFilters(); InvalidationListener applyFiltersListener = observable -> applyFilters();
filteredEvents.eventTypeZoomProperty().addListener(applyFiltersListener); filteredEvents.eventTypesHierarchyLevelProperty().addListener(applyFiltersListener);
filteredEvents.descriptionLODProperty().addListener(applyFiltersListener); filteredEvents.descriptionLODProperty().addListener(applyFiltersListener);
filteredEvents.timeRangeProperty().addListener(applyFiltersListener); filteredEvents.timeRangeProperty().addListener(applyFiltersListener);
filteredEvents.filterProperty().addListener(observable -> refreshFilterUI()); filteredEvents.eventFilterProperty().addListener(observable -> refreshFilterUI());
refreshFilterUI(); refreshFilterUI();
hiddenDescriptionsListView.setItems(controller.getQuickHideFilters()); hiddenDescriptionsListView.setItems(controller.getQuickHideFilters());
@ -164,7 +164,7 @@ final public class FilterSetPanel extends BorderPane {
private void refreshFilterUI() { private void refreshFilterUI() {
Platform.runLater(() Platform.runLater(()
-> filterTreeTable.setRoot(new FilterTreeItem(filteredEvents.filterProperty().get().copyOf(), expansionMap))); -> filterTreeTable.setRoot(new FilterTreeItem(filteredEvents.eventFilterProperty().get().copyOf(), expansionMap)));
} }
private void applyFilters() { private void applyFilters() {

View File

@ -28,7 +28,7 @@ import javafx.scene.layout.HBox;
import javafx.scene.paint.Color; import javafx.scene.paint.Color;
import javafx.scene.shape.Rectangle; import javafx.scene.shape.Rectangle;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.ui.EventTypeUtils; import org.sleuthkit.autopsy.timeline.ui.EventTypeUtils;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
@ -46,7 +46,7 @@ final class LegendCell extends TreeTableCell<FilterState<?>, FilterState<?>> {
private final TimeLineController controller; private final TimeLineController controller;
private final FilteredEventsModel filteredEvents; private final EventsModel filteredEvents;
//We need a controller so we can listen to changes in EventTypeZoom to show/hide legends //We need a controller so we can listen to changes in EventTypeZoom to show/hide legends
LegendCell(TimeLineController controller) { LegendCell(TimeLineController controller) {
@ -75,7 +75,7 @@ final class LegendCell extends TreeTableCell<FilterState<?>, FilterState<?>> {
rect.setArcWidth(5); rect.setArcWidth(5);
rect.setStrokeWidth(3); rect.setStrokeWidth(3);
setLegendColor(filter, rect, this.filteredEvents.getEventTypeZoom()); setLegendColor(filter, rect, this.filteredEvents.getEventTypeZoom());
this.filteredEvents.eventTypeZoomProperty().addListener((obs, oldZoomLevel, newZoomLevel) -> { this.filteredEvents.eventTypesHierarchyLevelProperty().addListener((obs, oldZoomLevel, newZoomLevel) -> {
setLegendColor(filter, rect, newZoomLevel); setLegendColor(filter, rect, newZoomLevel);
}); });

View File

@ -28,7 +28,7 @@ import javafx.concurrent.Task;
import javafx.scene.Node; import javafx.scene.Node;
import org.joda.time.Interval; import org.joda.time.Interval;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.ViewMode; import org.sleuthkit.autopsy.timeline.ViewMode;
import org.sleuthkit.autopsy.timeline.events.ViewInTimelineRequestedEvent; import org.sleuthkit.autopsy.timeline.events.ViewInTimelineRequestedEvent;
@ -116,7 +116,7 @@ public class ListViewPane extends AbstractTimeLineView {
return null; return null;
} }
FilteredEventsModel eventsModel = getEventsModel(); EventsModel eventsModel = getEventsModel();
Set<Long> selectedEventIDs; Set<Long> selectedEventIDs;
TimeLineController controller = getController(); TimeLineController controller = getController();

View File

@ -29,7 +29,7 @@ import java.util.Map.Entry;
import java.util.Objects; import java.util.Objects;
import static java.util.stream.Collectors.groupingBy; import static java.util.stream.Collectors.groupingBy;
import org.joda.time.Interval; import org.joda.time.Interval;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
import org.sleuthkit.datamodel.TimelineManager; import org.sleuthkit.datamodel.TimelineManager;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
@ -44,10 +44,10 @@ import org.sleuthkit.datamodel.TimelineLevelOfDetail;
*/ */
public class ListViewModel { public class ListViewModel {
private final FilteredEventsModel eventsModel; private final EventsModel eventsModel;
private final TimelineManager eventManager; private final TimelineManager eventManager;
public ListViewModel(FilteredEventsModel eventsModel) { public ListViewModel(EventsModel eventsModel) {
this.eventsModel = eventsModel; this.eventsModel = eventsModel;
this.eventManager = eventsModel.getEventManager(); this.eventManager = eventsModel.getEventManager();
} }
@ -63,7 +63,7 @@ public class ListViewModel {
* @throws org.sleuthkit.datamodel.TskCoreException * @throws org.sleuthkit.datamodel.TskCoreException
*/ */
public List<CombinedEvent> getCombinedEvents() throws TskCoreException { public List<CombinedEvent> getCombinedEvents() throws TskCoreException {
return getCombinedEvents(eventsModel.getTimeRange(), eventsModel.getFilterState()); return getCombinedEvents(eventsModel.getTimeRange(), eventsModel.getEventFilterState());
} }
/** /**

View File

@ -0,0 +1,135 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.zooming;
import java.util.Objects;
import org.joda.time.Interval;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
import org.sleuthkit.datamodel.TimelineEventType;
import org.sleuthkit.datamodel.TimelineLevelOfDetail;
/**
 * A container that bundles the user-specified parameters for the events model
 * so that they can be passed around and saved as mementos to support a
 * navigable (forwards-backwards) history feature for the events model.
 *
 * Instances are immutable; the with*() methods return new instances with one
 * parameter replaced.
 */
final public class EventsModelParams {

    private final Interval timeRange;
    private final TimelineEventType.HierarchyLevel eventTypesHierarchyLevel;
    private final RootFilterState eventFilterState;
    private final TimelineLevelOfDetail timelineLOD;

    /**
     * Constructor.
     *
     * @param timeRange                The time range over which events are
     *                                 considered. May be null (see
     *                                 hasTimeRange/hashCode).
     * @param eventTypesHierarchyLevel The level of the event types hierarchy
     *                                 at which events are grouped.
     * @param eventFilterState         The root filter state applied to events.
     * @param timelineLOD              The level of detail for event
     *                                 descriptions.
     */
    public EventsModelParams(Interval timeRange, TimelineEventType.HierarchyLevel eventTypesHierarchyLevel, RootFilterState eventFilterState, TimelineLevelOfDetail timelineLOD) {
        this.timeRange = timeRange;
        this.eventTypesHierarchyLevel = eventTypesHierarchyLevel;
        this.eventFilterState = eventFilterState;
        this.timelineLOD = timelineLOD;
    }

    public Interval getTimeRange() {
        return timeRange;
    }

    public TimelineEventType.HierarchyLevel getEventTypesHierarchyLevel() {
        return eventTypesHierarchyLevel;
    }

    public RootFilterState getEventFilterState() {
        return eventFilterState;
    }

    public TimelineLevelOfDetail getTimelineLOD() {
        return timelineLOD;
    }

    /**
     * @return A copy of these parameters with the given time range and event
     *         types hierarchy level.
     */
    public EventsModelParams withTimeAndType(Interval timeRange, TimelineEventType.HierarchyLevel zoomLevel) {
        return new EventsModelParams(timeRange, zoomLevel, eventFilterState, timelineLOD);
    }

    /**
     * @return A copy of these parameters with the given event types hierarchy
     *         level.
     */
    public EventsModelParams withTypeZoomLevel(TimelineEventType.HierarchyLevel zoomLevel) {
        return new EventsModelParams(timeRange, zoomLevel, eventFilterState, timelineLOD);
    }

    /**
     * @return A copy of these parameters with the given time range.
     */
    public EventsModelParams withTimeRange(Interval timeRange) {
        return new EventsModelParams(timeRange, eventTypesHierarchyLevel, eventFilterState, timelineLOD);
    }

    /**
     * @return A copy of these parameters with the given description level of
     *         detail.
     */
    public EventsModelParams withDescrLOD(TimelineLevelOfDetail descrLOD) {
        return new EventsModelParams(timeRange, eventTypesHierarchyLevel, eventFilterState, descrLOD);
    }

    /**
     * @return A copy of these parameters with the given filter state.
     */
    public EventsModelParams withFilterState(RootFilterState filter) {
        return new EventsModelParams(timeRange, eventTypesHierarchyLevel, filter, timelineLOD);
    }

    public boolean hasFilterState(RootFilterState filterSet) {
        return this.eventFilterState.equals(filterSet);
    }

    public boolean hasTypeZoomLevel(TimelineEventType.HierarchyLevel typeZoom) {
        return this.eventTypesHierarchyLevel.equals(typeZoom);
    }

    public boolean hasTimeRange(Interval timeRange) {
        return this.timeRange != null && this.timeRange.equals(timeRange);
    }

    public boolean hasDescrLOD(TimelineLevelOfDetail newLOD) {
        return this.timelineLOD.equals(newLOD);
    }

    @Override
    public int hashCode() {
        int hash = 3;
        /*
         * Hash the interval's endpoints so equal time ranges hash equally.
         * Guard against a null time range: hasTimeRange() explicitly allows
         * timeRange to be null, so hashCode must not NPE on it. For non-null
         * ranges these values match Objects.hashCode(boxed long).
         */
        hash = 97 * hash + (timeRange == null ? 0 : Long.hashCode(timeRange.getStartMillis()));
        hash = 97 * hash + (timeRange == null ? 0 : Long.hashCode(timeRange.getEndMillis()));
        hash = 97 * hash + Objects.hashCode(this.eventTypesHierarchyLevel);
        hash = 97 * hash + Objects.hashCode(this.eventFilterState);
        hash = 97 * hash + Objects.hashCode(this.timelineLOD);
        return hash;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        final EventsModelParams other = (EventsModelParams) obj;
        if (!Objects.equals(this.timeRange, other.getTimeRange())) {
            return false;
        }
        if (this.eventTypesHierarchyLevel != other.getEventTypesHierarchyLevel()) {
            return false;
        }
        if (this.eventFilterState.equals(other.getEventFilterState()) == false) {
            return false;
        }
        return this.timelineLOD == other.getTimelineLOD();
    }

    @Override
    public String toString() {
        // Fixed stale class name left over from the rename of ZoomState.
        return "EventsModelParams{" + "timeRange=" + timeRange + ", typeZoomLevel=" + eventTypesHierarchyLevel + ", filter=" + eventFilterState.getActiveFilter().toString() + ", descrLOD=" + timelineLOD + '}'; //NON-NLS
    }
}

View File

@ -34,7 +34,7 @@ import org.controlsfx.control.Notifications;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.FXMLConstructor; import org.sleuthkit.autopsy.timeline.FXMLConstructor;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.ViewMode; import org.sleuthkit.autopsy.timeline.ViewMode;
import org.sleuthkit.autopsy.timeline.utils.RangeDivision; import org.sleuthkit.autopsy.timeline.utils.RangeDivision;
@ -72,7 +72,7 @@ public class ZoomSettingsPane extends TitledPane {
private Slider timeUnitSlider; private Slider timeUnitSlider;
private final TimeLineController controller; private final TimeLineController controller;
private final FilteredEventsModel filteredEvents; private final EventsModel filteredEvents;
/** /**
* Constructor * Constructor
@ -97,7 +97,7 @@ public class ZoomSettingsPane extends TitledPane {
typeZoomSlider.setMax(TimelineEventType.HierarchyLevel.values().length - 1); typeZoomSlider.setMax(TimelineEventType.HierarchyLevel.values().length - 1);
configureSliderListeners(typeZoomSlider, configureSliderListeners(typeZoomSlider,
controller::pushEventTypeZoom, controller::pushEventTypeZoom,
filteredEvents.eventTypeZoomProperty(), filteredEvents.eventTypesHierarchyLevelProperty(),
TimelineEventType.HierarchyLevel.class, TimelineEventType.HierarchyLevel.class,
TimelineEventType.HierarchyLevel::ordinal, TimelineEventType.HierarchyLevel::ordinal,
Function.identity()); Function.identity());

View File

@ -1,138 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013-18 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.zooming;
import java.util.Objects;
import org.joda.time.Interval;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
import org.sleuthkit.datamodel.TimelineEvent;
import org.sleuthkit.datamodel.TimelineEventType;
import org.sleuthkit.datamodel.TimelineLevelOfDetail;
/**
 * This class encapsulates all the zoom(and filter) parameters into one object
 * for passing around and as a memento of the zoom/filter state.
 *
 * Instances are immutable; the with*() methods return new instances with one
 * parameter replaced.
 */
final public class ZoomState {

    private final Interval timeRange;
    private final TimelineEventType.HierarchyLevel typeZoomLevel;
    private final RootFilterState filter;
    private final TimelineLevelOfDetail descrLOD;

    public Interval getTimeRange() {
        return timeRange;
    }

    public TimelineEventType.HierarchyLevel getTypeZoomLevel() {
        return typeZoomLevel;
    }

    public RootFilterState getFilterState() {
        return filter;
    }

    public TimelineLevelOfDetail getDescriptionLOD() {
        return descrLOD;
    }

    /**
     * Constructor.
     *
     * @param timeRange The time range over which events are considered. May be
     *                  null (see hasTimeRange/hashCode).
     * @param zoomLevel The level of the event types hierarchy at which events
     *                  are grouped.
     * @param filter    The root filter state applied to events.
     * @param descrLOD  The level of detail for event descriptions.
     */
    public ZoomState(Interval timeRange, TimelineEventType.HierarchyLevel zoomLevel, RootFilterState filter, TimelineLevelOfDetail descrLOD) {
        this.timeRange = timeRange;
        this.typeZoomLevel = zoomLevel;
        this.filter = filter;
        this.descrLOD = descrLOD;
    }

    /** @return A copy with the given time range and type zoom level. */
    public ZoomState withTimeAndType(Interval timeRange, TimelineEventType.HierarchyLevel zoomLevel) {
        return new ZoomState(timeRange, zoomLevel, filter, descrLOD);
    }

    /** @return A copy with the given type zoom level. */
    public ZoomState withTypeZoomLevel(TimelineEventType.HierarchyLevel zoomLevel) {
        return new ZoomState(timeRange, zoomLevel, filter, descrLOD);
    }

    /** @return A copy with the given time range. */
    public ZoomState withTimeRange(Interval timeRange) {
        return new ZoomState(timeRange, typeZoomLevel, filter, descrLOD);
    }

    /** @return A copy with the given description level of detail. */
    public ZoomState withDescrLOD(TimelineLevelOfDetail descrLOD) {
        return new ZoomState(timeRange, typeZoomLevel, filter, descrLOD);
    }

    /** @return A copy with the given filter state. */
    public ZoomState withFilterState(RootFilterState filter) {
        return new ZoomState(timeRange, typeZoomLevel, filter, descrLOD);
    }

    public boolean hasFilterState(RootFilterState filterSet) {
        return this.filter.equals(filterSet);
    }

    public boolean hasTypeZoomLevel(TimelineEventType.HierarchyLevel typeZoom) {
        return this.typeZoomLevel.equals(typeZoom);
    }

    public boolean hasTimeRange(Interval timeRange) {
        return this.timeRange != null && this.timeRange.equals(timeRange);
    }

    public boolean hasDescrLOD(TimelineLevelOfDetail newLOD) {
        return this.descrLOD.equals(newLOD);
    }

    @Override
    public int hashCode() {
        int hash = 3;
        /*
         * Hash the interval's endpoints so equal time ranges hash equally.
         * Guard against a null time range: hasTimeRange() explicitly allows
         * timeRange to be null, so hashCode must not NPE on it. For non-null
         * ranges these values match Objects.hashCode(boxed long).
         */
        hash = 97 * hash + (timeRange == null ? 0 : Long.hashCode(timeRange.getStartMillis()));
        hash = 97 * hash + (timeRange == null ? 0 : Long.hashCode(timeRange.getEndMillis()));
        hash = 97 * hash + Objects.hashCode(this.typeZoomLevel);
        hash = 97 * hash + Objects.hashCode(this.filter);
        hash = 97 * hash + Objects.hashCode(this.descrLOD);
        return hash;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        final ZoomState other = (ZoomState) obj;
        if (!Objects.equals(this.timeRange, other.timeRange)) {
            return false;
        }
        if (this.typeZoomLevel != other.typeZoomLevel) {
            return false;
        }
        if (this.filter.equals(other.filter) == false) {
            return false;
        }
        return this.descrLOD == other.descrLOD;
    }

    @Override
    public String toString() {
        return "ZoomState{" + "timeRange=" + timeRange + ", typeZoomLevel=" + typeZoomLevel + ", filter=" + filter.getActiveFilter().toString() + ", descrLOD=" + descrLOD + '}'; //NON-NLS
    }
}