diff --git a/Core/src/org/sleuthkit/autopsy/timeline/FilteredEventsModel.java b/Core/src/org/sleuthkit/autopsy/timeline/FilteredEventsModel.java index 2796255350..da9e56c648 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/FilteredEventsModel.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/FilteredEventsModel.java @@ -22,7 +22,6 @@ import com.google.common.cache.CacheBuilder; import com.google.common.cache.LoadingCache; import com.google.common.collect.ImmutableList; import com.google.common.eventbus.EventBus; -import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; @@ -40,7 +39,6 @@ import javafx.collections.ObservableList; import javafx.collections.ObservableMap; import javafx.collections.ObservableSet; import static org.apache.commons.collections4.CollectionUtils.emptyIfNull; -import static org.apache.commons.lang3.StringUtils.isBlank; import org.joda.time.DateTimeZone; import org.joda.time.Interval; import org.openide.util.NbBundle; @@ -61,7 +59,6 @@ import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.TagsFilterState; import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl; -import org.sleuthkit.autopsy.timeline.utils.CheckedFunction; import org.sleuthkit.autopsy.timeline.utils.FilterUtils; import org.sleuthkit.autopsy.timeline.zooming.ZoomState; import org.sleuthkit.datamodel.AbstractFile; @@ -163,7 +160,7 @@ public final class FilteredEventsModel { syncFilters(rootFilter); requestedFilter.set(rootFilter.copyOf()); }; - + datasourcesMap.addListener(filterSyncListener); hashSets.addListener(filterSyncListener); tagNames.addListener(filterSyncListener); @@ -244,7 +241,7 @@ public final class FilteredEventsModel { SleuthkitCase skCase = autoCase.getSleuthkitCase(); hashSets.addAll(eventManager.getHashSetNames()); Set dataSourceIDs = eventManager.getDataSourceIDs(); - + //because there is no way to remove a datasource we only add to this map. 
for (Long id : dataSourceIDs) { try { @@ -271,18 +268,18 @@ public final class FilteredEventsModel { */ public void syncFilters(RootFilterState rootFilterState) { TagsFilterState tagsFilterState = rootFilterState.getTagsFilterState(); - for (TagName tagName : tagNames) { + for (TagName tagName : tagNames) { tagsFilterState.getFilter().addSubFilter(new TagNameFilter(tagName)); } for (FilterState tagFilterState : rootFilterState.getTagsFilterState().getSubFilterStates()) { tagFilterState.setDisabled(tagNames.contains(tagFilterState.getFilter().getTagName()) == false); } - + DataSourcesFilter dataSourcesFilter = rootFilterState.getDataSourcesFilterState().getFilter(); for (Map.Entry entry : datasourcesMap.entrySet()) { dataSourcesFilter.addSubFilter(new DataSourceFilter(entry.getValue().getName(), entry.getKey())); } - + HashHitsFilter hashSetsFilter = rootFilterState.getHashHitsFilterState().getFilter(); for (String hashSet : hashSets) { hashSetsFilter.addSubFilter(new HashSetFilter(hashSet)); @@ -404,16 +401,16 @@ public final class FilteredEventsModel { return eventManager.getTagCountsByTagName(eventIDsWithTags); } - public List getEventIDs(Interval timeRange, TimelineFilter filter) throws TskCoreException { + public List getEventIDs(Interval timeRange, FilterState filter) throws TskCoreException { final Interval overlap; - RootFilterState intersection; + RootFilter intersection; synchronized (this) { overlap = getSpanningInterval().overlap(timeRange); - intersection = getFilterState().intersect(filter); + intersection = getFilterState().intersect(filter).getActiveFilter(); } - return eventManager.getEventIDs(overlap, intersection.getActiveFilter()); + return eventManager.getEventIDs(overlap, intersection); } /** @@ -680,8 +677,8 @@ public final class FilteredEventsModel { public synchronized void invalidateCaches(Collection updatedEventIDs) throws TskCoreException { minCache.invalidateAll(); maxCache.invalidateAll(); - idToEventCache.invalidateAll(emptyIfNull(updatedEventIDs)); - eventCountsCache.invalidateAll(); + idToEventCache.invalidateAll(emptyIfNull(updatedEventIDs)); + eventCountsCache.invalidateAll(); populateFilterData(); @@ -698,33 +695,4 @@ public final class FilteredEventsModel { private CacheInvalidatedEvent() { } } - - /** - * take the result of a group_concat SQLite operation and split it into a - * set of X using the mapper to to convert from string to X If groupConcat - * is empty, null, or all whitespace, returns an empty list. 
- * - * @param the type of elements to return - * @param groupConcat a string containing the group_concat result ( a comma - * separated list) - * @param mapper a function from String to X - * - * @return a Set of X, each element mapped from one element of the original - * comma delimited string - * - * @throws org.sleuthkit.datamodel.TskCoreException - */ - public static List unGroupConcat(String groupConcat, CheckedFunction mapper) throws TskCoreException { - - if (isBlank(groupConcat)) { - return Collections.emptyList(); - } - - List result = new ArrayList<>(); - String[] split = groupConcat.split(","); - for (String s : split) { - result.add(mapper.apply(s)); - } - return result; - } } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java index 927704a3e1..56b8501c26 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java @@ -79,7 +79,8 @@ import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.timeline.events.TimelineEventAddedEvent; import org.sleuthkit.autopsy.timeline.events.ViewInTimelineRequestedEvent; import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.DetailViewEvent; -import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState; +import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DefaultFilterState; +import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DescriptionFilterState; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState; import org.sleuthkit.autopsy.timeline.utils.IntervalUtils; import org.sleuthkit.autopsy.timeline.zooming.TimeUnits; @@ -90,7 +91,6 @@ import org.sleuthkit.datamodel.DescriptionLoD; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.timeline.EventType; import org.sleuthkit.datamodel.timeline.EventTypeZoomLevel; -import org.sleuthkit.datamodel.timeline.TimelineFilter.DescriptionFilter; import org.sleuthkit.datamodel.timeline.TimelineFilter.EventTypeFilter; /** @@ -157,9 +157,9 @@ public class TimeLineController { private final Case autoCase; @ThreadConfined(type = ThreadConfined.ThreadType.JFX) - private final ObservableList> quickHideFilters = FXCollections.observableArrayList(); + private final ObservableList quickHideFilters = FXCollections.observableArrayList(); - public ObservableList> getQuickHideFilters() { + public ObservableList getQuickHideFilters() { return quickHideFilters; } @@ -607,7 +607,7 @@ public class TimeLineController { @Override protected Collection< Long> call() throws Exception { synchronized (TimeLineController.this) { - return filteredEvents.getEventIDs(timeRange, new EventTypeFilter(type)); + return filteredEvents.getEventIDs(timeRange, new DefaultFilterState<>(new EventTypeFilter(type), true)); } } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/images/action_log.png b/Core/src/org/sleuthkit/autopsy/timeline/images/action_log.png new file mode 100644 index 0000000000..e808cb04c6 Binary files /dev/null and b/Core/src/org/sleuthkit/autopsy/timeline/images/action_log.png differ diff --git a/Core/src/org/sleuthkit/autopsy/timeline/images/raw_access_logs.png b/Core/src/org/sleuthkit/autopsy/timeline/images/raw_access_logs.png new file mode 100644 index 0000000000..dc24d63d68 Binary files /dev/null and b/Core/src/org/sleuthkit/autopsy/timeline/images/raw_access_logs.png differ diff --git 
a/Core/src/org/sleuthkit/autopsy/timeline/images/registry.png b/Core/src/org/sleuthkit/autopsy/timeline/images/registry.png new file mode 100644 index 0000000000..2791abe45d Binary files /dev/null and b/Core/src/org/sleuthkit/autopsy/timeline/images/registry.png differ diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/EventTypeUtils.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/EventTypeUtils.java index fb7c94eb13..e2a0080597 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/EventTypeUtils.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/EventTypeUtils.java @@ -85,6 +85,10 @@ final public class EventTypeUtils { imageFileName = "message.png"; } else if (typeID == EventType.RECENT_DOCUMENTS.getTypeID()) { imageFileName = "recent_docs.png"; + } else if (typeID == EventType.REGISTRY.getTypeID()) { + imageFileName = "registry.png"; + } else if (typeID == EventType.LOG_ENTRY.getTypeID()) { + imageFileName = "raw_access_logs.png"; } else { imageFileName = "timeline_marker.png"; } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailViewPane.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailViewPane.java index 1fef4c0107..3208d22600 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailViewPane.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailViewPane.java @@ -62,8 +62,8 @@ import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.DetailsViewModel; import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.EventStripe; import org.sleuthkit.autopsy.timeline.utils.MappedList; import org.sleuthkit.autopsy.timeline.zooming.ZoomState; -import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.DescriptionLoD; +import org.sleuthkit.datamodel.TskCoreException; /** * Controller class for a DetailsChart based implementation of a timeline view. 
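Reviewer note on the FilteredEventsModel.getEventIDs(...) change above: callers now pass a FilterState instead of a bare TimelineFilter, wrapping the filter in a DefaultFilterState whose second constructor argument marks it selected (and therefore active), as the TimeLineController hunk shows. A minimal calling sketch follows; the helper class and method names are illustrative only, and the generic parameters are reconstructed rather than copied from the patch.

    import java.util.List;
    import org.joda.time.Interval;
    import org.sleuthkit.autopsy.timeline.FilteredEventsModel;
    import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DefaultFilterState;
    import org.sleuthkit.datamodel.TskCoreException;
    import org.sleuthkit.datamodel.timeline.EventType;
    import org.sleuthkit.datamodel.timeline.TimelineFilter.EventTypeFilter;

    class EventIdLookupSketch {

        // Illustrative helper: fetch the IDs of all REGISTRY events in the current time range.
        static List<Long> registryEventIds(FilteredEventsModel eventsModel) throws TskCoreException {
            Interval timeRange = eventsModel.getTimeRange();
            // 'true' marks the wrapped filter as selected, so it is active when intersected
            // with the current root filter state inside getEventIDs().
            return eventsModel.getEventIDs(timeRange,
                    new DefaultFilterState<>(new EventTypeFilter(EventType.REGISTRY), true));
        }
    }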
@@ -402,7 +402,7 @@ final public class DetailViewPane extends AbstractTimelineChart eventStripes = detailsViewModel.getEventStripes(); + List eventStripes = detailsViewModel.getEventStripes(newZoom); final int size = eventStripes.size(); //if there are too many stipes show a confirmation dialog if (size > 2000) { diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailsChartLane.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailsChartLane.java index 08c315eac1..e3b59e8d1b 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailsChartLane.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/DetailsChartLane.java @@ -57,9 +57,9 @@ import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.DetailViewEvent; import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.EventCluster; import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.EventStripe; import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.SingleDetailsViewEvent; +import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DescriptionFilter; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState; -import org.sleuthkit.datamodel.timeline.TimelineFilter.DescriptionFilter; /** * One "lane" of a the details view, contains all the core logic and layout diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventClusterNode.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventClusterNode.java index d50a88df50..a73de49fb0 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventClusterNode.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/EventClusterNode.java @@ -55,13 +55,14 @@ import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.DetailViewEvent; import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.EventCluster; import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.EventStripe; import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.SingleDetailsViewEvent; +import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DefaultFilterState; +import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DescriptionFilter; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState; import org.sleuthkit.autopsy.timeline.zooming.ZoomState; import org.sleuthkit.datamodel.DescriptionLoD; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.timeline.EventTypeZoomLevel; import org.sleuthkit.datamodel.timeline.TimelineEvent; -import org.sleuthkit.datamodel.timeline.TimelineFilter.DescriptionFilter; import org.sleuthkit.datamodel.timeline.TimelineFilter.EventTypeFilter; /** @@ -168,7 +169,7 @@ final class EventClusterNode extends MultiEventNodeBase( + new EventTypeFilter(getEventType()), true)); final Interval subClusterSpan = new Interval(getStartMillis(), getEndMillis() + 1000); - final EventTypeZoomLevel eventTypeZoomLevel = eventsModel.eventTypeZoomProperty().get(); + final EventTypeZoomLevel eventTypeZoomLevel = eventsModel.getEventTypeZoom(); final ZoomState zoom = new ZoomState(subClusterSpan, eventTypeZoomLevel, subClusterFilter, getDescriptionLoD()); + DescriptionFilter descriptionFilter = new DescriptionFilter(getEvent().getDescriptionLoD(), getDescription()); /* * task to load sub-stripes in a background thread */ @@ -199,7 +200,6 @@ final class EventClusterNode extends MultiEventNodeBase sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package org.sleuthkit.autopsy.timeline.ui.detailview; @@ -9,11 +22,9 @@ import javafx.scene.image.Image; import javafx.scene.image.ImageView; import org.controlsfx.control.action.Action; import org.openide.util.NbBundle; -import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DefaultFilterState; -import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState; +import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DescriptionFilter; +import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DescriptionFilterState; import org.sleuthkit.datamodel.DescriptionLoD; -import org.sleuthkit.datamodel.timeline.TimelineFilter.DescriptionFilter; -import static org.sleuthkit.datamodel.timeline.TimelineFilter.DescriptionFilter.FilterMode.EXCLUDE; /** * An Action that hides, in the given chart, events that have the given @@ -39,12 +50,13 @@ class HideDescriptionAction extends Action { * as the new filter for the given description. Set the (new) filter * active. */ - final FilterState testFilter - = new DefaultFilterState<>( - new DescriptionFilter(descriptionLoD, description, EXCLUDE)); + final DescriptionFilterState testFilter + = new DescriptionFilterState( + new DescriptionFilter(descriptionLoD, description)); - FilterState descriptionFilter = chart.getController().getQuickHideFilters().stream() - .filter(testFilter::equals).findFirst() + DescriptionFilterState descriptionFilter = chart.getController().getQuickHideFilters().stream() + .filter(otherFilterState -> testFilter.getFilter().equals(otherFilterState.getFilter())) + .findFirst() .orElseGet(() -> { //if the selected state of the filter changes, do chart layout testFilter.selectedProperty().addListener(selectedProperty -> chart.requestLayout()); diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/UnhideDescriptionAction.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/UnhideDescriptionAction.java index 541a8e347e..d2d495287c 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/UnhideDescriptionAction.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/UnhideDescriptionAction.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2014-16 Basis Technology Corp. + * Copyright 2014-18 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -22,8 +22,8 @@ import javafx.scene.image.Image; import javafx.scene.image.ImageView; import org.controlsfx.control.action.Action; import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DescriptionFilter; import org.sleuthkit.datamodel.DescriptionLoD; -import org.sleuthkit.datamodel.timeline.TimelineFilter.DescriptionFilter; /** * An Action that un-hides, in the given chart, events with the given @@ -44,9 +44,9 @@ class UnhideDescriptionAction extends Action { * test one and checking all the existing filters against it. * Disable them. 
*/ - final DescriptionFilter testFilter = new DescriptionFilter(descriptionLoD, description, DescriptionFilter.FilterMode.EXCLUDE); + final DescriptionFilter testFilter = new DescriptionFilter(descriptionLoD, description); chart.getController().getQuickHideFilters().stream() - .filter(testFilter::equals) + .filter(otherFilterState -> testFilter.equals(otherFilterState.getFilter())) .forEach(descriptionfilter -> descriptionfilter.setSelected(false)); }); } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/DetailViewEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/DetailViewEvent.java index 455260a619..ab4d479b7c 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/DetailViewEvent.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/DetailViewEvent.java @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.timeline.ui.detailview.datamodel; +import java.util.Comparator; import java.util.Optional; import java.util.Set; import java.util.SortedSet; @@ -119,4 +120,12 @@ public interface DetailViewEvent { * @return The EventClusters that make up this event. */ SortedSet getClusters(); + + static class StartComparator implements Comparator { + + @Override + public int compare(DetailViewEvent o1, DetailViewEvent o2) { + return Long.compare(o1.getStartMillis(), o2.getStartMillis()); + } + } } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/DetailsViewModel.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/DetailsViewModel.java index 99e2c8282e..5fedfadca2 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/DetailsViewModel.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/DetailsViewModel.java @@ -25,13 +25,12 @@ import com.google.common.collect.SetMultimap; import com.google.common.eventbus.Subscribe; import java.sql.ResultSet; import java.sql.SQLException; -import java.time.temporal.ChronoUnit; import java.util.ArrayList; -import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.logging.Level; @@ -42,12 +41,11 @@ import org.joda.time.Interval; import org.joda.time.Period; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.timeline.FilteredEventsModel; -import static org.sleuthkit.autopsy.timeline.FilteredEventsModel.unGroupConcat; import org.sleuthkit.autopsy.timeline.TimeLineController; -import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState; +import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.UIFilter; import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl; import org.sleuthkit.autopsy.timeline.utils.RangeDivision; -import org.sleuthkit.autopsy.timeline.utils.TimelineDBUtils; +import org.sleuthkit.autopsy.timeline.zooming.TimeUnits; import org.sleuthkit.autopsy.timeline.zooming.ZoomState; import org.sleuthkit.datamodel.DescriptionLoD; import org.sleuthkit.datamodel.SleuthkitCase; @@ -55,6 +53,8 @@ import org.sleuthkit.datamodel.TimelineManager; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.timeline.EventType; import org.sleuthkit.datamodel.timeline.EventTypeZoomLevel; +import org.sleuthkit.datamodel.timeline.TimelineEvent; +import org.sleuthkit.datamodel.timeline.TimelineFilter; /** * Model for the Details View. 
Uses FilteredEventsModel as underlying datamodel @@ -65,7 +65,7 @@ final public class DetailsViewModel { private final static Logger logger = Logger.getLogger(DetailsViewModel.class.getName()); private final FilteredEventsModel eventsModel; - private final LoadingCache> eventStripeCache; + private final LoadingCache> eventCache; private final TimelineManager eventManager; private final SleuthkitCase sleuthkitCase; @@ -73,41 +73,21 @@ final public class DetailsViewModel { this.eventsModel = eventsModel; this.eventManager = eventsModel.getEventManager(); this.sleuthkitCase = eventsModel.getSleuthkitCase(); - eventStripeCache = CacheBuilder.newBuilder() + eventCache = CacheBuilder.newBuilder() .maximumSize(1000L) .expireAfterAccess(10, TimeUnit.MINUTES) - .build(new CacheLoaderImpl<>(params -> getEventStripes(params, TimeLineController.getJodaTimeZone()))); + .build(new CacheLoaderImpl<>(params + -> getEvents(params, TimeLineController.getJodaTimeZone()))); eventsModel.registerForEvents(this); } @Subscribe void handleCacheInvalidation(FilteredEventsModel.CacheInvalidatedEvent event) { - eventStripeCache.invalidateAll(); + eventCache.invalidateAll(); } /** - * - * @return a list of event clusters at the requested zoom levels that are - * within the requested time range and pass the requested filter - * - * @throws org.sleuthkit.datamodel.TskCoreException - */ - public List getEventStripes() throws TskCoreException { - final Interval range; - final RootFilterState filter; - final EventTypeZoomLevel zoom; - final DescriptionLoD lod; - synchronized (this) { - range = eventsModel.getTimeRange(); - filter = eventsModel.getFilterState(); - zoom = eventsModel.getEventTypeZoom(); - lod = eventsModel.getDescriptionLOD(); - } - return getEventStripes(new ZoomState(range, zoom, filter, lod)); - } - - /** - * @param params + * @param zoom * * @return a list of aggregated events that are within the requested time * range and pass the requested filter, using the given aggregation @@ -115,11 +95,42 @@ final public class DetailsViewModel { * * @throws org.sleuthkit.datamodel.TskCoreException */ - public List getEventStripes(ZoomState params) throws TskCoreException { + public List getEventStripes(ZoomState zoom) throws TskCoreException { + return getEventStripes(UIFilter.getAllPassFilter(), zoom); + } + + /** + * @param zoom + * + * @return a list of aggregated events that are within the requested time + * range and pass the requested filter, using the given aggregation + * to control the grouping of events + * + * @throws org.sleuthkit.datamodel.TskCoreException + */ + public List getEventStripes(UIFilter uiFilter, ZoomState zoom) throws TskCoreException { + DateTimeZone timeZone = TimeLineController.getJodaTimeZone(); + //unpack params + Interval timeRange = zoom.getTimeRange(); + DescriptionLoD descriptionLOD = zoom.getDescriptionLOD(); + EventTypeZoomLevel typeZoomLevel = zoom.getTypeZoomLevel(); + + //intermediate results + Map> eventClusters = new HashMap<>(); try { - return eventStripeCache.get(params); + eventCache.get(zoom).stream() + .filter(uiFilter) + .forEach(event -> { + EventType clusterType = event.getEventType(typeZoomLevel); + eventClusters.computeIfAbsent(clusterType, eventType -> HashMultimap.create()) + .put(event.getDescription(descriptionLOD), new EventCluster(event, clusterType, descriptionLOD)); + }); + //get some info about the time range requested + TimeUnits periodSize = RangeDivision.getRangeDivision(timeRange, timeZone).getPeriodSize(); + return 
mergeClustersToStripes(periodSize.toUnitPeriod(), eventClusters); + } catch (ExecutionException ex) { - throw new TskCoreException("Failed to load Event Stripes from cache for " + params.toString(), ex); //NON-NLS + throw new TskCoreException("Failed to load Event Stripes from cache for " + zoom.toString(), ex); //NON-NLS } } @@ -138,12 +149,10 @@ final public class DetailsViewModel { * @throws org.sleuthkit.datamodel.TskCoreException If there is an error * querying the db. */ - public List getEventStripes(ZoomState zoom, DateTimeZone timeZone) throws TskCoreException { + List getEvents(ZoomState zoom, DateTimeZone timeZone) throws TskCoreException { //unpack params Interval timeRange = zoom.getTimeRange(); - RootFilterState filterState = zoom.getFilterState(); - DescriptionLoD descriptionLOD = zoom.getDescriptionLOD(); - EventTypeZoomLevel typeZoomLevel = zoom.getTypeZoomLevel(); + TimelineFilter.RootFilter activeFilter = zoom.getFilterState().getActiveFilter(); long start = timeRange.getStartMillis() / 1000; long end = timeRange.getEndMillis() / 1000; @@ -151,101 +160,98 @@ final public class DetailsViewModel { //ensure length of querried interval is not 0 end = Math.max(end, start + 1); - //get some info about the time range requested - RangeDivision rangeInfo = RangeDivision.getRangeDivision(timeRange, timeZone); - //build dynamic parts of query - String descriptionColumn = eventManager.getDescriptionColumn(descriptionLOD); - final boolean useSubTypes = typeZoomLevel.equals(EventTypeZoomLevel.SUB_TYPE); - String typeColumn = TimelineManager.typeColumnHelper(useSubTypes); + String querySql = "SELECT time, file_obj_id, data_source_obj_id, artifact_id, " // NON-NLS + + " event_id, " //NON-NLS + + " hash_hit, " //NON-NLS + + " tagged, " //NON-NLS + + " sub_type, base_type, " + + " full_description, med_description, short_description " // NON-NLS + + " FROM " + TimelineManager.getAugmentedEventsTablesSQL(activeFilter) // NON-NLS + + " WHERE time >= " + start + " AND time < " + end + " AND " + eventManager.getSQLWhere(activeFilter) // NON-NLS + + " ORDER BY time"; // NON-NLS - TimelineDBUtils dbUtils = new TimelineDBUtils(sleuthkitCase); - String querySql = "SELECT " + formatTimeFunctionHelper(rangeInfo.getPeriodSize().toChronoUnit(), timeZone) + " AS interval, " // NON-NLS - + dbUtils.csvAggFunction("tsk_events.event_id") + " as event_ids, " //NON-NLS - + dbUtils.csvAggFunction("CASE WHEN hash_hit = 1 THEN tsk_events.event_id ELSE NULL END") + " as hash_hits, " //NON-NLS - + dbUtils.csvAggFunction("CASE WHEN tagged = 1 THEN tsk_events.event_id ELSE NULL END") + " as taggeds, " //NON-NLS - + " min(time) AS minTime, max(time) AS maxTime, " + typeColumn + ", " + descriptionColumn // NON-NLS - + " FROM " + TimelineManager.getAugmentedEventsTablesSQL(filterState.getActiveFilter()) // NON-NLS - + " WHERE time >= " + start + " AND time < " + end + " AND " + eventManager.getSQLWhere(filterState.getActiveFilter()) // NON-NLS - + " GROUP BY interval, " + typeColumn + " , " + descriptionColumn // NON-NLS - + " ORDER BY min(time)"; // NON-NLS - - // perform query and map results to AggregateEvent objects - List events = new ArrayList<>(); + List events = new ArrayList<>(); try (SleuthkitCase.CaseDbQuery dbQuery = sleuthkitCase.executeQuery(querySql); ResultSet resultSet = dbQuery.getResultSet();) { while (resultSet.next()) { - events.add(eventClusterHelper(resultSet, typeColumn, descriptionLOD, timeZone)); + events.add(eventHelper(resultSet)); } + } catch (TskCoreException ex) { + 
logger.log(Level.SEVERE, "Failed to get events with query: " + querySql, ex); // NON-NLS + throw ex; } catch (SQLException ex) { logger.log(Level.SEVERE, "Failed to get events with query: " + querySql, ex); // NON-NLS + throw new TskCoreException("Failed to get events with query: " + querySql, ex); } - - return mergeClustersToStripes(rangeInfo.getPeriodSize().toUnitPeriod(), events); + return events; } /** - * map a single row in a ResultSet to an EventCluster + * Map a single row in a ResultSet to an EventCluster * * @param resultSet the result set whose current row should be mapped - * @param useSubTypes use the sub_type column if true, else use the - * base_type column + * @param typeColumn The type column (sub_type or base_type) to use as + * the type of the event cluster * @param descriptionLOD the description level of detail for this event - * @param filter + * cluster * - * @return an AggregateEvent corresponding to the current row in the given + * @return an EventCluster corresponding to the current row in the given * result set * * @throws SQLException */ - private EventCluster eventClusterHelper(ResultSet resultSet, String typeColumn, DescriptionLoD descriptionLOD, DateTimeZone timeZone) throws SQLException, TskCoreException { - Interval interval = new Interval(resultSet.getLong("minTime") * 1000, resultSet.getLong("maxTime") * 1000, timeZone); + private TimelineEvent eventHelper(ResultSet resultSet) throws SQLException, TskCoreException { - List eventIDs = unGroupConcat(resultSet.getString("event_ids"), Long::valueOf); // NON-NLS - String description = resultSet.getString(eventManager.getDescriptionColumn(descriptionLOD)); - int eventTypeID = resultSet.getInt(typeColumn); + //the event tyepe to use to get the description. + int eventTypeID = resultSet.getInt("sub_type"); EventType eventType = eventManager.getEventType(eventTypeID).orElseThrow(() -> new TskCoreException("Error mapping event type id " + eventTypeID + "to EventType."));//NON-NLS - List hashHits = unGroupConcat(resultSet.getString("hash_hits"), Long::valueOf); //NON-NLS - List tagged = unGroupConcat(resultSet.getString("taggeds"), Long::valueOf); //NON-NLS - - return new EventCluster(interval, eventType, eventIDs, hashHits, tagged, description, descriptionLOD); + return new TimelineEvent( + resultSet.getLong("event_id"), // NON-NLS + resultSet.getLong("data_source_obj_id"), // NON-NLS + resultSet.getLong("file_obj_id"), // NON-NLS + resultSet.getLong("artifact_id"), // NON-NLS + resultSet.getLong("time"), // NON-NLS + eventType, + eventType.getDescription( + resultSet.getString("full_description"), // NON-NLS + resultSet.getString("med_description"), // NON-NLS + resultSet.getString("short_description")), // NON-NLS + resultSet.getInt("hash_hit") != 0, //NON-NLS + resultSet.getInt("tagged") != 0); } /** - * merge the events in the given list if they are within the same period + * Merge the events in the given list if they are within the same period * General algorithm is as follows: * - * 1) sort them into a map from (type, description)-> List + * 1) sort them into a map from (type, description)-> List * 2) for each key in map, merge the events and accumulate them in a list to * return * * @param timeUnitLength - * @param preMergedEvents + * @param eventClusters * * @return */ - static private List mergeClustersToStripes(Period timeUnitLength, List preMergedEvents) { + static private List mergeClustersToStripes(Period timeUnitLength, Map> eventClusters) { - //effectively map from type to (map from description to 
events) - Map> typeMap = new HashMap<>(); - - for (EventCluster aggregateEvent : preMergedEvents) { - typeMap.computeIfAbsent(aggregateEvent.getEventType(), eventType -> HashMultimap.create()) - .put(aggregateEvent.getDescription(), aggregateEvent); - } //result list to return - ArrayList aggEvents = new ArrayList<>(); + ArrayList mergedClusters = new ArrayList<>(); //For each (type, description) key, merge agg events - for (SetMultimap descrMap : typeMap.values()) { + for (Map.Entry> typeMapEntry : eventClusters.entrySet()) { + EventType type = typeMapEntry.getKey(); + SetMultimap descrMap = typeMapEntry.getValue(); //for each description ... for (String descr : descrMap.keySet()) { + Set events = descrMap.get(descr); //run through the sorted events, merging together adjacent events - Iterator iterator = descrMap.get(descr).stream() - .sorted(Comparator.comparing(event -> event.getSpan().getStartMillis())) + Iterator iterator = events.stream() + .sorted(new DetailViewEvent.StartComparator()) .iterator(); EventCluster current = iterator.next(); while (iterator.hasNext()) { @@ -259,106 +265,25 @@ final public class DetailsViewModel { current = EventCluster.merge(current, next); } else { //done merging into current, set next as new current - aggEvents.add(current); + mergedClusters.add(current); current = next; } } - aggEvents.add(current); + mergedClusters.add(current); } } //merge clusters to stripes Map, EventStripe> stripeDescMap = new HashMap<>(); - for (EventCluster eventCluster : aggEvents) { + for (EventCluster eventCluster : mergedClusters) { stripeDescMap.merge(ImmutablePair.of(eventCluster.getEventType(), eventCluster.getDescription()), new EventStripe(eventCluster), EventStripe::merge); } - return stripeDescMap.values().stream().sorted(Comparator.comparing(EventStripe::getStartMillis)).collect(Collectors.toList()); + return stripeDescMap.values().stream() + .sorted(new DetailViewEvent.StartComparator()) + .collect(Collectors.toList()); } - /** - * Get a column specification that will allow us to group by the requested - * period size. That is, with all info more granular than that requested - * dropped (replaced with zeros). For use in the select clause of a sql - * query. - * - * @param periodSize The ChronoUnit describing what granularity to use. - * @param timeZone - * - * @return - */ - private String formatTimeFunctionHelper(ChronoUnit periodSize, DateTimeZone timeZone) { - switch (sleuthkitCase.getDatabaseType()) { - case SQLITE: - String strfTimeFormat = getSQLIteTimeFormat(periodSize); - String useLocalTime = timeZone.equals(DateTimeZone.getDefault()) ? ", 'localtime'" : ""; // NON-NLS - return "strftime('" + strfTimeFormat + "', time , 'unixepoch'" + useLocalTime + ")"; - case POSTGRESQL: - String formatString = getPostgresTimeFormat(periodSize); - return "to_char(to_timestamp(time) AT TIME ZONE '" + timeZone.getID() + "', '" + formatString + "')"; - default: - throw new UnsupportedOperationException("Unsupported DB type: " + sleuthkitCase.getDatabaseType().name()); - } - } - - /* - * Get a format string that will allow us to group by the requested period - * size. That is, with all info more granular than that requested dropped - * (replaced with zeros). 
- * - * @param timeUnit The ChronoUnit describing what granularity to build a - * strftime string for - * - * @return a String formatted according to the sqlite strftime spec - * - * @see https://www.sqlite.org/lang_datefunc.html - */ - private static String getSQLIteTimeFormat(ChronoUnit timeUnit) { - switch (timeUnit) { - case YEARS: - return "%Y-01-01T00:00:00"; // NON-NLS - case MONTHS: - return "%Y-%m-01T00:00:00"; // NON-NLS - case DAYS: - return "%Y-%m-%dT00:00:00"; // NON-NLS - case HOURS: - return "%Y-%m-%dT%H:00:00"; // NON-NLS - case MINUTES: - return "%Y-%m-%dT%H:%M:00"; // NON-NLS - case SECONDS: - default: //seconds - should never happen - return "%Y-%m-%dT%H:%M:%S"; // NON-NLS - } - } - - /** - * Get a format string that will allow us to group by the requested period - * size. That is, with all info more granular than that requested dropped - * (replaced with zeros). - * - * @param timeUnit The ChronoUnit describing what granularity to build a - * strftime string for - * - * @return a String formatted according to the Postgres - * to_char(to_timestamp(time) ... ) spec - */ - private static String getPostgresTimeFormat(ChronoUnit timeUnit) { - switch (timeUnit) { - case YEARS: - return "YYYY-01-01T00:00:00"; // NON-NLS - case MONTHS: - return "YYYY-MM-01T00:00:00"; // NON-NLS - case DAYS: - return "YYYY-MM-DDT00:00:00"; // NON-NLS - case HOURS: - return "YYYY-MM-DDTHH24:00:00"; // NON-NLS - case MINUTES: - return "YYYY-MM-DDTHH24:MI:00"; // NON-NLS - case SECONDS: - default: //seconds - should never happen - return "YYYY-MM-DDTHH24:MI:SS"; // NON-NLS - } - } } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/EventCluster.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/EventCluster.java index 7cc4c172bd..0e1b1c7716 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/EventCluster.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/datamodel/EventCluster.java @@ -20,16 +20,20 @@ package org.sleuthkit.autopsy.timeline.ui.detailview.datamodel; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; -import com.google.common.collect.Sets; +import static com.google.common.collect.Sets.union; import java.util.Collection; +import static java.util.Collections.emptySet; +import static java.util.Collections.singleton; import java.util.Comparator; import java.util.Objects; import java.util.Optional; +import java.util.Set; import java.util.SortedSet; import org.joda.time.Interval; import org.sleuthkit.autopsy.timeline.utils.IntervalUtils; import org.sleuthkit.datamodel.DescriptionLoD; import org.sleuthkit.datamodel.timeline.EventType; +import org.sleuthkit.datamodel.timeline.TimelineEvent; /** * Represents a set of other events clustered together. 
All the sub events @@ -38,214 +42,224 @@ import org.sleuthkit.datamodel.timeline.EventType; */ public class EventCluster implements MultiEvent { - final private EventStripe parent; + final private EventStripe parent; - /** - * the smallest time interval containing all the clustered events - */ - final private Interval span; + /** + * the smallest time interval containing all the clustered events + */ + final private Interval span; - /** - * the type of all the clustered events - */ - final private EventType type; + /** + * the type of all the clustered events + */ + final private EventType type; - /** - * the common description of all the clustered events - */ - final private String description; + /** + * the common description of all the clustered events + */ + final private String description; - /** - * the description level of detail that the events were clustered at. - */ - private final DescriptionLoD lod; + /** + * the description level of detail that the events were clustered at. + */ + private final DescriptionLoD lod; - /** - * the set of ids of the clustered events - */ - final private ImmutableSet eventIDs; + /** + * the set of ids of the clustered events + */ + final private ImmutableSet eventIDs; - /** - * the ids of the subset of clustered events that have at least one tag - * applied to them - */ - private final ImmutableSet tagged; + /** + * the ids of the subset of clustered events that have at least one tag + * applied to them + */ + private final ImmutableSet tagged; - /** - * the ids of the subset of clustered events that have at least one hash set - * hit - */ - private final ImmutableSet hashHits; + /** + * the ids of the subset of clustered events that have at least one hash set + * hit + */ + private final ImmutableSet hashHits; - /** - * merge two event clusters into one new event cluster. - * - * @param cluster1 - * @param cluster2 - * - * @return a new event cluster that is the result of merging the given - * events clusters - */ - public static EventCluster merge(EventCluster cluster1, EventCluster cluster2) { - if (cluster1.getEventType() != cluster2.getEventType()) { - throw new IllegalArgumentException("event clusters are not compatible: they have different types"); - } + /** + * merge two event clusters into one new event cluster. 
+ * + * @param cluster1 + * @param cluster2 + * + * @return a new event cluster that is the result of merging the given + * events clusters + */ + public static EventCluster merge(EventCluster cluster1, EventCluster cluster2) { + if (cluster1.getEventType() != cluster2.getEventType()) { + throw new IllegalArgumentException("event clusters are not compatible: they have different types"); + } - if (!cluster1.getDescription().equals(cluster2.getDescription())) { - throw new IllegalArgumentException("event clusters are not compatible: they have different descriptions"); - } - Sets.SetView idsUnion - = Sets.union(cluster1.getEventIDs(), cluster2.getEventIDs()); - Sets.SetView hashHitsUnion - = Sets.union(cluster1.getEventIDsWithHashHits(), cluster2.getEventIDsWithHashHits()); - Sets.SetView taggedUnion - = Sets.union(cluster1.getEventIDsWithTags(), cluster2.getEventIDsWithTags()); + if (!cluster1.getDescription().equals(cluster2.getDescription())) { + throw new IllegalArgumentException("event clusters are not compatible: they have different descriptions"); + } - return new EventCluster(IntervalUtils.span(cluster1.span, cluster2.span), - cluster1.getEventType(), idsUnion, hashHitsUnion, taggedUnion, - cluster1.getDescription(), cluster1.lod); - } + Interval spanningInterval = IntervalUtils.span(cluster1.span, cluster2.span); - private EventCluster(Interval spanningInterval, EventType type, Collection eventIDs, - Collection hashHits, Collection tagged, String description, DescriptionLoD lod, - EventStripe parent) { + Set idsUnion = union(cluster1.getEventIDs(), cluster2.getEventIDs()); + Set hashHitsUnion = union(cluster1.getEventIDsWithHashHits(), cluster2.getEventIDsWithHashHits()); + Set taggedUnion = union(cluster1.getEventIDsWithTags(), cluster2.getEventIDsWithTags()); - this.span = spanningInterval; - this.type = type; - this.hashHits = ImmutableSet.copyOf(hashHits); - this.tagged = ImmutableSet.copyOf(tagged); - this.description = description; - this.eventIDs = ImmutableSet.copyOf(eventIDs); - this.lod = lod; - this.parent = parent; - } + return new EventCluster(spanningInterval, + cluster1.getEventType(), idsUnion, hashHitsUnion, taggedUnion, + cluster1.getDescription(), cluster1.lod); + } - public EventCluster(Interval spanningInterval, EventType type, Collection eventIDs, - Collection hashHits, Collection tagged, String description, DescriptionLoD lod) { - this(spanningInterval, type, eventIDs, hashHits, tagged, description, lod, null); - } + private EventCluster(Interval spanningInterval, EventType type, Collection eventIDs, + Collection hashHits, Collection tagged, String description, DescriptionLoD lod, + EventStripe parent) { - /** - * get the EventStripe (if any) that contains this cluster - * - * @return an Optional containg the parent stripe of this cluster, or is - * empty if the cluster has no parent set. - */ - @Override - public Optional getParent() { - return Optional.ofNullable(parent); - } + this.span = spanningInterval; + this.type = type; + this.hashHits = ImmutableSet.copyOf(hashHits); + this.tagged = ImmutableSet.copyOf(tagged); + this.description = description; + this.eventIDs = ImmutableSet.copyOf(eventIDs); + this.lod = lod; + this.parent = parent; + } - /** - * get the EventStripe (if any) that contains this cluster - * - * @return an Optional containg the parent stripe of this cluster, or is - * empty if the cluster has no parent set. 
- */ - @Override - public Optional getParentStripe() { - //since this clusters parent must be an event stripe just delegate to getParent(); - return getParent(); - } + public EventCluster(Interval spanningInterval, EventType type, Collection eventIDs, + Collection hashHits, Collection tagged, String description, DescriptionLoD lod) { + this(spanningInterval, type, eventIDs, hashHits, tagged, description, lod, null); + } - public Interval getSpan() { - return span; - } + public EventCluster(TimelineEvent event, EventType type, DescriptionLoD lod) { + this(new Interval(event.getStartMillis(), event.getEndMillis()), + type, + singleton(event.getEventID()), + event.isHashHit() ? singleton(event.getEventID()) : emptySet(), + event.isTagged() ? singleton(event.getEventID()) : emptySet(), + event.getDescription(lod), + lod); + } - @Override - public long getStartMillis() { - return span.getStartMillis(); - } + /** + * get the EventStripe (if any) that contains this cluster + * + * @return an Optional containg the parent stripe of this cluster, or is + * empty if the cluster has no parent set. + */ + @Override + public Optional getParent() { + return Optional.ofNullable(parent); + } - @Override - public long getEndMillis() { - return span.getEndMillis(); - } + /** + * get the EventStripe (if any) that contains this cluster + * + * @return an Optional containg the parent stripe of this cluster, or is + * empty if the cluster has no parent set. + */ + @Override + public Optional getParentStripe() { + //since this clusters parent must be an event stripe just delegate to getParent(); + return getParent(); + } - @Override - public ImmutableSet getEventIDs() { - return eventIDs; - } + public Interval getSpan() { + return span; + } - @Override - public ImmutableSet getEventIDsWithHashHits() { - return hashHits; - } + @Override + public long getStartMillis() { + return span.getStartMillis(); + } - @Override - public ImmutableSet getEventIDsWithTags() { - return tagged; - } + @Override + public long getEndMillis() { + return span.getEndMillis(); + } - @Override - public String getDescription() { - return description; - } + @Override + public ImmutableSet getEventIDs() { + return eventIDs; + } - @Override - public EventType getEventType() { - return type; - } + @Override + public ImmutableSet getEventIDsWithHashHits() { + return hashHits; + } - @Override - public DescriptionLoD getDescriptionLoD() { - return lod; - } + @Override + public ImmutableSet getEventIDsWithTags() { + return tagged; + } - /** - * return a new EventCluster identical to this one, except with the given - * EventBundle as the parent. - * - * @param parent - * - * @return a new EventCluster identical to this one, except with the given - * EventBundle as the parent. 
- */ - public EventCluster withParent(EventStripe parent) { - return new EventCluster(span, type, eventIDs, hashHits, tagged, description, lod, parent); - } + @Override + public String getDescription() { + return description; + } - @Override - public SortedSet getClusters() { - return ImmutableSortedSet.orderedBy(Comparator.comparing(EventCluster::getStartMillis)).add(this).build(); - } + @Override + public EventType getEventType() { + return type; + } - @Override - public String toString() { - return "EventCluster{" + "description=" + description + ", eventIDs=" + eventIDs.size() + '}'; - } + @Override + public DescriptionLoD getDescriptionLoD() { + return lod; + } - @Override - public int hashCode() { - int hash = 7; - hash = 23 * hash + Objects.hashCode(this.type); - hash = 23 * hash + Objects.hashCode(this.description); - hash = 23 * hash + Objects.hashCode(this.lod); - hash = 23 * hash + Objects.hashCode(this.eventIDs); - return hash; - } + /** + * return a new EventCluster identical to this one, except with the given + * EventBundle as the parent. + * + * @param parent + * + * @return a new EventCluster identical to this one, except with the given + * EventBundle as the parent. + */ + public EventCluster withParent(EventStripe parent) { + return new EventCluster(span, type, eventIDs, hashHits, tagged, description, lod, parent); + } - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - final EventCluster other = (EventCluster) obj; - if (!Objects.equals(this.description, other.description)) { - return false; - } - if (!Objects.equals(this.type, other.type)) { - return false; - } - if (this.lod != other.lod) { - return false; - } - return Objects.equals(this.eventIDs, other.eventIDs); - } + @Override + public SortedSet getClusters() { + return ImmutableSortedSet.orderedBy(Comparator.comparing(EventCluster::getStartMillis)).add(this).build(); + } + + @Override + public String toString() { + return "EventCluster{" + "description=" + description + ", eventIDs=" + eventIDs.size() + '}'; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 23 * hash + Objects.hashCode(this.type); + hash = 23 * hash + Objects.hashCode(this.description); + hash = 23 * hash + Objects.hashCode(this.lod); + hash = 23 * hash + Objects.hashCode(this.eventIDs); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final EventCluster other = (EventCluster) obj; + if (!Objects.equals(this.description, other.description)) { + return false; + } + if (!Objects.equals(this.type, other.type)) { + return false; + } + if (this.lod != other.lod) { + return false; + } + return Objects.equals(this.eventIDs, other.eventIDs); + } } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/tree/EventsTree.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/tree/EventsTree.java index 483357cf24..c09c5cde58 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/tree/EventsTree.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/detailview/tree/EventsTree.java @@ -56,8 +56,8 @@ import static org.sleuthkit.autopsy.timeline.ui.EventTypeUtils.getColor; import static org.sleuthkit.autopsy.timeline.ui.EventTypeUtils.getImagePath; import org.sleuthkit.autopsy.timeline.ui.detailview.DetailViewPane; 
import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.DetailViewEvent; +import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DescriptionFilter; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState; -import org.sleuthkit.datamodel.timeline.TimelineFilter.DescriptionFilter; /** * Shows all EventBundles from the assigned DetailViewPane in a tree organized diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/FilterSetPanel.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/FilterSetPanel.java index 91b97383db..fd943158cb 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/FilterSetPanel.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/FilterSetPanel.java @@ -43,10 +43,9 @@ import org.sleuthkit.autopsy.timeline.FXMLConstructor; import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.actions.ResetFilters; +import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DescriptionFilterState; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState; -import org.sleuthkit.datamodel.timeline.TimelineFilter; -import org.sleuthkit.datamodel.timeline.TimelineFilter.DescriptionFilter; /** * The FXML controller for the filter ui. @@ -74,7 +73,7 @@ final public class FilterSetPanel extends BorderPane { private TreeTableColumn, FilterState> legendColumn; @FXML - private ListView> hiddenDescriptionsListView; + private ListView hiddenDescriptionsListView; @FXML private TitledPane hiddenDescriptionsPane; @FXML @@ -87,7 +86,7 @@ final public class FilterSetPanel extends BorderPane { * Map from filter to its expansion state in the ui, used to restore the * expansion state as we navigate back and forward in the history */ - private final ObservableMap< TimelineFilter, Boolean> expansionMap = FXCollections.observableHashMap(); + private final ObservableMap< Object, Boolean> expansionMap = FXCollections.observableHashMap(); private double dividerPosition; @NbBundle.Messages({ @@ -119,14 +118,13 @@ final public class FilterSetPanel extends BorderPane { expansionMap.put(filteredEvents.getFilterState().getFilter(), true); expansionMap.put(filteredEvents.getFilterState().getEventTypeFilterState().getFilter(), true); - InvalidationListener applyFiltersListener = observable -> applyFilters(); filteredEvents.eventTypeZoomProperty().addListener(applyFiltersListener); filteredEvents.descriptionLODProperty().addListener(applyFiltersListener); filteredEvents.timeRangeProperty().addListener(applyFiltersListener); - filteredEvents.filterProperty().addListener(observable -> refresh()); + filteredEvents.filterProperty().addListener(observable -> refresh()); refresh(); hiddenDescriptionsListView.setItems(controller.getQuickHideFilters()); @@ -166,9 +164,8 @@ final public class FilterSetPanel extends BorderPane { } private void refresh() { - Platform.runLater(() -> { - filterTreeTable.setRoot(new FilterTreeItem(filteredEvents.filterProperty().get().copyOf(), expansionMap)); - }); + Platform.runLater(() + -> filterTreeTable.setRoot(new FilterTreeItem(filteredEvents.filterProperty().get().copyOf(), expansionMap))); } private void applyFilters() { @@ -177,8 +174,8 @@ final public class FilterSetPanel extends BorderPane { }); } - private ListCell> getNewDiscriptionFilterListCell() { - final ListCell> cell = new FilterCheckBoxCellFactory< 
FilterState>().forList(); + private ListCell getNewDiscriptionFilterListCell() { + final ListCell cell = new FilterCheckBoxCellFactory< DescriptionFilterState>().forList(); cell.itemProperty().addListener(itemProperty -> { if (cell.getItem() == null) { cell.setContextMenu(null); @@ -210,7 +207,7 @@ final public class FilterSetPanel extends BorderPane { private static final Image SHOW = new Image("/org/sleuthkit/autopsy/timeline/images/eye--plus.png"); // NON-NLS - RemoveDescriptionFilterAction(TimeLineController controller, Cell> cell) { + RemoveDescriptionFilterAction(TimeLineController controller, Cell cell) { super(actionEvent -> controller.getQuickHideFilters().remove(cell.getItem())); setGraphic(new ImageView(SHOW)); textProperty().bind( diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/FilterTreeItem.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/FilterTreeItem.java index 5fe5d6a210..9dc2c237fb 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/FilterTreeItem.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/FilterTreeItem.java @@ -40,7 +40,7 @@ final public class FilterTreeItem extends TreeItem> { * children of this FilterTreeItem * @param expansionMap Map from filter to whether it is expanded or not. */ - public FilterTreeItem(FilterState filterState, ObservableMap expansionMap) { + public FilterTreeItem(FilterState filterState, ObservableMap expansionMap) { super(filterState); //keep expanion map upto date if user expands/collapses filter @@ -72,7 +72,7 @@ final public class FilterTreeItem extends TreeItem> { } }); - compoundFilter.selectedProperty().addListener( observable -> { + compoundFilter.selectedProperty().addListener(observable -> { if (compoundFilter.isSelected()) { setExpanded(true); } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/CompoundFilterStateImpl.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/CompoundFilterStateImpl.java index 9207b0ccd3..62c5a6177a 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/CompoundFilterStateImpl.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/CompoundFilterStateImpl.java @@ -21,6 +21,7 @@ package org.sleuthkit.autopsy.timeline.ui.filtering.datamodel; import com.google.common.collect.Lists; import java.util.Collection; import java.util.List; +import java.util.Objects; import java.util.stream.Collectors; import javafx.collections.FXCollections; import javafx.collections.ListChangeListener; @@ -119,7 +120,7 @@ class CompoundFilterStateImpl other = (CompoundFilterStateImpl) obj; + if (!Objects.equals(this.getFilter(), other.getFilter())) { + return false; + } + if (!Objects.equals(this.isSelected(), other.isSelected())) { + return false; + } + if (!Objects.equals(this.isDisabled(), other.isDisabled())) { + return false; + } + if (!Objects.equals(this.subFilterStates, other.subFilterStates)) { + return false; + } + return true; + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/DefaultFilterState.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/DefaultFilterState.java index 90ab7cf47a..9f37bd7b33 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/DefaultFilterState.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/DefaultFilterState.java @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.timeline.ui.filtering.datamodel; +import java.util.Objects; import 
javafx.beans.binding.Bindings; import javafx.beans.binding.BooleanBinding; import javafx.beans.binding.BooleanExpression; @@ -35,7 +36,12 @@ public class DefaultFilterState implements Fi private final FilterType filter; public DefaultFilterState(FilterType filter) { + this(filter, false); + } + + public DefaultFilterState(FilterType filter, boolean selected) { this.filter = filter; + this.selected.set(selected); } private final SimpleBooleanProperty selected = new SimpleBooleanProperty(false); @@ -105,4 +111,37 @@ public class DefaultFilterState implements Fi public FilterType getActiveFilter() { return isActive() ? getFilter() : null; } + + @Override + public int hashCode() { + int hash = 7; + hash = 37 * hash + Objects.hashCode(this.filter); + hash = 37 * hash + Objects.hashCode(this.selected); + hash = 37 * hash + Objects.hashCode(this.disabled); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final DefaultFilterState other = (DefaultFilterState) obj; + if (!Objects.equals(this.filter, other.filter)) { + return false; + } + if (!Objects.equals(this.selected, other.selected)) { + return false; + } + if (!Objects.equals(this.disabled, other.disabled)) { + return false; + } + return true; + } } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/DescriptionFilter.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/DescriptionFilter.java new file mode 100644 index 0000000000..1c8ab06088 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/DescriptionFilter.java @@ -0,0 +1,83 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.timeline.ui.filtering.datamodel; + +import java.util.Objects; +import org.sleuthkit.datamodel.DescriptionLoD; +import org.sleuthkit.datamodel.timeline.TimelineEvent; + +/** + * Ui level filter for events that have the given description. 
+ */ +public final class DescriptionFilter implements UIFilter { + + private final DescriptionLoD descriptionLoD; + private final String description; + + public DescriptionFilter(DescriptionLoD descriptionLoD, String description) { + super(); + this.descriptionLoD = descriptionLoD; + this.description = description; + } + + public DescriptionLoD getDescriptionLoD() { + return descriptionLoD; + } + + /** + * @return the description + */ + public String getDescription() { + return description; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 23 * hash + Objects.hashCode(this.descriptionLoD); + hash = 23 * hash + Objects.hashCode(this.description); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final DescriptionFilter other = (DescriptionFilter) obj; + if (!Objects.equals(this.description, other.description)) { + return false; + } + if (this.descriptionLoD != other.descriptionLoD) { + return false; + } + return true; + } + + @Override + public boolean test(TimelineEvent event) { + return event.getDescription(descriptionLoD).equalsIgnoreCase(description); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/DescriptionFilterState.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/DescriptionFilterState.java new file mode 100644 index 0000000000..1d44be3dc2 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/DescriptionFilterState.java @@ -0,0 +1,143 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.timeline.ui.filtering.datamodel; + +import java.util.Objects; +import javafx.beans.binding.Bindings; +import javafx.beans.binding.BooleanBinding; +import javafx.beans.binding.BooleanExpression; +import javafx.beans.property.BooleanProperty; +import javafx.beans.property.SimpleBooleanProperty; + +/** + * A FilterState implementation for DescriptionFilters + */ +public class DescriptionFilterState implements FilterState { + + private final DescriptionFilter filter; + + public DescriptionFilterState(DescriptionFilter filter) { + this(filter, false); + } + + public DescriptionFilterState(DescriptionFilter filter, boolean selected) { + this.filter = filter; + this.selected.set(selected); + } + + private final SimpleBooleanProperty selected = new SimpleBooleanProperty(false); + private final SimpleBooleanProperty disabled = new SimpleBooleanProperty(false); + private final BooleanBinding activeProp = Bindings.and(selected, disabled.not()); + + @Override + public BooleanProperty selectedProperty() { + return selected; + } + + @Override + public BooleanProperty disabledProperty() { + return disabled; + } + + @Override + public void setSelected(Boolean act) { + selected.set(act); + } + + @Override + public boolean isSelected() { + return selected.get(); + } + + @Override + public void setDisabled(Boolean act) { + disabled.set(act); + } + + @Override + public boolean isDisabled() { + return disabledProperty().get(); + } + + @Override + public boolean isActive() { + return activeProperty().get(); + } + + @Override + public BooleanExpression activeProperty() { + return activeProp; + } + + @Override + public String getDisplayName() { + return filter.getDescription(); + } + + @Override + public DescriptionFilter getFilter() { + return filter; + } + + @Override + public DescriptionFilter getActiveFilter() { + return isActive() ? getFilter() : null; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 37 * hash + Objects.hashCode(this.filter); + hash = 37 * hash + Objects.hashCode(this.selected); + hash = 37 * hash + Objects.hashCode(this.disabled); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final DescriptionFilterState other = (DescriptionFilterState) obj; + if (!Objects.equals(this.filter, other.filter)) { + return false; + } + if (!Objects.equals(this.selected, other.selected)) { + return false; + } + if (!Objects.equals(this.disabled, other.disabled)) { + return false; + } + return true; + } + + @Override + public DescriptionFilterState copyOf() { + DescriptionFilterState copy = new DescriptionFilterState(filter); + copy.setSelected(isSelected()); + copy.setDisabled(isDisabled()); + return copy; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/FilterState.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/FilterState.java index 72bba54ef5..be4da26801 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/FilterState.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/FilterState.java @@ -20,14 +20,13 @@ package org.sleuthkit.autopsy.timeline.ui.filtering.datamodel; import javafx.beans.binding.BooleanExpression; import javafx.beans.property.ReadOnlyBooleanProperty; -import org.sleuthkit.datamodel.timeline.TimelineFilter; /** * The state of a filter: selected, disabled, active, etc. 
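
Usage sketch (illustrative): DescriptionFilterState pairs the new UI-only DescriptionFilter with selected/disabled properties, in the same shape as DefaultFilterState. The description string, the DescriptionLoD.FULL constant, and the event variable below are assumptions for the example, not values from this patch.

    // Quick-hide everything whose full description matches an example string (matching is case-insensitive).
    DescriptionFilter quickHide = new DescriptionFilter(DescriptionLoD.FULL, "C:\\Windows\\Temp\\example.tmp");
    DescriptionFilterState hideState = new DescriptionFilterState(quickHide, true);

    // Only consult the filter while it is selected and not disabled.
    if (hideState.isActive()) {
        // someTimelineEvent is a placeholder for a TimelineEvent loaded elsewhere.
        boolean hidden = hideState.getActiveFilter().test(someTimelineEvent);
    }
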
* * @param The type of filter this is the state for. */ -public interface FilterState { +public interface FilterState { String getDisplayName(); diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/RootFilterState.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/RootFilterState.java index b12c2121e1..ad21f83827 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/RootFilterState.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/RootFilterState.java @@ -23,6 +23,7 @@ import javafx.beans.property.ReadOnlyBooleanProperty; import javafx.beans.property.ReadOnlyBooleanWrapper; import javafx.collections.FXCollections; import javafx.collections.ObservableList; +import javafx.collections.ObservableSet; import static org.apache.commons.lang3.ObjectUtils.notEqual; import org.sleuthkit.datamodel.timeline.TimelineFilter; import org.sleuthkit.datamodel.timeline.TimelineFilter.DataSourceFilter; @@ -51,7 +52,8 @@ public class RootFilterState implements CompoundFilterState< TimelineFilter, Roo private static final ReadOnlyBooleanProperty ALWAYS_TRUE = new ReadOnlyBooleanWrapper(true).getReadOnlyProperty(); private final static ReadOnlyBooleanProperty ALWAYS_FALSE = new ReadOnlyBooleanWrapper(false).getReadOnlyProperty(); - private final ObservableList< FilterState< ?>> subFilterStates = FXCollections.observableArrayList(); + private final ObservableList< FilterState< ? extends TimelineFilter>> subFilterStates = FXCollections.observableArrayList(); + private final ObservableSet< FilterState< ? extends TimelineFilter>> namedFilterStates = FXCollections.observableSet(); private final RootFilter delegate; public RootFilterState(RootFilter delegate) { @@ -89,7 +91,8 @@ public class RootFilterState implements CompoundFilterState< TimelineFilter, Roo hashHitsFilterState, dataSourcesFilterState, fileTypesFilterState, - eventTypeFilterState); + eventTypeFilterState); + namedFilterStates.addAll(subFilterStates); } /** @@ -100,28 +103,24 @@ public class RootFilterState implements CompoundFilterState< TimelineFilter, Roo * @return A new RootFilter model that intersects the given filter with this * one. 
*/ - public RootFilterState intersect(TimelineFilter otherFilter) { + public RootFilterState intersect(FilterState otherFilter) { RootFilterState copyOf = copyOf(); copyOf.addSubFilterState(otherFilter); return copyOf; } - private void addSubFilterState(TimelineFilter subFilter) { - - if (subFilter instanceof TimelineFilter.CompoundFilter) { - CompoundFilterStateImpl> compoundFilterStateImpl = new CompoundFilterStateImpl<>((TimelineFilter.CompoundFilter) subFilter); - getSubFilterStates().add(compoundFilterStateImpl); - compoundFilterStateImpl.setSelected(Boolean.TRUE); - } else { - DefaultFilterState defaultFilterState = new DefaultFilterState<>(subFilter); - getSubFilterStates().add(defaultFilterState); - defaultFilterState.setSelected(Boolean.TRUE); + public void addSubFilterState(FilterState newSubFilterState) { + if (subFilterStates.contains(newSubFilterState) == false) { + subFilterStates.add(newSubFilterState); + } + if (delegate.getSubFilters().contains(newSubFilterState.getFilter())) { + getFilter().getSubFilters().add(newSubFilterState.getFilter()); } } @Override public RootFilterState copyOf() { - return new RootFilterState(getFilter().copyOf(), + RootFilterState copy = new RootFilterState(getFilter().copyOf(), getEventTypeFilterState().copyOf(), getKnownFilterState().copyOf(), getTextFilterState().copyOf(), @@ -130,6 +129,10 @@ public class RootFilterState implements CompoundFilterState< TimelineFilter, Roo getDataSourcesFilterState().copyOf(), getFileTypesFilterState().copyOf() ); + this.getSubFilterStates().stream() + .filter(filterState -> namedFilterStates.contains(filterState) == false) + .forEach(copy::addSubFilterState); + return copy; } public CompoundFilterState getEventTypeFilterState() { @@ -270,4 +273,4 @@ public class RootFilterState implements CompoundFilterState< TimelineFilter, Roo public void setSelected(Boolean act) { throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. } -} \ No newline at end of file +} diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/UIFilter.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/UIFilter.java new file mode 100644 index 0000000000..7df3568a45 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/UIFilter.java @@ -0,0 +1,32 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.timeline.ui.filtering.datamodel; + +import java.util.function.Predicate; +import org.sleuthkit.datamodel.timeline.TimelineEvent; + +/** + * A Filter over TimelineEvents that is applied in the UI, not the DB. 
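
Usage sketch (illustrative): with the changes above, intersect(...) accepts a FilterState wrapping a TimelineFilter rather than a raw filter, and copyOf() carries ad-hoc sub-filter states (those outside namedFilterStates) over to the copy. The filteredEventsModel variable and the data source name/id are placeholders.

    // Narrow the current filter state to a single, hypothetical data source.
    DefaultFilterState<DataSourceFilter> dataSourceState =
            new DefaultFilterState<>(new DataSourceFilter("image1.dd", 1L), true);

    RootFilterState narrowed = filteredEventsModel.getFilterState().intersect(dataSourceState);

    // The ad-hoc data source state added above survives copying.
    RootFilterState copy = narrowed.copyOf();
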
* + */ +public interface UIFilter extends Predicate { + + static UIFilter getAllPassFilter() { + return event -> true; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/datamodel/ListViewModel.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/datamodel/ListViewModel.java index 65f41c935d..0ae3958ded 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/datamodel/ListViewModel.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/datamodel/ListViewModel.java @@ -27,9 +27,9 @@ import java.util.Map; import java.util.Objects; import org.joda.time.Interval; import org.sleuthkit.autopsy.timeline.FilteredEventsModel; -import static org.sleuthkit.autopsy.timeline.FilteredEventsModel.unGroupConcat; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState; import org.sleuthkit.autopsy.timeline.utils.TimelineDBUtils; +import static org.sleuthkit.autopsy.timeline.utils.TimelineDBUtils.unGroupConcat; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TimelineManager; import org.sleuthkit.datamodel.TskCoreException; diff --git a/Core/src/org/sleuthkit/autopsy/timeline/utils/TimelineDBUtils.java b/Core/src/org/sleuthkit/autopsy/timeline/utils/TimelineDBUtils.java index 48ebbdfd8e..7d30c118a3 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/utils/TimelineDBUtils.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/utils/TimelineDBUtils.java @@ -18,7 +18,14 @@ */ package org.sleuthkit.autopsy.timeline.utils; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.function.Function; +import java.util.regex.Pattern; +import static org.apache.commons.lang3.StringUtils.isBlank; import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; /** @@ -36,4 +43,32 @@ public class TimelineDBUtils { return (sleuthkitCase.getDatabaseType() == TskData.DbType.POSTGRESQL ? "string_agg" : "group_concat") + "(Cast (" + args + " AS VARCHAR) , '" + "," + "')"; } + + /** + * take the result of a group_concat SQLite operation and split it into a + * set of X using the mapper to to convert from string to X If groupConcat + * is empty, null, or all whitespace, returns an empty list. 
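
Usage sketch (illustrative): because UIFilter is a Predicate over TimelineeEvent-free of the database layer-description-based quick-hide filters can be applied to events already loaded in memory, without another query. Here quickHideFilters (a collection of DescriptionFilterState) and allEvents (a List of TimelineEvent) are placeholders, and the standard java.util.function / java.util.stream imports are assumed.

    // An event stays visible unless some active quick-hide filter matches it.
    Predicate<TimelineEvent> visible = event ->
            quickHideFilters.stream()
                    .filter(DescriptionFilterState::isActive)
                    .map(DescriptionFilterState::getFilter)
                    .noneMatch(uiFilter -> uiFilter.test(event));

    List<TimelineEvent> shownEvents = allEvents.stream()
            .filter(visible)
            .collect(Collectors.toList());
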
+ * + * @param the type of elements to return + * @param groupConcat a string containing the group_concat result ( a comma + * separated list) + * @param mapper a function from String to X + * + * @return a Set of X, each element mapped from one element of the original + * comma delimited string + * + * @throws org.sleuthkit.datamodel.TskCoreException + */ + public static List unGroupConcat(String groupConcat, CheckedFunction mapper) throws TskCoreException { + + if (isBlank(groupConcat)) { + return Collections.emptyList(); + } + + List result = new ArrayList<>(); + for (String s : groupConcat.split(",")) { + result.add(mapper.apply(s)); + } + return result; + } } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/navpanel/HashHitGroupList.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/navpanel/HashHitGroupList.java index 8c5d1dfdae..6e4f48fdaa 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/navpanel/HashHitGroupList.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/navpanel/HashHitGroupList.java @@ -19,6 +19,9 @@ package org.sleuthkit.autopsy.imagegallery.gui.navpanel; import java.util.function.Function; +import javafx.application.Platform; +import javafx.collections.FXCollections; +import javafx.collections.ListChangeListener; import javafx.collections.transformation.SortedList; import javafx.fxml.FXML; import javafx.scene.control.Label; @@ -45,7 +48,7 @@ final public class HashHitGroupList extends NavPanel { * sorting in the ListView. */ @ThreadConfined(type = ThreadConfined.ThreadType.JFX) - private SortedList sorted; + private SortedList sorted = FXCollections.observableArrayList().sorted(); public HashHitGroupList(ImageGalleryController controller) { super(controller); @@ -78,9 +81,24 @@ final public class HashHitGroupList extends NavPanel { setGraphic(new ImageView("org/sleuthkit/autopsy/imagegallery/images/hashset_hits.png")); //NON-NLS getBorderPane().setCenter(groupList); - sorted = getController().getGroupManager().getAnalyzedGroups() - .filtered(group -> group.getHashSetHitsCount() > 0) - .sorted(getDefaultComparator()); + getController().getGroupManager().getAnalyzedGroups().addListener(new ListChangeListener() { + @Override + public void onChanged(ListChangeListener.Change c) { + + while (c.next()) { + + c.getAddedSubList().forEach((DrawableGroup t) -> { + if (t.getHashSetHitsCount() > 0) { + Platform.runLater(() -> sorted.add(t)); + } + }); + c.getRemoved().forEach((DrawableGroup t) -> { + Platform.runLater(() -> sorted.remove(t)); + }); + + } + } + }); GroupCellFactory groupCellFactory = new GroupCellFactory(getController(), comparatorProperty()); groupList.setCellFactory(groupCellFactory::getListCell); @@ -89,7 +107,8 @@ final public class HashHitGroupList extends NavPanel { @ThreadConfined(type = ThreadConfined.ThreadType.JFX) @Override - void setFocusedGroup(DrawableGroup grouping) { + void setFocusedGroup(DrawableGroup grouping + ) { groupList.getSelectionModel().select(grouping); } diff --git a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties index 0798d3e75e..2b9ec56deb 100644 --- a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties +++ b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties @@ -1,5 +1,5 @@ #Updated by build script -#Tue, 11 Dec 2018 14:41:40 -0500 +#Wed, 19 Dec 2018 18:37:27 +0100 LBL_splash_window_title=Starting Autopsy SPLASH_HEIGHT=314 SPLASH_WIDTH=538 diff --git 
a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties
index 1080917618..3d4b7a2318 100644
--- a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties
+++ b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties
@@ -1,4 +1,4 @@
 #Updated by build script
-#Tue, 11 Dec 2018 14:41:40 -0500
+#Wed, 19 Dec 2018 18:37:27 +0100
 CTL_MainWindow_Title=Autopsy 4.9.1
 CTL_MainWindow_Title_No_Project=Autopsy 4.9.1
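
Referring back to the TimelineDBUtils change above, a usage sketch (illustrative): unGroupConcat splits a comma-separated group_concat/string_agg result back into typed values, returning an empty list for null or blank input. The "1,2,3" string stands in for a column read from a query result, and the call is assumed to sit in a method that declares throws TskCoreException, since the CheckedFunction mapper may throw it.

    // Rebuild typed event IDs from a hypothetical group_concat(event_id) column.
    List<Long> eventIDs = TimelineDBUtils.unGroupConcat("1,2,3", Long::valueOf);

    // Blank or null input yields an empty list rather than a parse error.
    List<Long> noIDs = TimelineDBUtils.unGroupConcat(null, Long::valueOf);
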