Merge pull request #4264 from millmanorama/274-TL_filetype_filter

274 tl filetype filter
Richard Cordovano 2018-11-09 10:11:30 -05:00 committed by GitHub
commit d2abd39e38
11 changed files with 238 additions and 96 deletions

View File

@@ -42,6 +42,7 @@ import javafx.collections.ObservableList;
 import javafx.collections.ObservableMap;
 import javafx.collections.ObservableSet;
 import javafx.collections.SetChangeListener;
+import static org.apache.commons.lang3.StringUtils.isBlank;
 import org.joda.time.DateTimeZone;
 import org.joda.time.Interval;
 import org.openide.util.NbBundle;
@@ -62,6 +63,7 @@ import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
 import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
 import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl;
 import org.sleuthkit.autopsy.timeline.utils.CheckedFunction;
+import org.sleuthkit.autopsy.timeline.utils.FilterUtils;
 import org.sleuthkit.autopsy.timeline.zooming.ZoomState;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.BlackboardArtifact;
@@ -80,6 +82,8 @@ import org.sleuthkit.datamodel.timeline.TimelineEvent;
 import org.sleuthkit.datamodel.timeline.TimelineFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.DataSourceFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.DataSourcesFilter;
+import org.sleuthkit.datamodel.timeline.TimelineFilter.EventTypeFilter;
+import org.sleuthkit.datamodel.timeline.TimelineFilter.FileTypesFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.HashHitsFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.HashSetFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.HideKnownFilter;
@@ -87,7 +91,6 @@ import org.sleuthkit.datamodel.timeline.TimelineFilter.RootFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.TagNameFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.TagsFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.TextFilter;
-import org.sleuthkit.datamodel.timeline.TimelineFilter.TypeFilter;
 /**
 * This class acts as the model for a TimelineView
@@ -101,9 +104,9 @@ import org.sleuthkit.datamodel.timeline.TimelineFilter.TypeFilter;
 * as to avoid unnecessary db calls through the TimelineManager -jm
 *
 * Concurrency Policy: TimelineManager is internally synchronized, so methods
-* that only access the repo atomically do not need further synchronization. All
-* other member state variables should only be accessed with intrinsic lock of
-* containing FilteredEventsModel held.
+* that only access the TimelineManager atomically do not need further
+* synchronization. All other member state variables should only be accessed
+* with intrinsic lock of containing FilteredEventsModel held.
 *
 */
 public final class FilteredEventsModel {
@@ -121,15 +124,18 @@ public final class FilteredEventsModel {
 private final ReadOnlyObjectWrapper<ZoomState> requestedZoomState = new ReadOnlyObjectWrapper<>();
 private final ReadOnlyObjectWrapper< EventTypeZoomLevel> requestedTypeZoom = new ReadOnlyObjectWrapper<>(EventTypeZoomLevel.BASE_TYPE);
 private final ReadOnlyObjectWrapper< DescriptionLoD> requestedLOD = new ReadOnlyObjectWrapper<>(DescriptionLoD.SHORT);
+// end Filter and zoome state
 //caches
 private final LoadingCache<Object, Long> maxCache;
 private final LoadingCache<Object, Long> minCache;
 private final LoadingCache<Long, TimelineEvent> idToEventCache;
 private final LoadingCache<ZoomState, Map<EventType, Long>> eventCountsCache;
+/** Map from datasource id to datasource name. */
 private final ObservableMap<Long, String> datasourcesMap = FXCollections.observableHashMap();
 private final ObservableSet< String> hashSets = FXCollections.observableSet();
 private final ObservableList<TagName> tagNames = FXCollections.observableArrayList();
+// end caches
 public FilteredEventsModel(Case autoCase, ReadOnlyObjectProperty<ZoomState> currentStateProperty) throws TskCoreException {
 this.autoCase = autoCase;
@@ -151,19 +157,19 @@ public final class FilteredEventsModel {
 minCache = CacheBuilder.newBuilder()
 .build(new CacheLoaderImpl<>(ignored -> eventManager.getMinTime()));
-getDatasourcesMap().addListener((MapChangeListener.Change<? extends Long, ? extends String> change) -> {
+datasourcesMap.addListener((MapChangeListener.Change<? extends Long, ? extends String> change) -> {
 DataSourceFilter dataSourceFilter = new DataSourceFilter(change.getValueAdded(), change.getKey());
 RootFilterState rootFilter = filterProperty().get();
 rootFilter.getDataSourcesFilterState().getFilter().getSubFilters().add(dataSourceFilter);
 requestedFilter.set(rootFilter.copyOf());
 });
-getHashSets().addListener((SetChangeListener.Change< ? extends String> change) -> {
+hashSets.addListener((SetChangeListener.Change< ? extends String> change) -> {
 HashSetFilter hashSetFilter = new HashSetFilter(change.getElementAdded());
 RootFilterState rootFilter = filterProperty().get();
 rootFilter.getHashHitsFilterState().getFilter().getSubFilters().add(hashSetFilter);
 requestedFilter.set(rootFilter.copyOf());
 });
-getTagNames().addListener((ListChangeListener.Change<? extends TagName> change) -> {
+tagNames.addListener((ListChangeListener.Change<? extends TagName> change) -> {
 RootFilterState rootFilter = filterProperty().get();
 syncTagsFilter(rootFilter);
 requestedFilter.set(rootFilter.copyOf());
@@ -215,18 +221,6 @@ public final class FilteredEventsModel {
 return autoCase.getSleuthkitCase();
 }
-public ObservableList<TagName> getTagNames() {
-return tagNames;
-}
-synchronized public ObservableMap<Long, String> getDatasourcesMap() {
-return datasourcesMap;
-}
-synchronized public ObservableSet< String> getHashSets() {
-return hashSets;
-}
 public Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter, DateTimeZone timeZone) throws TskCoreException {
 return eventManager.getSpanningInterval(timeRange, filter, timeZone);
 }
@@ -273,16 +267,17 @@
 * for tags that are not in use in the case, and add new filters for tags
 * that don't have them. New filters are selected by default.
 *
-* @param rootFilter the filter state to modify so it is consistent with the
-* tags in use in the case
+* @param rootFilterState the filter state to modify so it is consistent
+* with the tags in use in the case
 */
-public void syncTagsFilter(RootFilterState rootFilter) {
-for (TagName tagName : tagNames) {
-rootFilter.getTagsFilterState().getFilter().addSubFilter(new TagNameFilter(tagName));
-}
-for (FilterState<? extends TagNameFilter> filterState : rootFilter.getTagsFilterState().getSubFilterStates()) {
+public void syncTagsFilter(RootFilterState rootFilterState) {
+tagNames.forEach((tagName) -> {
+rootFilterState.getTagsFilterState().getFilter().addSubFilter(new TagNameFilter(tagName));
+});
+for (FilterState<? extends TagNameFilter> filterState : rootFilterState.getTagsFilterState().getSubFilterStates()) {
 filterState.setDisabled(tagNames.contains(filterState.getFilter().getTagName()) == false);
 }
 }
 /**
@@ -342,31 +337,27 @@
 /**
 * @return the default filter used at startup
 */
-public RootFilterState getDefaultFilter() {
+public synchronized RootFilterState getDefaultFilter() {
 DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();
-getDatasourcesMap().entrySet().stream().forEach((Map.Entry<Long, String> entry) -> {
-DataSourceFilter dataSourceFilter = new DataSourceFilter(entry.getValue(), entry.getKey());
-dataSourcesFilter.addSubFilter(dataSourceFilter);
-});
+datasourcesMap.entrySet().forEach(dataSourceEntry
+-> dataSourcesFilter.addSubFilter(new DataSourceFilter(dataSourceEntry.getValue(), dataSourceEntry.getKey()))
+);
 HashHitsFilter hashHitsFilter = new HashHitsFilter();
-getHashSets().forEach(hashSetName -> {
-HashSetFilter hashSetFilter = new HashSetFilter(hashSetName);
-hashHitsFilter.addSubFilter(hashSetFilter);
-});
+hashSets.stream().map(HashSetFilter::new).forEach(hashHitsFilter::addSubFilter);
 TagsFilter tagsFilter = new TagsFilter();
-getTagNames().stream().forEach(tagName -> {
-TagNameFilter tagNameFilter = new TagNameFilter(tagName);
-tagsFilter.addSubFilter(tagNameFilter);
-});
+tagNames.stream().map(TagNameFilter::new).forEach(tagsFilter::addSubFilter);
+FileTypesFilter fileTypesFilter = FilterUtils.createDefaultFileTypesFilter();
 return new RootFilterState(new RootFilter(new HideKnownFilter(),
 tagsFilter,
 hashHitsFilter,
 new TextFilter(),
-new TypeFilter(EventType.ROOT_EVENT_TYPE),
+new EventTypeFilter(EventType.ROOT_EVENT_TYPE),
 dataSourcesFilter,
+fileTypesFilter,
 Collections.emptySet()));
 }
@@ -397,6 +388,8 @@
 * @param eventIDsWithTags the event ids to get the tag counts map for
 *
 * @return a map from tagname displayname to count of applications
+*
+* @throws org.sleuthkit.datamodel.TskCoreException
 */
 public Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) throws TskCoreException {
 return eventManager.getTagCountsByTagName(eventIDsWithTags);
@@ -415,7 +408,7 @@
 }
 /**
-* return the number of events that pass the requested filter and are within
+* Return the number of events that pass the requested filter and are within
 * the given time range.
 *
 * NOTE: this method does not change the requested time range
@@ -442,15 +435,23 @@
 }
 /**
-* @return The smallest interval spanning all the events from the
-* repository, ignoring any filters or requested ranges.
+* @return The smallest interval spanning all the events from the case,
+* ignoring any filters or requested ranges.
+*
+* @throws org.sleuthkit.datamodel.TskCoreException
 */
 public Interval getSpanningInterval() throws TskCoreException {
 return new Interval(getMinTime() * 1000, 1000 + getMaxTime() * 1000);
 }
 /**
+* Get the smallest interval spanning all the given events.
+*
+* @param eventIDs The IDs of the events to get a spanning interval arround.
+*
 * @return the smallest interval spanning all the given events
+*
+* @throws org.sleuthkit.datamodel.TskCoreException
 */
 public Interval getSpanningInterval(Collection<Long> eventIDs) throws TskCoreException {
 return eventManager.getSpanningInterval(eventIDs);
@@ -460,6 +461,8 @@
 * @return the time (in seconds from unix epoch) of the absolutely first
 * event available from the repository, ignoring any filters or
 * requested ranges
+*
+* @throws org.sleuthkit.datamodel.TskCoreException
 */
 public Long getMinTime() throws TskCoreException {
 try {
@@ -473,6 +476,8 @@
 * @return the time (in seconds from unix epoch) of the absolutely last
 * event available from the repository, ignoring any filters or
 * requested ranges
+*
+* @throws org.sleuthkit.datamodel.TskCoreException
 */
 public Long getMaxTime() throws TskCoreException {
 try {
@@ -536,6 +541,8 @@
 *
 * @return A List of event IDs for the events that are derived from the
 * given file.
+*
+* @throws org.sleuthkit.datamodel.TskCoreException
 */
 public List<Long> getEventIDsForFile(AbstractFile file, boolean includeDerivedArtifacts) throws TskCoreException {
 return eventManager.getEventIDsForFile(file, includeDerivedArtifacts);
@@ -549,6 +556,8 @@
 *
 * @return A List of event IDs for the events that are derived from the
 * given artifact.
+*
+* @throws org.sleuthkit.datamodel.TskCoreException
 */
 public List<Long> getEventIDsForArtifact(BlackboardArtifact artifact) throws TskCoreException {
 return eventManager.getEventIDsForArtifact(artifact);
@@ -617,6 +626,8 @@
 /**
 * (Re)Post an AutopsyEvent received from another event distribution system
 * locally to all registered subscribers.
+*
+* @param event The event to re-post.
 */
 public void postAutopsyEventLocally(AutopsyEvent event) {
 eventbus.post(event);
@@ -694,10 +705,12 @@
 *
 * @return a Set of X, each element mapped from one element of the original
 * comma delimited string
+*
+* @throws org.sleuthkit.datamodel.TskCoreException
 */
 public static <X> List<X> unGroupConcat(String groupConcat, CheckedFunction<String, X, TskCoreException> mapper) throws TskCoreException {
-if (org.apache.commons.lang3.StringUtils.isBlank(groupConcat)) {
+if (isBlank(groupConcat)) {
 return Collections.emptyList();
 }

View File

@@ -86,7 +86,7 @@ import org.sleuthkit.datamodel.timeline.EventTypeZoomLevel;
 import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
 import org.sleuthkit.autopsy.timeline.zooming.TimeUnits;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.DescriptionFilter;
-import org.sleuthkit.datamodel.timeline.TimelineFilter.TypeFilter;
+import org.sleuthkit.datamodel.timeline.TimelineFilter.EventTypeFilter;
 /**
 * Controller in the MVC design along with FilteredEventsModel TimeLineView.
@@ -607,7 +607,7 @@ public class TimeLineController {
 @Override
 protected Collection< Long> call() throws Exception {
 synchronized (TimeLineController.this) {
-return filteredEvents.getEventIDs(timeRange, new TypeFilter(type));
+return filteredEvents.getEventIDs(timeRange, new EventTypeFilter(type));
 }
 }

View File

@@ -1,18 +0,0 @@
-/*
-* Autopsy Forensic Browser
-*
-* Copyright 2014 Basis Technology Corp.
-* Contact: carrier <at> sleuthkit <dot> org
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/

View File

@@ -58,11 +58,11 @@ import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.SingleDetailsViewE
 import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
 import org.sleuthkit.autopsy.timeline.zooming.ZoomState;
 import org.sleuthkit.datamodel.DescriptionLoD;
+import org.sleuthkit.datamodel.TskCoreException;
 import org.sleuthkit.datamodel.timeline.EventTypeZoomLevel;
 import org.sleuthkit.datamodel.timeline.TimelineEvent;
-import org.sleuthkit.datamodel.TskCoreException;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.DescriptionFilter;
-import org.sleuthkit.datamodel.timeline.TimelineFilter.TypeFilter;
+import org.sleuthkit.datamodel.timeline.TimelineFilter.EventTypeFilter;
 /**
 * A Node to represent an EventCluster in a DetailsChart
@@ -179,7 +179,7 @@ final class EventClusterNode extends MultiEventNodeBase<EventCluster, EventStrip
 final RootFilterState subClusterFilter = eventsModel.getFilterState().copyOf();
 subClusterFilter.getFilter().getSubFilters().addAll(
 new DescriptionFilter(getEvent().getDescriptionLoD(), getDescription(), DescriptionFilter.FilterMode.INCLUDE),
-new TypeFilter(getEventType()));
+new EventTypeFilter(getEventType()));
 final Interval subClusterSpan = new Interval(getStartMillis(), getEndMillis() + 1000);
 final EventTypeZoomLevel eventTypeZoomLevel = eventsModel.eventTypeZoomProperty().get();
 final ZoomState zoom = new ZoomState(subClusterSpan, eventTypeZoomLevel, subClusterFilter, getDescriptionLoD());

View File

@@ -157,15 +157,14 @@ final public class DetailsViewModel {
 String descriptionColumn = eventManager.getDescriptionColumn(descriptionLOD);
 final boolean useSubTypes = typeZoomLevel.equals(EventTypeZoomLevel.SUB_TYPE);
 String typeColumn = TimelineManager.typeColumnHelper(useSubTypes);
-final boolean needsTags = filterState.hasActiveTagsFilters();
-final boolean needsHashSets = filterState.hasActiveHashFilters();
 TimelineDBUtils dbUtils = new TimelineDBUtils(sleuthkitCase);
 String querySql = "SELECT " + formatTimeFunctionHelper(rangeInfo.getPeriodSize().toChronoUnit(), timeZone) + " AS interval, " // NON-NLS
 + dbUtils.csvAggFunction("tsk_events.event_id") + " as event_ids, " //NON-NLS
 + dbUtils.csvAggFunction("CASE WHEN hash_hit = 1 THEN tsk_events.event_id ELSE NULL END") + " as hash_hits, " //NON-NLS
 + dbUtils.csvAggFunction("CASE WHEN tagged = 1 THEN tsk_events.event_id ELSE NULL END") + " as taggeds, " //NON-NLS
 + " min(time) AS minTime, max(time) AS maxTime, " + typeColumn + ", " + descriptionColumn // NON-NLS
-+ " FROM " + TimelineManager.getAugmentedEventsTablesSQL(needsTags, needsHashSets) // NON-NLS
++ " FROM " + TimelineManager.getAugmentedEventsTablesSQL(filterState.getActiveFilter()) // NON-NLS
 + " WHERE time >= " + start + " AND time < " + end + " AND " + eventManager.getSQLWhere(filterState.getActiveFilter()) // NON-NLS
 + " GROUP BY interval, " + typeColumn + " , " + descriptionColumn // NON-NLS
 + " ORDER BY min(time)"; // NON-NLS

View File

@@ -117,7 +117,7 @@ final public class FilterSetPanel extends BorderPane {
 //type is the only filter expanded initialy
 expansionMap.put(controller.getEventsModel().getFilterState().getFilter(), true);
-expansionMap.put(controller.getEventsModel().getFilterState().getTypeFilterState().getFilter(), true);
+expansionMap.put(controller.getEventsModel().getFilterState().getEventTypeFilterState().getFilter(), true);
 this.filteredEvents.eventTypeZoomProperty().addListener((Observable observable) -> applyFilters());
 this.filteredEvents.descriptionLODProperty().addListener((Observable observable1) -> applyFilters());

View File

@@ -34,7 +34,7 @@ import org.sleuthkit.autopsy.timeline.ui.EventTypeUtils;
 import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
 import org.sleuthkit.datamodel.timeline.EventTypeZoomLevel;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.TextFilter;
-import org.sleuthkit.datamodel.timeline.TimelineFilter.TypeFilter;
+import org.sleuthkit.datamodel.timeline.TimelineFilter.EventTypeFilter;
 /**
 * A TreeTableCell that shows an icon and color corresponding to the represented
@@ -67,8 +67,8 @@ final class LegendCell extends TreeTableCell<FilterState<?>, FilterState<?>> {
 } else {
 //TODO: make some subclasses rather than use this if else chain.
-if (item.getFilter() instanceof TypeFilter) {
-TypeFilter filter = (TypeFilter) item.getFilter();
+if (item.getFilter() instanceof EventTypeFilter) {
+EventTypeFilter filter = (EventTypeFilter) item.getFilter();
 Rectangle rect = new Rectangle(20, 20);
 rect.setArcHeight(5);
@@ -104,7 +104,7 @@ final class LegendCell extends TreeTableCell<FilterState<?>, FilterState<?>> {
 }
 }
-private void setLegendColor(TypeFilter filter, Rectangle rect, EventTypeZoomLevel eventTypeZoom) {
+private void setLegendColor(EventTypeFilter filter, Rectangle rect, EventTypeZoomLevel eventTypeZoom) {
 //only show legend color if filter is of the same zoomlevel as requested in filteredEvents
 if (eventTypeZoom.equals(filter.getEventType().getZoomLevel())) {
 Platform.runLater(() -> {

View File

@@ -23,9 +23,9 @@ import javafx.beans.property.ReadOnlyBooleanProperty;
 import org.sleuthkit.datamodel.timeline.TimelineFilter;
 /**
+* The state of a filter: selected, disabled, active, etc.
 *
-*
-* @param <FilterType>
+* @param <FilterType> The type of filter this is the state for.
 */
 public interface FilterState<FilterType extends TimelineFilter> {

View File

@@ -26,23 +26,26 @@ import org.python.google.common.collect.Lists;
 import org.sleuthkit.datamodel.timeline.TimelineFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.DataSourceFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.DataSourcesFilter;
+import org.sleuthkit.datamodel.timeline.TimelineFilter.EventTypeFilter;
+import org.sleuthkit.datamodel.timeline.TimelineFilter.FileTypeFilter;
+import org.sleuthkit.datamodel.timeline.TimelineFilter.FileTypesFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.HashHitsFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.HashSetFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.HideKnownFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.RootFilter;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.TextFilter;
-import org.sleuthkit.datamodel.timeline.TimelineFilter.TypeFilter;
 /**
 */
 public class RootFilterState implements FilterState<RootFilter>, CompoundFilterState< TimelineFilter, RootFilter> {
-private final CompoundFilterState<TypeFilter, TypeFilter> typeFilterState;
+private final CompoundFilterState<EventTypeFilter, EventTypeFilter> eventTypeFilterState;
 private final DefaultFilterState<HideKnownFilter> knownFilterState;
 private final DefaultFilterState<TextFilter> textFilterState;
 private final TagsFilterState tagsFilterState;
 private final CompoundFilterState<HashSetFilter, HashHitsFilter> hashHitsFilterState;
 private final CompoundFilterState<DataSourceFilter, DataSourcesFilter> dataSourcesFilterState;
+private final CompoundFilterState<TimelineFilter.FileTypeFilter, TimelineFilter.FileTypesFilter> fileTypesFilterState;
 private static final ReadOnlyBooleanProperty ALWAYS_TRUE = new ReadOnlyBooleanWrapper(true).getReadOnlyProperty();
 private final static ReadOnlyBooleanProperty ALWAYS_FALSE = new ReadOnlyBooleanWrapper(false).getReadOnlyProperty();
@@ -52,34 +55,40 @@ public class RootFilterState implements FilterState<RootFilter>, CompoundFilterS
 public RootFilterState(RootFilter delegate) {
 this(delegate,
-new CompoundFilterStateImpl<>(delegate.getTypeFilter()),
+new CompoundFilterStateImpl<>(delegate.getEventTypeFilter()),
 new DefaultFilterState<>(delegate.getKnownFilter()),
 new DefaultFilterState<>(delegate.getTextFilter()),
 new TagsFilterState(delegate.getTagsFilter()),
 new CompoundFilterStateImpl<>(delegate.getHashHitsFilter()),
-new CompoundFilterStateImpl<>(delegate.getDataSourcesFilter())
+new CompoundFilterStateImpl<>(delegate.getDataSourcesFilter()),
+new CompoundFilterStateImpl<>(delegate.getFileTypesFilter())
 );
 }
 private RootFilterState(RootFilter delegate,
-CompoundFilterState<TypeFilter, TypeFilter> typeFilterState,
+CompoundFilterState<EventTypeFilter, EventTypeFilter> eventTypeFilterState,
 DefaultFilterState<HideKnownFilter> knownFilterState,
 DefaultFilterState<TextFilter> textFilterState,
 TagsFilterState tagsFilterState,
 CompoundFilterState<HashSetFilter, HashHitsFilter> hashHitsFilterState,
-CompoundFilterState<DataSourceFilter, DataSourcesFilter> dataSourcesFilterState) {
+CompoundFilterState<DataSourceFilter, DataSourcesFilter> dataSourcesFilterState,
+CompoundFilterState<FileTypeFilter, FileTypesFilter> fileTypesFilterState) {
 this.delegate = delegate;
-this.typeFilterState = typeFilterState;
+this.eventTypeFilterState = eventTypeFilterState;
 this.knownFilterState = knownFilterState;
 this.textFilterState = textFilterState;
 this.tagsFilterState = tagsFilterState;
 this.hashHitsFilterState = hashHitsFilterState;
 this.dataSourcesFilterState = dataSourcesFilterState;
+this.fileTypesFilterState = fileTypesFilterState;
 subFilterStates.addAll(
-knownFilterState, textFilterState,
+knownFilterState,
+textFilterState,
 tagsFilterState,
 hashHitsFilterState,
-dataSourcesFilterState, typeFilterState);
+dataSourcesFilterState,
+fileTypesFilterState,
+eventTypeFilterState);
 }
 /**
@@ -112,16 +121,18 @@ public class RootFilterState implements FilterState<RootFilter>, CompoundFilterS
 @Override
 public RootFilterState copyOf() {
 return new RootFilterState(getFilter().copyOf(),
-getTypeFilterState().copyOf(),
+getEventTypeFilterState().copyOf(),
 getKnownFilterState().copyOf(),
 getTextFilterState().copyOf(),
 getTagsFilterState().copyOf(),
 getHashHitsFilterState().copyOf(),
-getDataSourcesFilterState().copyOf());
+getDataSourcesFilterState().copyOf(),
+getFileTypesFilterState().copyOf()
+);
 }
-public CompoundFilterState<TypeFilter, TypeFilter> getTypeFilterState() {
-return typeFilterState;
+public CompoundFilterState<EventTypeFilter, EventTypeFilter> getEventTypeFilterState() {
+return eventTypeFilterState;
 }
 public DefaultFilterState<HideKnownFilter> getKnownFilterState() {
@@ -144,14 +155,19 @@ public class RootFilterState implements FilterState<RootFilter>, CompoundFilterS
 return dataSourcesFilterState;
 }
+public CompoundFilterState<FileTypeFilter, FileTypesFilter> getFileTypesFilterState() {
+return fileTypesFilterState;
+}
 @Override
 public RootFilter getActiveFilter() {
 return new RootFilter(knownFilterState.getActiveFilter(),
 tagsFilterState.getActiveFilter(),
 hashHitsFilterState.getActiveFilter(),
 textFilterState.getActiveFilter(),
-typeFilterState.getActiveFilter(),
+eventTypeFilterState.getActiveFilter(),
 dataSourcesFilterState.getActiveFilter(),
+fileTypesFilterState.getActiveFilter(),
 Lists.transform(subFilterStates, FilterState::getActiveFilter));
 }

View File

@@ -85,13 +85,12 @@ public class ListViewModel {
 }
 ArrayList<CombinedEvent> combinedEvents = new ArrayList<>();
-final boolean needsTags = filterState.hasActiveTagsFilters();
-final boolean needsHashSets = filterState.hasActiveHashFilters();
 TimelineDBUtils dbUtils = new TimelineDBUtils(sleuthkitCase);
 final String querySql = "SELECT full_description, time, file_obj_id, "
 + dbUtils.csvAggFunction("CAST(tsk_events.event_id AS VARCHAR)") + " AS eventIDs, "
 + dbUtils.csvAggFunction("CAST(sub_type AS VARCHAR)") + " AS eventTypes"
-+ " FROM " + TimelineManager.getAugmentedEventsTablesSQL(needsTags, needsHashSets)
++ " FROM " + TimelineManager.getAugmentedEventsTablesSQL(filterState.getActiveFilter())
 + " WHERE time >= " + startTime + " AND time <" + endTime + " AND " + eventManager.getSQLWhere(filterState.getActiveFilter())
 + " GROUP BY time, full_description, file_obj_id ORDER BY time ASC, full_description";

View File

@@ -0,0 +1,133 @@
+/*
+* Autopsy Forensic Browser
+*
+* Copyright 2018 Basis Technology Corp.
+* Contact: carrier <at> sleuthkit <dot> org
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.sleuthkit.autopsy.timeline.utils;
+import com.google.common.net.MediaType;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.openide.util.NbBundle;
+import org.sleuthkit.datamodel.TimelineManager;
+import org.sleuthkit.datamodel.timeline.TimelineFilter.FileTypeFilter;
+import org.sleuthkit.datamodel.timeline.TimelineFilter.FileTypesFilter;
+/**
+* Utilities to deal with TimelineFilters
+*/
+public final class FilterUtils {
+private static final Set<MediaType> MEDIA_MIME_TYPES = Stream.of(
+"image/*",//NON-NLS
+"video/*",//NON-NLS
+"audio/*",//NON-NLS
+"application/vnd.ms-asf", //NON-NLS
+"application/vnd.rn-realmedia", //NON-NLS
+"application/x-shockwave-flash" //NON-NLS
+).map(MediaType::parse).collect(Collectors.toSet());
+private static final Set<MediaType> EXECUTABLE_MIME_TYPES = Stream.of(
+"application/x-bat",//NON-NLS
+"application/x-dosexec",//NON-NLS
+"application/vnd.microsoft.portable-executable",//NON-NLS
+"application/x-msdownload",//NON-NLS
+"application/exe",//NON-NLS
+"application/x-exe",//NON-NLS
+"application/dos-exe",//NON-NLS
+"vms/exe",//NON-NLS
+"application/x-winexe",//NON-NLS
+"application/msdos-windows",//NON-NLS
+"application/x-msdos-program"//NON-NLS
+).map(MediaType::parse).collect(Collectors.toSet());
+private static final Set<MediaType> DOCUMENT_MIME_TYPES = Stream.of(
+"text/*", //NON-NLS
+"application/rtf", //NON-NLS
+"application/pdf", //NON-NLS
+"application/json", //NON-NLS
+"application/javascript", //NON-NLS
+"application/xml", //NON-NLS
+"application/x-msoffice", //NON-NLS
+"application/x-ooxml", //NON-NLS
+"application/msword", //NON-NLS
+"application/vnd.openxmlformats-officedocument.wordprocessingml.document", //NON-NLS
+"application/vnd.ms-powerpoint", //NON-NLS
+"application/vnd.openxmlformats-officedocument.presentationml.presentation", //NON-NLS
+"application/vnd.ms-excel", //NON-NLS
+"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", //NON-NLS
+"application/vnd.oasis.opendocument.presentation", //NON-NLS
+"application/vnd.oasis.opendocument.spreadsheet", //NON-NLS
+"application/vnd.oasis.opendocument.text" //NON-NLS
+).map(MediaType::parse).collect(Collectors.toSet());
+private static final Set<MediaType> NON_OTHER_MIME_TYPES = new HashSet<>();
+static {
+NON_OTHER_MIME_TYPES.addAll(MEDIA_MIME_TYPES);
+NON_OTHER_MIME_TYPES.addAll(DOCUMENT_MIME_TYPES);
+NON_OTHER_MIME_TYPES.addAll(EXECUTABLE_MIME_TYPES);
+}
+private FilterUtils() {
+}
+/**
+* Create a new FileTypesFilter with the default FileTypeFilters for Media,
+* Documents, Executables, and Other.
+*
+* @return The new FileTypesFilter.
+*/
+@NbBundle.Messages({
+"FilterUtils.mediaFilter.displayName=Media",
+"FilterUtils.documentsFilter.displayName=Documents",
+"FilterUtils.executablesFilter.displayName=Executables",
+"FilterUtils.otherFilter.displayName=Other"})
+public static FileTypesFilter createDefaultFileTypesFilter() {
+FileTypesFilter fileTypesFilter = new FileTypesFilter();
+fileTypesFilter.addSubFilter(new FileTypeFilter(Bundle.FilterUtils_mediaFilter_displayName(), MEDIA_MIME_TYPES));
+fileTypesFilter.addSubFilter(new FileTypeFilter(Bundle.FilterUtils_documentsFilter_displayName(), DOCUMENT_MIME_TYPES));
+fileTypesFilter.addSubFilter(new FileTypeFilter(Bundle.FilterUtils_executablesFilter_displayName(), EXECUTABLE_MIME_TYPES));
+fileTypesFilter.addSubFilter(new InverseFileTypeFilter(Bundle.FilterUtils_otherFilter_displayName(), NON_OTHER_MIME_TYPES));
+return fileTypesFilter;
+}
+/**
+* Subclass of FileTypeFilter that excludes rather than includes the given
+* MediaTypes.
+*/
+private static class InverseFileTypeFilter extends FileTypeFilter {
+InverseFileTypeFilter(String displayName, Collection<MediaType> mediaTypes) {
+super(displayName, mediaTypes);
+}
+@Override
+public InverseFileTypeFilter copyOf() {
+return new InverseFileTypeFilter("Other", NON_OTHER_MIME_TYPES);
+}
+@Override
+protected String getSQLWhere(TimelineManager manager) {
+return " NOT " + super.getSQLWhere(manager);
+}
+}
+}
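
Taken together, the pieces in this PR are wired up as in the reworked FilteredEventsModel.getDefaultFilter() hunk near the top. A condensed sketch, not a verbatim Autopsy call site (imports and the surrounding class are omitted; the argument order follows the RootFilter constructor call shown in that hunk):

    // Build the default Media / Documents / Executables / Other groupings.
    FileTypesFilter fileTypesFilter = FilterUtils.createDefaultFileTypesFilter();

    // Wire the new filter into the root filter next to the other top-level filters.
    RootFilterState defaultState = new RootFilterState(new RootFilter(
            new HideKnownFilter(),
            new TagsFilter(),
            new HashHitsFilter(),
            new TextFilter(),
            new EventTypeFilter(EventType.ROOT_EVENT_TYPE),
            new DataSourcesFilter(),
            fileTypesFilter,
            Collections.emptySet()));

    // The filtering UI can then read or toggle the per-category states through
    // the accessor added to RootFilterState in this PR.
    CompoundFilterState<FileTypeFilter, FileTypesFilter> fileTypesState =
            defaultState.getFileTypesFilterState();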