TL file type filter first commit

This commit is contained in:
millmanorama 2018-11-06 11:55:39 +01:00
parent 9ef65ebca3
commit 72c7e856e8
8 changed files with 94 additions and 32 deletions

View File

@ -20,8 +20,13 @@ package org.sleuthkit.autopsy.timeline;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Multimap;
import com.google.common.eventbus.EventBus;
import com.google.common.net.MediaType;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@ -42,8 +47,10 @@ import javafx.collections.ObservableList;
import javafx.collections.ObservableMap;
import javafx.collections.ObservableSet;
import javafx.collections.SetChangeListener;
import org.apache.commons.lang3.StringUtils;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagAddedEvent;
@ -87,7 +94,7 @@ import org.sleuthkit.datamodel.timeline.TimelineFilter.RootFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.TagNameFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.TagsFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.TextFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.TypeFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.EventTypeFilter;
/**
* This class acts as the model for a TimelineView
@ -129,6 +136,7 @@ public final class FilteredEventsModel {
private final LoadingCache<ZoomState, Map<EventType, Long>> eventCountsCache;
private final ObservableMap<Long, String> datasourcesMap = FXCollections.observableHashMap();
private final ObservableSet< String> hashSets = FXCollections.observableSet();
private final ObservableMap< MediaType, Long> fileTypesMap = FXCollections.observableHashMap();
private final ObservableList<TagName> tagNames = FXCollections.observableArrayList();
public FilteredEventsModel(Case autoCase, ReadOnlyObjectProperty<ZoomState> currentStateProperty) throws TskCoreException {
@ -227,6 +235,10 @@ public final class FilteredEventsModel {
return hashSets;
}
/**
 * Get the observable map from file MIME type (as a Guava MediaType) to the
 * number of tsk_files rows with that mime_type. The map is populated from
 * the "SELECT mime_type, COUNT(mime_type) ... FROM tsk_files" query run
 * elsewhere in this class.
 *
 * @return The ObservableMap of MediaType to file count.
 */
private ObservableMap<MediaType, Long> getMediaTypes() {
return fileTypesMap;
}
/**
 * Get the smallest interval spanning all the events that pass the given
 * filter within the given time range. Delegates directly to
 * eventManager.getSpanningInterval.
 *
 * @param timeRange The interval to search within.
 * @param filter    The filter that events must pass.
 * @param timeZone  The time zone given to the event manager — presumably
 *                  used for interval boundary computation; confirm against
 *                  TimelineManager.getSpanningInterval.
 *
 * @return The smallest spanning interval, per the event manager.
 *
 * @throws TskCoreException If there is a problem querying the case database.
 */
public Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter, DateTimeZone timeZone) throws TskCoreException {
return eventManager.getSpanningInterval(timeRange, filter, timeZone);
}
@ -266,6 +278,22 @@ public final class FilteredEventsModel {
//should this only be tags applied to files or event bearing artifacts?
tagNames.setAll(skCase.getTagNamesInUse());
//TODO: limit this to files that have events derived from them.
/*
 * Count files per MIME type and record each as a Guava MediaType in
 * fileTypesMap. Best-effort: SQL failures are logged and the map is left
 * with whatever was collected so far.
 */
try (SleuthkitCase.CaseDbQuery executeQuery = skCase.executeQuery(
        "SELECT mime_type, COUNT(mime_type) AS count FROM tsk_files GROUP BY mime_type"); //NON-NLS
        ResultSet results = executeQuery.getResultSet();) {
    while (results.next()) {
        String mimeType = results.getString("mime_type"); //NON-NLS
        if (StringUtils.isNotBlank(mimeType)) {
            String[] splitMime = mimeType.split("/");
            /*
             * Skip malformed mime types that lack a "/"-separated subtype:
             * indexing splitMime[1] unconditionally would throw
             * ArrayIndexOutOfBoundsException.
             */
            if (splitMime.length == 2) {
                /*
                 * Retrieve the count via the explicit "count" alias;
                 * looking it up by the expression text "COUNT(mime_type)"
                 * is driver-dependent.
                 */
                fileTypesMap.put(MediaType.create(splitMime[0], splitMime[1]), results.getLong("count")); //NON-NLS
            }
        }
    }
} catch (SQLException ex) {
    Exceptions.printStackTrace(ex);
}
}
/**
@ -283,6 +311,7 @@ public final class FilteredEventsModel {
for (FilterState<? extends TagNameFilter> filterState : rootFilter.getTagsFilterState().getSubFilterStates()) {
filterState.setDisabled(tagNames.contains(filterState.getFilter().getTagName()) == false);
}
}
/**
@ -361,12 +390,23 @@ public final class FilteredEventsModel {
TagNameFilter tagNameFilter = new TagNameFilter(tagName);
tagsFilter.addSubFilter(tagNameFilter);
});
Multimap<String, String> mimeTypesMap = HashMultimap.create();
getMediaTypes().forEach((fileType, count) -> mimeTypesMap.put(fileType.type(), fileType.subtype()));
TimelineFilter.FileTypesFilter fileTypesFilter = new TimelineFilter.FileTypesFilter();
mimeTypesMap.asMap().forEach((type, subTypes) -> {
TimelineFilter.FileTypeFilter fileTypeFilter = new TimelineFilter.FileTypeFilter(type);
subTypes.forEach(subType -> fileTypeFilter.addSubFilter(new TimelineFilter.FileSubTypeFilter(type, subType)));
fileTypesFilter.addSubFilter(fileTypeFilter);
});
return new RootFilterState(new RootFilter(new HideKnownFilter(),
tagsFilter,
hashHitsFilter,
new TextFilter(),
new TypeFilter(EventType.ROOT_EVENT_TYPE),
new EventTypeFilter(EventType.ROOT_EVENT_TYPE),
dataSourcesFilter,
fileTypesFilter,
Collections.emptySet()));
}
@ -444,6 +484,8 @@ public final class FilteredEventsModel {
/**
* @return The smallest interval spanning all the events from the
* repository, ignoring any filters or requested ranges.
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public Interval getSpanningInterval() throws TskCoreException {
return new Interval(getMinTime() * 1000, 1000 + getMaxTime() * 1000);
@ -460,6 +502,8 @@ public final class FilteredEventsModel {
* @return the time (in seconds from unix epoch) of the absolutely first
* event available from the repository, ignoring any filters or
* requested ranges
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public Long getMinTime() throws TskCoreException {
try {
@ -473,6 +517,8 @@ public final class FilteredEventsModel {
* @return the time (in seconds from unix epoch) of the absolutely last
* event available from the repository, ignoring any filters or
* requested ranges
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public Long getMaxTime() throws TskCoreException {
try {
@ -617,6 +663,8 @@ public final class FilteredEventsModel {
/**
* (Re)Post an AutopsyEvent received from another event distribution system
* locally to all registered subscribers.
*
* @param event The event to re-post.
*/
public void postAutopsyEventLocally(AutopsyEvent event) {
eventbus.post(event);

View File

@ -86,7 +86,7 @@ import org.sleuthkit.datamodel.timeline.EventTypeZoomLevel;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
import org.sleuthkit.autopsy.timeline.zooming.TimeUnits;
import org.sleuthkit.datamodel.timeline.TimelineFilter.DescriptionFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.TypeFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.EventTypeFilter;
/**
* Controller in the MVC design along with FilteredEventsModel TimeLineView.
@ -607,7 +607,7 @@ public class TimeLineController {
@Override
protected Collection< Long> call() throws Exception {
synchronized (TimeLineController.this) {
return filteredEvents.getEventIDs(timeRange, new TypeFilter(type));
return filteredEvents.getEventIDs(timeRange, new EventTypeFilter(type));
}
}

View File

@ -58,11 +58,11 @@ import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.SingleDetailsViewE
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
import org.sleuthkit.autopsy.timeline.zooming.ZoomState;
import org.sleuthkit.datamodel.DescriptionLoD;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.timeline.EventTypeZoomLevel;
import org.sleuthkit.datamodel.timeline.TimelineEvent;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.timeline.TimelineFilter.DescriptionFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.TypeFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.EventTypeFilter;
/**
* A Node to represent an EventCluster in a DetailsChart
@ -179,7 +179,7 @@ final class EventClusterNode extends MultiEventNodeBase<EventCluster, EventStrip
final RootFilterState subClusterFilter = eventsModel.getFilterState().copyOf();
subClusterFilter.getFilter().getSubFilters().addAll(
new DescriptionFilter(getEvent().getDescriptionLoD(), getDescription(), DescriptionFilter.FilterMode.INCLUDE),
new TypeFilter(getEventType()));
new EventTypeFilter(getEventType()));
final Interval subClusterSpan = new Interval(getStartMillis(), getEndMillis() + 1000);
final EventTypeZoomLevel eventTypeZoomLevel = eventsModel.eventTypeZoomProperty().get();
final ZoomState zoom = new ZoomState(subClusterSpan, eventTypeZoomLevel, subClusterFilter, getDescriptionLoD());

View File

@ -157,15 +157,14 @@ final public class DetailsViewModel {
String descriptionColumn = eventManager.getDescriptionColumn(descriptionLOD);
final boolean useSubTypes = typeZoomLevel.equals(EventTypeZoomLevel.SUB_TYPE);
String typeColumn = TimelineManager.typeColumnHelper(useSubTypes);
final boolean needsTags = filterState.hasActiveTagsFilters();
final boolean needsHashSets = filterState.hasActiveHashFilters();
TimelineDBUtils dbUtils = new TimelineDBUtils(sleuthkitCase);
String querySql = "SELECT " + formatTimeFunctionHelper(rangeInfo.getPeriodSize().toChronoUnit(), timeZone) + " AS interval, " // NON-NLS
+ dbUtils.csvAggFunction("tsk_events.event_id") + " as event_ids, " //NON-NLS
+ dbUtils.csvAggFunction("CASE WHEN hash_hit = 1 THEN tsk_events.event_id ELSE NULL END") + " as hash_hits, " //NON-NLS
+ dbUtils.csvAggFunction("CASE WHEN tagged = 1 THEN tsk_events.event_id ELSE NULL END") + " as taggeds, " //NON-NLS
+ " min(time) AS minTime, max(time) AS maxTime, " + typeColumn + ", " + descriptionColumn // NON-NLS
+ " FROM " + TimelineManager.getAugmentedEventsTablesSQL(needsTags, needsHashSets) // NON-NLS
+ " FROM " + TimelineManager.getAugmentedEventsTablesSQL(filterState.getActiveFilter()) // NON-NLS
+ " WHERE time >= " + start + " AND time < " + end + " AND " + eventManager.getSQLWhere(filterState.getActiveFilter()) // NON-NLS
+ " GROUP BY interval, " + typeColumn + " , " + descriptionColumn // NON-NLS
+ " ORDER BY min(time)"; // NON-NLS

View File

@ -117,7 +117,7 @@ final public class FilterSetPanel extends BorderPane {
//type is the only filter expanded initially
expansionMap.put(controller.getEventsModel().getFilterState().getFilter(), true);
expansionMap.put(controller.getEventsModel().getFilterState().getTypeFilterState().getFilter(), true);
expansionMap.put(controller.getEventsModel().getFilterState().getEventTypeFilterState().getFilter(), true);
this.filteredEvents.eventTypeZoomProperty().addListener((Observable observable) -> applyFilters());
this.filteredEvents.descriptionLODProperty().addListener((Observable observable1) -> applyFilters());

View File

@ -34,7 +34,7 @@ import org.sleuthkit.autopsy.timeline.ui.EventTypeUtils;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
import org.sleuthkit.datamodel.timeline.EventTypeZoomLevel;
import org.sleuthkit.datamodel.timeline.TimelineFilter.TextFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.TypeFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.EventTypeFilter;
/**
* A TreeTableCell that shows an icon and color corresponding to the represented
@ -67,8 +67,8 @@ final class LegendCell extends TreeTableCell<FilterState<?>, FilterState<?>> {
} else {
//TODO: make some subclasses rather than use this if else chain.
if (item.getFilter() instanceof TypeFilter) {
TypeFilter filter = (TypeFilter) item.getFilter();
if (item.getFilter() instanceof EventTypeFilter) {
EventTypeFilter filter = (EventTypeFilter) item.getFilter();
Rectangle rect = new Rectangle(20, 20);
rect.setArcHeight(5);
@ -104,7 +104,7 @@ final class LegendCell extends TreeTableCell<FilterState<?>, FilterState<?>> {
}
}
private void setLegendColor(TypeFilter filter, Rectangle rect, EventTypeZoomLevel eventTypeZoom) {
private void setLegendColor(EventTypeFilter filter, Rectangle rect, EventTypeZoomLevel eventTypeZoom) {
//only show legend color if filter is of the same zoomlevel as requested in filteredEvents
if (eventTypeZoom.equals(filter.getEventType().getZoomLevel())) {
Platform.runLater(() -> {

View File

@ -26,23 +26,26 @@ import org.python.google.common.collect.Lists;
import org.sleuthkit.datamodel.timeline.TimelineFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.DataSourceFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.DataSourcesFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.EventTypeFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.FileTypeFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.FileTypesFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.HashHitsFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.HashSetFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.HideKnownFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.RootFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.TextFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.TypeFilter;
/**
*/
public class RootFilterState implements FilterState<RootFilter>, CompoundFilterState< TimelineFilter, RootFilter> {
private final CompoundFilterState<TypeFilter, TypeFilter> typeFilterState;
private final CompoundFilterState<EventTypeFilter, EventTypeFilter> eventTypeFilterState;
private final DefaultFilterState<HideKnownFilter> knownFilterState;
private final DefaultFilterState<TextFilter> textFilterState;
private final TagsFilterState tagsFilterState;
private final CompoundFilterState<HashSetFilter, HashHitsFilter> hashHitsFilterState;
private final CompoundFilterState<DataSourceFilter, DataSourcesFilter> dataSourcesFilterState;
private final CompoundFilterState<TimelineFilter.FileTypeFilter, TimelineFilter.FileTypesFilter> fileTypesFilterState;
private static final ReadOnlyBooleanProperty ALWAYS_TRUE = new ReadOnlyBooleanWrapper(true).getReadOnlyProperty();
private final static ReadOnlyBooleanProperty ALWAYS_FALSE = new ReadOnlyBooleanWrapper(false).getReadOnlyProperty();
@ -52,34 +55,40 @@ public class RootFilterState implements FilterState<RootFilter>, CompoundFilterS
public RootFilterState(RootFilter delegate) {
this(delegate,
new CompoundFilterStateImpl<>(delegate.getTypeFilter()),
new CompoundFilterStateImpl<>(delegate.getEventTypeFilter()),
new DefaultFilterState<>(delegate.getKnownFilter()),
new DefaultFilterState<>(delegate.getTextFilter()),
new TagsFilterState(delegate.getTagsFilter()),
new CompoundFilterStateImpl<>(delegate.getHashHitsFilter()),
new CompoundFilterStateImpl<>(delegate.getDataSourcesFilter())
new CompoundFilterStateImpl<>(delegate.getDataSourcesFilter()),
new CompoundFilterStateImpl<>(delegate.getFileTypesFilter())
);
}
private RootFilterState(RootFilter delegate,
CompoundFilterState<TypeFilter, TypeFilter> typeFilterState,
CompoundFilterState<EventTypeFilter, EventTypeFilter> eventTypeFilterState,
DefaultFilterState<HideKnownFilter> knownFilterState,
DefaultFilterState<TextFilter> textFilterState,
TagsFilterState tagsFilterState,
CompoundFilterState<HashSetFilter, HashHitsFilter> hashHitsFilterState,
CompoundFilterState<DataSourceFilter, DataSourcesFilter> dataSourcesFilterState) {
CompoundFilterState<DataSourceFilter, DataSourcesFilter> dataSourcesFilterState,
CompoundFilterState<FileTypeFilter, FileTypesFilter> fileTypesFilterState) {
this.delegate = delegate;
this.typeFilterState = typeFilterState;
this.eventTypeFilterState = eventTypeFilterState;
this.knownFilterState = knownFilterState;
this.textFilterState = textFilterState;
this.tagsFilterState = tagsFilterState;
this.hashHitsFilterState = hashHitsFilterState;
this.dataSourcesFilterState = dataSourcesFilterState;
this.fileTypesFilterState = fileTypesFilterState;
subFilterStates.addAll(
knownFilterState, textFilterState,
knownFilterState,
textFilterState,
tagsFilterState,
hashHitsFilterState,
dataSourcesFilterState, typeFilterState);
dataSourcesFilterState,
fileTypesFilterState,
eventTypeFilterState);
}
/**
@ -112,16 +121,18 @@ public class RootFilterState implements FilterState<RootFilter>, CompoundFilterS
@Override
public RootFilterState copyOf() {
return new RootFilterState(getFilter().copyOf(),
getTypeFilterState().copyOf(),
getEventTypeFilterState().copyOf(),
getKnownFilterState().copyOf(),
getTextFilterState().copyOf(),
getTagsFilterState().copyOf(),
getHashHitsFilterState().copyOf(),
getDataSourcesFilterState().copyOf());
getDataSourcesFilterState().copyOf(),
getFileTypesFilterState().copyOf()
);
}
public CompoundFilterState<TypeFilter, TypeFilter> getTypeFilterState() {
return typeFilterState;
public CompoundFilterState<EventTypeFilter, EventTypeFilter> getEventTypeFilterState() {
return eventTypeFilterState;
}
public DefaultFilterState<HideKnownFilter> getKnownFilterState() {
@ -144,14 +155,19 @@ public class RootFilterState implements FilterState<RootFilter>, CompoundFilterS
return dataSourcesFilterState;
}
/**
 * Get the filter state for the file types filter of this RootFilterState.
 *
 * @return The CompoundFilterState wrapping the FileTypesFilter and its
 *         FileTypeFilter sub-filters.
 */
public CompoundFilterState<FileTypeFilter, FileTypesFilter> getFileTypesFilterState() {
return fileTypesFilterState;
}
@Override
public RootFilter getActiveFilter() {
return new RootFilter(knownFilterState.getActiveFilter(),
tagsFilterState.getActiveFilter(),
hashHitsFilterState.getActiveFilter(),
textFilterState.getActiveFilter(),
typeFilterState.getActiveFilter(),
eventTypeFilterState.getActiveFilter(),
dataSourcesFilterState.getActiveFilter(),
fileTypesFilterState.getActiveFilter(),
Lists.transform(subFilterStates, FilterState::getActiveFilter));
}

View File

@ -85,13 +85,12 @@ public class ListViewModel {
}
ArrayList<CombinedEvent> combinedEvents = new ArrayList<>();
final boolean needsTags = filterState.hasActiveTagsFilters();
final boolean needsHashSets = filterState.hasActiveHashFilters();
TimelineDBUtils dbUtils = new TimelineDBUtils(sleuthkitCase);
final String querySql = "SELECT full_description, time, file_obj_id, "
+ dbUtils.csvAggFunction("CAST(tsk_events.event_id AS VARCHAR)") + " AS eventIDs, "
+ dbUtils.csvAggFunction("CAST(sub_type AS VARCHAR)") + " AS eventTypes"
+ " FROM " + TimelineManager.getAugmentedEventsTablesSQL(needsTags, needsHashSets)
+ " FROM " + TimelineManager.getAugmentedEventsTablesSQL(filterState.getActiveFilter())
+ " WHERE time >= " + startTime + " AND time <" + endTime + " AND " + eventManager.getSQLWhere(filterState.getActiveFilter())
+ " GROUP BY time, full_description, file_obj_id ORDER BY time ASC, full_description";