From 0fc4cbd753dc4fe6ad466ef4b7dde5830d76b3af Mon Sep 17 00:00:00 2001
From: jmillman
Date: Mon, 10 Aug 2015 15:34:21 -0400
Subject: [PATCH] cleanup getAggregateEvents()

---
 .../timeline/actions/SaveSnapshot.java        |   2 +-
 .../timeline/events/FilteredEventsModel.java  |   4 +-
 .../autopsy/timeline/events/db/EventDB.java   | 251 ++++++++----------
 .../autopsy/timeline/events/db/SQLHelper.java | 150 ++++++++---
 .../autopsy/timeline/zooming/ZoomParams.java  |   2 +-
 5 files changed, 231 insertions(+), 178 deletions(-)

diff --git a/Core/src/org/sleuthkit/autopsy/timeline/actions/SaveSnapshot.java b/Core/src/org/sleuthkit/autopsy/timeline/actions/SaveSnapshot.java
index 093f43af87..907420ca97 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/actions/SaveSnapshot.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/actions/SaveSnapshot.java
@@ -84,7 +84,7 @@ public class SaveSnapshot extends Action {
 
         ZoomParams get = controller.getEventsModel().zoomParamtersProperty().get();
         reportMetaData.add(new Pair<>("Time Range", get.getTimeRange().toString())); // NON-NLS
-        reportMetaData.add(new Pair<>("Description Level of Detail", get.getDescrLOD().getDisplayName())); // NON-NLS
+        reportMetaData.add(new Pair<>("Description Level of Detail", get.getDescriptionLOD().getDisplayName())); // NON-NLS
         reportMetaData.add(new Pair<>("Event Type Zoom Level", get.getTypeZoomLevel().getDisplayName())); // NON-NLS
         reportMetaData.add(new Pair<>("Filters", get.getFilter().getHTMLReportString())); // NON-NLS
 
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java b/Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java
index dc2b52a79f..ada8cfcadb 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/events/FilteredEventsModel.java
@@ -173,14 +173,14 @@ public final class FilteredEventsModel {
 
             if (zoomParams != null) {
                 if (zoomParams.getTypeZoomLevel().equals(requestedTypeZoom.get()) == false
-                        || zoomParams.getDescrLOD().equals(requestedLOD.get()) == false
+                        || zoomParams.getDescriptionLOD().equals(requestedLOD.get()) == false
                         || zoomParams.getFilter().equals(requestedFilter.get()) == false
                         || zoomParams.getTimeRange().equals(requestedTimeRange.get()) == false) {
 
                     requestedTypeZoom.set(zoomParams.getTypeZoomLevel());
                     requestedFilter.set(zoomParams.getFilter().copyOf());
                     requestedTimeRange.set(zoomParams.getTimeRange());
-                    requestedLOD.set(zoomParams.getDescrLOD());
+                    requestedLOD.set(zoomParams.getDescriptionLOD());
                 }
             }
         });
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java
index 6184c79afc..9d154f6629 100644
--- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/events/db/EventDB.java
@@ -32,6 +32,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -44,7 +45,6 @@ import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 import java.util.logging.Level;
 import java.util.stream.Collectors;
-import java.util.stream.Stream;
 import javax.annotation.Nonnull;
 import org.apache.commons.lang3.StringUtils;
 import org.joda.time.DateTimeZone;
@@ -56,23 +56,15 @@ import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.timeline.TimeLineController;
 import org.sleuthkit.autopsy.timeline.events.AggregateEvent;
 import org.sleuthkit.autopsy.timeline.events.TimeLineEvent;
+import static org.sleuthkit.autopsy.timeline.events.db.SQLHelper.useHashHitTablesHelper;
+import static org.sleuthkit.autopsy.timeline.events.db.SQLHelper.useTagTablesHelper;
 import org.sleuthkit.autopsy.timeline.events.type.BaseTypes;
 import org.sleuthkit.autopsy.timeline.events.type.EventType;
 import org.sleuthkit.autopsy.timeline.events.type.RootEventType;
 import org.sleuthkit.autopsy.timeline.filters.RootFilter;
 import org.sleuthkit.autopsy.timeline.utils.RangeDivisionInfo;
 import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD;
-import static org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD.FULL;
-import static org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD.MEDIUM;
-import static org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD.SHORT;
 import org.sleuthkit.autopsy.timeline.zooming.EventTypeZoomLevel;
-import org.sleuthkit.autopsy.timeline.zooming.TimeUnits;
-import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.DAYS;
-import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.HOURS;
-import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.MINUTES;
-import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.MONTHS;
-import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.SECONDS;
-import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.YEARS;
 import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
 import org.sleuthkit.datamodel.SleuthkitCase;
 import org.sleuthkit.datamodel.TskData;
@@ -868,9 +860,9 @@ public class EventDB {
         final boolean useSubTypes = (zoomLevel == EventTypeZoomLevel.SUB_TYPE);
 
         //get some info about the range of dates requested
-        final String queryString = "select count(*), " + useSubTypeHelper(useSubTypes)
+        final String queryString = "select count(*), " + typeColumnHelper(useSubTypes)
                 + " from events" + useHashHitTablesHelper(filter) + " where time >= " + startTime + " and time < " + endTime + " and " + SQLHelper.getSQLWhere(filter) // NON-NLS
-                + " GROUP BY " + useSubTypeHelper(useSubTypes); // NON-NLS
+                + " GROUP BY " + typeColumnHelper(useSubTypes); // NON-NLS
 
         DBLock.lock();
         try (Statement stmt = con.createStatement();
@@ -891,119 +883,140 @@
         return typeMap;
     }
 
-    List<AggregateEvent> getAggregatedEvents(ZoomParams params) {
-        return getAggregatedEvents(params.getTimeRange(), params.getFilter(), params.getTypeZoomLevel(), params.getDescrLOD());
-    }
-
     /**
-     * //TODO: update javadoc //TODO: split this into helper methods
-     *
-     * get a list of {@link AggregateEvent}s.
-     *
-     * General algorithm is as follows:
-     *
-     * 1)get all aggregate events, via one db query. 2) sort them into a map
-     * from (type, description)-> aggevent 3) for each key in map, merge the
-     * events and accumulate them in a list to return
-     *
-     *
-     * @param timeRange the Interval within in which all returned aggregate
-     *                  events will be.
-     * @param filter    only events that pass the filter will be included in
-     *                  aggregates events returned
-     * @param zoomLevel only events of this level will be included
-     * @param lod       description level of detail to use when grouping events
+     * get a list of {@link AggregateEvent}s, clustered according to the given
+     * zoom parameters.
      *
+     * @param params the zoom params that determine the zooming, filtering and
+     *               clustering.
     *
     * @return a list of aggregate events within the given timerange, that pass
     *         the supplied filter, aggregated according to the given event type
     *         and description zoom levels
     */
-    private List<AggregateEvent> getAggregatedEvents(Interval timeRange, RootFilter filter, EventTypeZoomLevel zoomLevel, DescriptionLOD lod) {
-        String descriptionColumn = getDescriptionColumn(lod);
-        final boolean useSubTypes = (zoomLevel.equals(EventTypeZoomLevel.SUB_TYPE));
+    List<AggregateEvent> getAggregatedEvents(ZoomParams params) {
+        //unpack params
+        Interval timeRange = params.getTimeRange();
+        RootFilter filter = params.getFilter();
+        DescriptionLOD descriptionLOD = params.getDescriptionLOD();
+        EventTypeZoomLevel typeZoomLevel = params.getTypeZoomLevel();
 
-        //get some info about the time range requested
-        RangeDivisionInfo rangeInfo = RangeDivisionInfo.getRangeDivisionInfo(timeRange);
-        //use 'rounded out' range
-        long start = timeRange.getStartMillis() / 1000;//.getLowerBound();
-        long end = timeRange.getEndMillis() / 1000;//Millis();//rangeInfo.getUpperBound();
-        if (Objects.equals(start, end)) {
+        //ensure length of queried interval is not 0
+        long start = timeRange.getStartMillis() / 1000;
+        long end = timeRange.getEndMillis() / 1000;
+        if (start == end) {
             end++;
         }
+        //get some info about the time range requested
+        RangeDivisionInfo rangeInfo = RangeDivisionInfo.getRangeDivisionInfo(timeRange);
 
-        //get a sqlite srtftime format string
-        String strfTimeFormat = getStrfTimeFormat(rangeInfo.getPeriodSize());
+        //build dynamic parts of query
+        String strfTimeFormat = SQLHelper.getStrfTimeFormat(rangeInfo);
+        String descriptionColumn = SQLHelper.getDescriptionColumn(descriptionLOD);
+        final boolean useSubTypes = typeZoomLevel.equals(EventTypeZoomLevel.SUB_TYPE);
+        String timeZone = TimeLineController.getTimeZone().get().equals(TimeZone.getDefault()) ? ", 'localtime'" : ""; // NON-NLS
+        String typeColumn = typeColumnHelper(useSubTypes);
 
-        //effectively map from type to (map from description to events)
-        Map<EventType, SetMultimap<String, AggregateEvent>> typeMap = new HashMap<>();
+        //compose query string
+        String query = "SELECT strftime('" + strfTimeFormat + "',time , 'unixepoch'" + timeZone + ") AS interval," // NON-NLS
+                + " group_concat(events.event_id) as event_ids, min(time), max(time), " + typeColumn + ", " + descriptionColumn // NON-NLS
+                + "\n FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) // NON-NLS
+                + "\n WHERE time >= " + start + " AND time < " + end + " AND " + SQLHelper.getSQLWhere(filter) // NON-NLS
+                + "\n GROUP BY interval, " + typeColumn + " , " + descriptionColumn // NON-NLS
+                + "\n ORDER BY min(time)"; // NON-NLS
 
-        //get all agregate events in this time unit
+        // perform query and map results to AggregateEvent objects
+        List<AggregateEvent> events = new ArrayList<>();
         DBLock.lock();
", 'localtime'" : "") + ") as interval," - + " group_concat(events.event_id) as event_ids, Min(time), Max(time), " + descriptionColumn + ", " + useSubTypeHelper(useSubTypes) - + " from events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) + " where " + "time >= " + start + " and time < " + end + " and " + SQLHelper.getSQLWhere(filter) // NON-NLS - + " group by interval, " + useSubTypeHelper(useSubTypes) + " , " + descriptionColumn // NON-NLS - + " order by Min(time)"; // NON-NLS - // scoop up requested events in groups organized by interval, type, and desription - try (ResultSet rs = con.createStatement().executeQuery(query);) { + + try (Statement createStatement = con.createStatement(); + ResultSet rs = createStatement.executeQuery(query)) { while (rs.next()) { - Interval interval = new Interval(rs.getLong("Min(time)") * 1000, rs.getLong("Max(time)") * 1000, TimeLineController.getJodaTimeZone()); - String eventIDS = rs.getString("event_ids"); - EventType type = useSubTypes ? RootEventType.allTypes.get(rs.getInt("sub_type")) : BaseTypes.values()[rs.getInt("base_type")]; - - HashSet hashHits = new HashSet<>(); - HashSet tagged = new HashSet<>(); - try (Statement st2 = con.createStatement(); - ResultSet eventHashHitOrTagged = st2.executeQuery("select event_id , tagged, hash_hit from events where event_id in (" + eventIDS + ")");) { - while (eventHashHitOrTagged.next()) { - long eventID = eventHashHitOrTagged.getLong("event_id"); - if (eventHashHitOrTagged.getInt("tagged") != 0) { - tagged.add(eventID); - } - if (eventHashHitOrTagged.getInt("hash_hit") != 0) { - hashHits.add(eventID); - } - } - } - - AggregateEvent aggregateEvent = new AggregateEvent( - interval, - type, - Stream.of(eventIDS.split(",")).map(Long::valueOf).collect(Collectors.toSet()), // NON-NLS - hashHits, - tagged, - rs.getString(descriptionColumn), - lod); - - //put events in map from type/descrition -> event - SetMultimap descrMap = typeMap.get(type); - if (descrMap == null) { - descrMap = HashMultimap.create(); - typeMap.put(type, descrMap); - } - descrMap.put(aggregateEvent.getDescription(), aggregateEvent); + events.add(aggregateEventHelper(rs, useSubTypes, descriptionLOD)); } - } catch (SQLException ex) { - Exceptions.printStackTrace(ex); + LOGGER.log(Level.SEVERE, "Failed to get aggregate events with query: " + query, ex); // NON-NLS } finally { DBLock.unlock(); } + return mergeAggregateEvents(rangeInfo.getPeriodSize().getPeriod(), events); + } + + /** + * map a single row in a ResultSet to an AggregateEvent + * + * @param rs the result set whose current row should be mapped + * @param useSubTypes use the sub_type column if true, else use the + * base_type column + * @param descriptionLOD the description level of detail for this event + * + * @return an AggregateEvent corresponding to the current row in the given + * result set + * + * @throws SQLException + */ + private AggregateEvent aggregateEventHelper(ResultSet rs, boolean useSubTypes, DescriptionLOD descriptionLOD) throws SQLException { + Interval interval = new Interval(rs.getLong("min(time)") * 1000, rs.getLong("max(time)") * 1000, TimeLineController.getJodaTimeZone());// NON-NLS + String eventIDsString = rs.getString("event_ids");// NON-NLS + Set eventIDs = SQLHelper.unGroupConcat(eventIDsString, Long::valueOf); + String description = rs.getString(SQLHelper.getDescriptionColumn(descriptionLOD)); + EventType type = useSubTypes ? 
+        EventType type = useSubTypes ? RootEventType.allTypes.get(rs.getInt("sub_type")) : BaseTypes.values()[rs.getInt("base_type")];// NON-NLS
+
+        Set<Long> hashHits = new HashSet<>();
+        String hashHitQuery = "SELECT group_concat(event_id) FROM events WHERE event_id IN (" + eventIDsString + ") AND hash_hit = 1";// NON-NLS
+        try (Statement stmt = con.createStatement();
+                ResultSet hashHitsRS = stmt.executeQuery(hashHitQuery)) {
+            while (hashHitsRS.next()) {
+                hashHits = SQLHelper.unGroupConcat(hashHitsRS.getString("group_concat(event_id)"), Long::valueOf);// NON-NLS
+            }
+        }
+
+        Set<Long> tagged = new HashSet<>();
+        String taggedQuery = "SELECT group_concat(event_id) FROM events WHERE event_id IN (" + eventIDsString + ") AND tagged = 1";// NON-NLS
+        try (Statement stmt = con.createStatement();
+                ResultSet taggedRS = stmt.executeQuery(taggedQuery)) {
+            while (taggedRS.next()) {
+                tagged = SQLHelper.unGroupConcat(taggedRS.getString("group_concat(event_id)"), Long::valueOf);// NON-NLS
+            }
+        }
+
+        return new AggregateEvent(interval, type, eventIDs, hashHits, tagged,
+                description, descriptionLOD);
+    }
+
+    /**
+     * merge the events in the given list if they are within the same period.
+     * General algorithm is as follows:
+     *
+     * 1) sort them into a map from (type, description)-> List<AggregateEvent>
+     * 2) for each key in map, merge the events and accumulate them in a list to
+     * return
+     *
+     * @param timeUnitLength
+     * @param preMergedEvents
+     *
+     * @return
+     */
+    static private List<AggregateEvent> mergeAggregateEvents(Period timeUnitLength, List<AggregateEvent> preMergedEvents) {
+
+        //effectively map from type to (map from description to events)
+        Map<EventType, SetMultimap<String, AggregateEvent>> typeMap = new HashMap<>();
+
+        for (AggregateEvent aggregateEvent : preMergedEvents) {
+            typeMap.computeIfAbsent(aggregateEvent.getType(), eventType -> HashMultimap.create())
+                    .put(aggregateEvent.getDescription(), aggregateEvent);
+        }
         //result list to return
         ArrayList<AggregateEvent> aggEvents = new ArrayList<>();
 
-        //save this for use when comparing gap size
-        Period timeUnitLength = rangeInfo.getPeriodSize().getPeriod();
-
         //For each (type, description) key, merge agg events
         for (SetMultimap<String, AggregateEvent> descrMap : typeMap.values()) {
+            //for each description ...
             for (String descr : descrMap.keySet()) {
                 //run through the sorted events, merging together adjacent events
                 Iterator<AggregateEvent> iterator = descrMap.get(descr).stream()
-                        .sorted((AggregateEvent o1, AggregateEvent o2)
-                                -> Long.compare(o1.getSpan().getStartMillis(), o2.getSpan().getStartMillis()))
+                        .sorted(Comparator.comparing(event -> event.getSpan().getStartMillis()))
                         .iterator();
                 AggregateEvent current = iterator.next();
                 while (iterator.hasNext()) {
@@ -1024,21 +1037,10 @@
                 aggEvents.add(current);
             }
         }
-
-        //at this point we should have a list of aggregate events.
-        //one per type/description spanning consecutive time units as determined in rangeInfo
         return aggEvents;
     }
 
-    private String useHashHitTablesHelper(RootFilter filter) {
-        return SQLHelper.hasActiveHashFilter(filter) ? ", hash_set_hits" : "";
-    }
-
-    private String useTagTablesHelper(RootFilter filter) {
-        return SQLHelper.hasActiveTagFilter(filter) ? ", content_tags, blackboard_artifact_tags " : "";
-    }
-
-    private static String useSubTypeHelper(final boolean useSubTypes) {
+    private static String typeColumnHelper(final boolean useSubTypes) {
"sub_type" : "base_type"; } @@ -1065,37 +1067,6 @@ public class EventDB { return defaultValue; } - private String getDescriptionColumn(DescriptionLOD lod) { - switch (lod) { - case FULL: - return "full_description"; - case MEDIUM: - return "med_description"; - case SHORT: - default: - return "short_description"; - } - } - - private String getStrfTimeFormat(TimeUnits info) { - switch (info) { - case DAYS: - return "%Y-%m-%dT00:00:00"; // NON-NLS - case HOURS: - return "%Y-%m-%dT%H:00:00"; // NON-NLS - case MINUTES: - return "%Y-%m-%dT%H:%M:00"; // NON-NLS - case MONTHS: - return "%Y-%m-01T00:00:00"; // NON-NLS - case SECONDS: - return "%Y-%m-%dT%H:%M:%S"; // NON-NLS - case YEARS: - return "%Y-01-01T00:00:00"; // NON-NLS - default: - return "%Y-%m-%dT%H:%M:%S"; // NON-NLS - } - } - private PreparedStatement prepareStatement(String queryString) throws SQLException { PreparedStatement prepareStatement = con.prepareStatement(queryString); preparedStatements.add(prepareStatement); diff --git a/Core/src/org/sleuthkit/autopsy/timeline/events/db/SQLHelper.java b/Core/src/org/sleuthkit/autopsy/timeline/events/db/SQLHelper.java index 90b9efa96a..bf2d3df3b6 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/events/db/SQLHelper.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/events/db/SQLHelper.java @@ -1,13 +1,30 @@ /* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. + * Autopsy Forensic Browser + * + * Copyright 2013-15 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */
 package org.sleuthkit.autopsy.timeline.events.db;
 
 import java.util.Collections;
 import java.util.List;
+import java.util.Set;
+import java.util.function.Function;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import javax.annotation.Nonnull;
 import org.apache.commons.lang3.StringUtils;
 import org.sleuthkit.autopsy.timeline.events.type.RootEventType;
 import org.sleuthkit.autopsy.timeline.filters.AbstractFilter;
@@ -24,47 +41,58 @@ import org.sleuthkit.autopsy.timeline.filters.TagsFilter;
 import org.sleuthkit.autopsy.timeline.filters.TextFilter;
 import org.sleuthkit.autopsy.timeline.filters.TypeFilter;
 import org.sleuthkit.autopsy.timeline.filters.UnionFilter;
+import org.sleuthkit.autopsy.timeline.utils.RangeDivisionInfo;
+import org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD;
+import static org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD.FULL;
+import static org.sleuthkit.autopsy.timeline.zooming.DescriptionLOD.MEDIUM;
+import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.DAYS;
+import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.HOURS;
+import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.MINUTES;
+import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.MONTHS;
+import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.SECONDS;
+import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.YEARS;
 import org.sleuthkit.datamodel.TskData;
 
 /**
- *
+ * Static helper methods for converting between java data model objects and
+ * sqlite queries.
  */
 public class SQLHelper {
 
-    private static List<Integer> getActiveSubTypes(TypeFilter filter) {
-        if (filter.isSelected()) {
-            if (filter.getSubFilters().isEmpty()) {
-                return Collections.singletonList(RootEventType.allTypes.indexOf(filter.getEventType()));
-            } else {
-                return filter.getSubFilters().stream().flatMap((Filter t) -> getActiveSubTypes((TypeFilter) t).stream()).collect(Collectors.toList());
-            }
-        } else {
-            return Collections.emptyList();
-        }
-    }
-
-    static boolean hasActiveHashFilter(RootFilter filter) {
+    static String useHashHitTablesHelper(RootFilter filter) {
         HashHitsFilter hashHitFilter = filter.getHashHitsFilter();
-        return hashHitFilter.isSelected() && false == hashHitFilter.isDisabled();
+        return hashHitFilter.isSelected() && false == hashHitFilter.isDisabled() ? ", hash_set_hits" : "";
     }
 
-    static boolean hasActiveTagFilter(RootFilter filter) {
+    static String useTagTablesHelper(RootFilter filter) {
         TagsFilter tagsFilter = filter.getTagsFilter();
-        return tagsFilter.isSelected() && false == tagsFilter.isDisabled();
+        return tagsFilter.isSelected() && false == tagsFilter.isDisabled() ? ", content_tags, blackboard_artifact_tags " : "";
     }
 
-    private SQLHelper() {
+    static <X> Set<X> unGroupConcat(String s, Function<String, X> mapper) {
+        return Stream.of(s.split(","))
+                .map(mapper::apply)
+                .collect(Collectors.toSet());
     }
 
-    static String getSQLWhere(IntersectionFilter filter) {
-        return filter.getSubFilters().stream().filter(Filter::isSelected).map(SQLHelper::getSQLWhere).collect(Collectors.joining(" and ", "( ", ")"));
+    private static String getSQLWhere(IntersectionFilter filter) {
+        return filter.getSubFilters().stream()
+                .filter(Filter::isSelected)
+                .map(SQLHelper::getSQLWhere)
+                .collect(Collectors.joining(" and ", "( ", ")"));
     }
 
-    static String getSQLWhere(UnionFilter filter) {
-        return filter.getSubFilters().stream().filter(Filter::isSelected).map(SQLHelper::getSQLWhere).collect(Collectors.joining(" or ", "( ", ")"));
+    private static String getSQLWhere(UnionFilter filter) {
+        return filter.getSubFilters().stream()
+                .filter(Filter::isSelected).map(SQLHelper::getSQLWhere)
+                .collect(Collectors.joining(" or ", "( ", ")"));
     }
 
-    static String getSQLWhere(Filter filter) {
+    static String getSQLWhere(RootFilter filter) {
+        return getSQLWhere((IntersectionFilter) filter);
+    }
+
+    private static String getSQLWhere(Filter filter) {
         String result = "";
         if (filter == null) {
             return "1";
@@ -96,7 +124,7 @@ public class SQLHelper {
         return result;
     }
 
-    static String getSQLWhere(HideKnownFilter filter) {
+    private static String getSQLWhere(HideKnownFilter filter) {
         if (filter.isSelected()) {
             return "(known_state IS NOT '" + TskData.FileKnown.KNOWN.getFileKnownValue() + "')"; // NON-NLS
         } else {
@@ -104,7 +132,7 @@
         }
     }
 
-    static String getSQLWhere(TagsFilter filter) {
+    private static String getSQLWhere(TagsFilter filter) {
         if (filter.isSelected()
                 && (false == filter.isDisabled())
                 && (filter.getSubFilters().isEmpty() == false)) {
@@ -114,13 +142,12 @@
                     .collect(Collectors.joining(", ", "(", ")"));
             return "((blackboard_artifact_tags.artifact_id == events.artifact_id AND blackboard_artifact_tags.tag_name_id IN " + tagNameIDs + ") "
                     + "OR ( content_tags.obj_id == events.file_id AND content_tags.tag_name_id IN " + tagNameIDs + "))";
-
         } else {
             return "1";
         }
     }
 
-    static String getSQLWhere(HashHitsFilter filter) {
+    private static String getSQLWhere(HashHitsFilter filter) {
         if (filter.isSelected()
                 && (false == filter.isDisabled())
                 && (filter.getSubFilters().isEmpty() == false)) {
@@ -134,11 +161,11 @@
         }
     }
 
-    static String getSQLWhere(DataSourceFilter filter) {
+    private static String getSQLWhere(DataSourceFilter filter) {
         return (filter.isSelected()) ? "(datasource_id = '" + filter.getDataSourceID() + "')" : "1";
     }
 
-    static String getSQLWhere(DataSourcesFilter filter) {
+    private static String getSQLWhere(DataSourcesFilter filter) {
"(datasource_id in (" + filter.getSubFilters().stream() .filter(AbstractFilter::isSelected) @@ -146,7 +173,7 @@ public class SQLHelper { .collect(Collectors.joining(", ")) + "))" : "1"; } - static String getSQLWhere(TextFilter filter) { + private static String getSQLWhere(TextFilter filter) { if (filter.isSelected()) { if (StringUtils.isBlank(filter.getText())) { return "1"; @@ -168,7 +195,7 @@ public class SQLHelper { * * @return */ - static String getSQLWhere(TypeFilter typeFilter) { + private static String getSQLWhere(TypeFilter typeFilter) { if (typeFilter.isSelected() == false) { return "0"; } else if (typeFilter.getEventType() instanceof RootEventType) { @@ -180,4 +207,59 @@ public class SQLHelper { return "(sub_type IN (" + StringUtils.join(getActiveSubTypes(typeFilter), ",") + "))"; } + private static List getActiveSubTypes(TypeFilter filter) { + if (filter.isSelected()) { + if (filter.getSubFilters().isEmpty()) { + return Collections.singletonList(RootEventType.allTypes.indexOf(filter.getEventType())); + } else { + return filter.getSubFilters().stream().flatMap((Filter t) -> getActiveSubTypes((TypeFilter) t).stream()).collect(Collectors.toList()); + } + } else { + return Collections.emptyList(); + } + } + + /** + * get a sqlite strftime format string that will allow us to group by the + * requested period size. That is, with all info more granular that that + * requested dropped (replaced with zeros). + * + * @param info the {@link RangeDivisionInfo} with the requested period size + * + * @return a String formatted according to the sqlite strftime spec + * + * @see https://www.sqlite.org/lang_datefunc.html + */ + static String getStrfTimeFormat(@Nonnull RangeDivisionInfo info) { + switch (info.getPeriodSize()) { + case YEARS: + return "%Y-01-01T00:00:00"; // NON-NLS + case MONTHS: + return "%Y-%m-01T00:00:00"; // NON-NLS + case DAYS: + return "%Y-%m-%dT00:00:00"; // NON-NLS + case HOURS: + return "%Y-%m-%dT%H:00:00"; // NON-NLS + case MINUTES: + return "%Y-%m-%dT%H:%M:00"; // NON-NLS + case SECONDS: + default: //seconds - should never happen + return "%Y-%m-%dT%H:%M:%S"; // NON-NLS + } + } + + static String getDescriptionColumn(DescriptionLOD lod) { + switch (lod) { + case FULL: + return "full_description"; + case MEDIUM: + return "med_description"; + case SHORT: + default: + return "short_description"; + } + } + + private SQLHelper() { + } } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/zooming/ZoomParams.java b/Core/src/org/sleuthkit/autopsy/timeline/zooming/ZoomParams.java index c6de9f5d1e..2bb4524e66 100644 --- a/Core/src/org/sleuthkit/autopsy/timeline/zooming/ZoomParams.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/zooming/ZoomParams.java @@ -50,7 +50,7 @@ public class ZoomParams { return filter; } - public DescriptionLOD getDescrLOD() { + public DescriptionLOD getDescriptionLOD() { return descrLOD; }