mirror of https://github.com/overcuriousity/autopsy-flatpak.git
synced 2025-07-14 17:06:16 +00:00

rework event clustering

This commit is contained in:
parent f27c6ce649
commit 1fa2cc17bc
@@ -33,6 +33,7 @@ import javafx.concurrent.Task;
 import javafx.event.EventHandler;
 import javafx.geometry.Pos;
 import javafx.scene.Cursor;
+import javafx.scene.control.Alert;
 import javafx.scene.control.Button;
 import javafx.scene.image.Image;
 import javafx.scene.image.ImageView;
@@ -179,7 +180,7 @@ final class EventClusterNode extends MultiEventNodeBase<EventCluster, EventStripe
          */
         RootFilterState subClusterFilter = eventsModel.getFilterState()
                 .intersect(new DefaultFilterState<>(
-                        new DescriptionFilter(getDescription(), DescriptionFilter.FilterMode.INCLUDE), true))
+                        new DescriptionFilter(getEvent().getDescriptionLoD(), getDescription()), true))
                 .intersect(new DefaultFilterState<>(
                         new EventTypeFilter(getEventType()), true));
         final Interval subClusterSpan = new Interval(getStartMillis(), getEndMillis() + 1000);
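The reworked filter above keys DescriptionFilter by (description level of detail, description) and intersects it with an EventTypeFilter to restrict the sub-cluster query. A minimal standalone sketch of that intersection pattern, with java.util.function.Predicate standing in for the Autopsy filter-state classes (the Event record and the sample values are hypothetical):

import java.util.List;
import java.util.function.Predicate;

public class FilterIntersectionSketch {

    // hypothetical stand-in for TimelineEvent
    record Event(String description, String type) {}

    public static void main(String[] args) {
        Predicate<Event> byDescription = event -> event.description().equals("C:/Windows");
        Predicate<Event> byType = event -> event.type().equals("file modified");
        // intersect(...) in the patch corresponds to Predicate.and(...) here
        Predicate<Event> subClusterFilter = byDescription.and(byType);

        List<Event> events = List.of(
                new Event("C:/Windows", "file modified"),
                new Event("C:/Users", "file modified"));
        // only the first event passes both sub-filters
        events.stream().filter(subClusterFilter).forEach(System.out::println);
    }
}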
@@ -201,7 +202,6 @@ final class EventClusterNode extends MultiEventNodeBase<EventCluster, EventStripe
         //next LoD in direction of given relativeDetail
         DescriptionLoD next = loadedDescriptionLoD;
         do {
-
             loadedDescriptionLoD = next;
             if (loadedDescriptionLoD == getEvent().getDescriptionLoD()) {
                 //if we are back at the level of detail of the original cluster, return empty list to indicate.
@@ -246,6 +246,7 @@ final class EventClusterNode extends MultiEventNodeBase<EventCluster, EventStripe
             }
         } catch (TskCoreException | InterruptedException | ExecutionException ex) {
             LOGGER.log(Level.SEVERE, "Error loading subnodes", ex); //NON-NLS
+
         }

         getChartLane().requestChartLayout();
@@ -18,7 +18,6 @@
  */
 package org.sleuthkit.autopsy.timeline.ui.detailview;

-import java.util.function.Predicate;
 import javafx.scene.image.Image;
 import javafx.scene.image.ImageView;
 import org.controlsfx.control.action.Action;
@@ -27,7 +26,6 @@ import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.DefaultFilterState;
 import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
 import org.sleuthkit.datamodel.DescriptionLoD;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.DescriptionFilter;
-import static org.sleuthkit.datamodel.timeline.TimelineFilter.DescriptionFilter.FilterMode.EXCLUDE;

 /**
  * An Action that hides, in the given chart, events that have the given
@@ -55,7 +53,7 @@ class HideDescriptionAction extends Action {
          */
         final FilterState<DescriptionFilter> testFilter
                 = new DefaultFilterState<>(
-                        new DescriptionFilter(description, EXCLUDE));
+                        new DescriptionFilter(descriptionLoD, description));

         FilterState<DescriptionFilter> descriptionFilter = chart.getController().getQuickHideFilters().stream()
                 .filter(otherFilterState -> testFilter.getFilter().equals(otherFilterState.getFilter()))
@@ -18,14 +18,10 @@
  */
 package org.sleuthkit.autopsy.timeline.ui.detailview;

-import java.util.function.Consumer;
-import java.util.function.Predicate;
-import javafx.event.ActionEvent;
 import javafx.scene.image.Image;
 import javafx.scene.image.ImageView;
 import org.controlsfx.control.action.Action;
 import org.openide.util.NbBundle;
-import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
 import org.sleuthkit.datamodel.DescriptionLoD;
 import org.sleuthkit.datamodel.timeline.TimelineFilter.DescriptionFilter;

@@ -48,7 +44,7 @@ class UnhideDescriptionAction extends Action {
      * test one and checking all the existing filters against it.
      * Disable them.
      */
-    final DescriptionFilter testFilter = new DescriptionFilter(description, DescriptionFilter.FilterMode.EXCLUDE);
+    final DescriptionFilter testFilter = new DescriptionFilter(descriptionLoD, description);
     chart.getController().getQuickHideFilters().stream()
             .filter(otherFilterState -> testFilter.equals(otherFilterState.getFilter()))
             .forEach(descriptionfilter -> descriptionfilter.setSelected(false));

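Both HideDescriptionAction and UnhideDescriptionAction now build a throwaway test filter and compare it against every registered quick-hide filter, toggling the selected flag of any match. A standalone sketch of that lookup-and-toggle pattern (FilterState here is a hypothetical mutable holder, not the Autopsy class of the same name):

import java.util.List;

public class QuickHideSketch {

    static class FilterState {
        final String filter; // stands in for a DescriptionFilter
        boolean selected;

        FilterState(String filter, boolean selected) {
            this.filter = filter;
            this.selected = selected;
        }
    }

    public static void main(String[] args) {
        List<FilterState> quickHideFilters = List.of(
                new FilterState("C:/Windows", true),
                new FilterState("C:/Users", false));
        String testFilter = "C:/Windows";

        // find states whose filter equals the test filter and deselect them (unhide)
        quickHideFilters.stream()
                .filter(state -> state.filter.equals(testFilter))
                .forEach(state -> state.selected = false);

        quickHideFilters.forEach(state ->
                System.out.println(state.filter + " selected=" + state.selected));
    }
}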
@@ -25,12 +25,13 @@ import com.google.common.collect.SetMultimap;
 import com.google.common.eventbus.Subscribe;
 import java.sql.ResultSet;
 import java.sql.SQLException;
-import java.time.temporal.ChronoUnit;
 import java.util.ArrayList;
+import java.util.Comparator;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 import java.util.logging.Level;
@@ -44,8 +45,6 @@ import org.sleuthkit.autopsy.timeline.FilteredEventsModel;
 import org.sleuthkit.autopsy.timeline.TimeLineController;
 import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl;
 import org.sleuthkit.autopsy.timeline.utils.RangeDivision;
-import org.sleuthkit.autopsy.timeline.utils.TimelineDBUtils;
-import static org.sleuthkit.autopsy.timeline.utils.TimelineDBUtils.unGroupConcat;
 import org.sleuthkit.autopsy.timeline.zooming.TimeUnits;
 import org.sleuthkit.autopsy.timeline.zooming.ZoomState;
 import org.sleuthkit.datamodel.DescriptionLoD;
@@ -55,7 +54,11 @@ import org.sleuthkit.datamodel.TskCoreException;
 import org.sleuthkit.datamodel.timeline.EventType;
 import org.sleuthkit.datamodel.timeline.EventTypeZoomLevel;
 import static org.sleuthkit.datamodel.timeline.EventTypeZoomLevel.SUB_TYPE;
+import org.sleuthkit.datamodel.timeline.TimelineEvent;
 import org.sleuthkit.datamodel.timeline.TimelineFilter;
+import org.sleuthkit.datamodel.timeline.TimelineFilter.DescriptionFilter;
+import org.sleuthkit.datamodel.timeline.TimelineFilter.IntersectionFilter;
+import org.sleuthkit.datamodel.timeline.TimelineFilter.UnionFilter;

 /**
  * Model for the Details View. Uses FilteredEventsModel as underlying datamodel
@@ -135,29 +138,38 @@ final public class DetailsViewModel {
         TimeUnits periodSize = RangeDivision.getRangeDivision(timeRange, timeZone).getPeriodSize();

         //build dynamic parts of query
-        String typeColumn = TimelineManager.typeColumnHelper(typeZoomLevel.equals(SUB_TYPE));
-        TimelineDBUtils dbUtils = new TimelineDBUtils(sleuthkitCase);
-
-        String querySql = "SELECT " + formatTimeFunctionHelper(periodSize.toChronoUnit(), timeZone) + " AS interval, " // NON-NLS
-                + dbUtils.csvAggFunction("tsk_events.event_id") + " as event_ids, " //NON-NLS
-                + dbUtils.csvAggFunction("CASE WHEN hash_hit = 1 THEN tsk_events.event_id ELSE NULL END") + " as hash_hits, " //NON-NLS
-                + dbUtils.csvAggFunction("CASE WHEN tagged = 1 THEN tsk_events.event_id ELSE NULL END") + " as taggeds, " //NON-NLS
-                + " min(time) AS minTime, max(time) AS maxTime, sub_type, base_type, full_description, med_description, short_description " // NON-NLS
+        String querySql = "SELECT time, file_obj_id, data_source_obj_id, artifact_id, " // NON-NLS
+                + " event_id, " //NON-NLS
+                + " hash_hit, " //NON-NLS
+                + " tagged, " //NON-NLS
+                + " sub_type, base_type, "
+                + " full_description, med_description, short_description " // NON-NLS
                 + " FROM " + TimelineManager.getAugmentedEventsTablesSQL(activeFilter) // NON-NLS
                 + " WHERE time >= " + start + " AND time < " + end + " AND " + eventManager.getSQLWhere(activeFilter) // NON-NLS
-                + " GROUP BY interval, full_description, " + typeColumn // NON-NLS
-                + " ORDER BY min(time)"; // NON-NLS
+                + " ORDER BY time"; // NON-NLS

         // perform query and map results to EventCluster objects
-        List<EventCluster> eventClusters = new ArrayList<>();
+        Map<EventType, SetMultimap<String, EventCluster>> eventClusters = new HashMap<>();

         try (SleuthkitCase.CaseDbQuery dbQuery = sleuthkitCase.executeQuery(querySql);
                 ResultSet resultSet = dbQuery.getResultSet();) {
             while (resultSet.next()) {
-                eventClusters.add(eventClusterHelper(resultSet, typeColumn, descriptionLOD, timeZone));
+                TimelineEvent event = eventHelper(resultSet);
+                if (passes(activeFilter, event)) {
+                    EventType clusterType = typeZoomLevel.equals(SUB_TYPE) ? event.getEventType() : event.getEventType().getBaseType();
+                    eventClusters.computeIfAbsent(clusterType, eventType -> HashMultimap.create())
+                            .put(event.getDescription(descriptionLOD), new EventCluster(event, clusterType, descriptionLOD));
+                }
             }
         } catch (TskCoreException ex) {
             logger.log(Level.SEVERE, "Failed to get events with query: " + querySql, ex); // NON-NLS
             throw ex;
         } catch (SQLException ex) {
             logger.log(Level.SEVERE, "Failed to get events with query: " + querySql, ex); // NON-NLS
             throw new TskCoreException("Failed to get events with query: " + querySql, ex);
         }

         return mergeClustersToStripes(periodSize.toUnitPeriod(), eventClusters);
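The rework moves clustering out of SQL: instead of one GROUP BY row per cluster, every event row is fetched, filtered in memory, and bucketed first by event type and then by description. A standalone sketch of that two-level grouping, assuming Guava on the classpath (the patch itself uses Guava's SetMultimap); the Event record and sample data are hypothetical:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.SetMultimap;
import java.util.HashMap;
import java.util.Map;

public class GroupingSketch {

    // hypothetical stand-in for TimelineEvent
    record Event(long id, String type, String description) {}

    public static void main(String[] args) {
        // type -> (description -> events), mirroring the new eventClusters map
        Map<String, SetMultimap<String, Event>> eventsByTypeAndDescription = new HashMap<>();
        for (Event event : new Event[]{
                new Event(1, "file", "C:/Windows"),
                new Event(2, "file", "C:/Windows"),
                new Event(3, "web", "example.com")}) {
            eventsByTypeAndDescription
                    .computeIfAbsent(event.type(), type -> HashMultimap.create())
                    .put(event.description(), event);
        }
        System.out.println(eventsByTypeAndDescription);
    }
}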
@@ -177,29 +189,26 @@ final public class DetailsViewModel {
      *
      * @throws SQLException
      */
-    private EventCluster eventClusterHelper(ResultSet resultSet, String typeColumn, DescriptionLoD descriptionLOD, DateTimeZone timeZone) throws SQLException, TskCoreException {
-        Interval interval = new Interval(resultSet.getLong("minTime") * 1000, resultSet.getLong("maxTime") * 1000, timeZone);
+    private TimelineEvent eventHelper(ResultSet resultSet) throws SQLException, TskCoreException {

-        List<Long> eventIDs = unGroupConcat(resultSet.getString("event_ids"), Long::valueOf); // NON-NLS
-        List<Long> hashHits = unGroupConcat(resultSet.getString("hash_hits"), Long::valueOf); //NON-NLS
-        List<Long> tagged = unGroupConcat(resultSet.getString("taggeds"), Long::valueOf); //NON-NLS
-
-        //The actual event type of this cluster
-        int eventTypeID = resultSet.getInt(typeColumn);
+        //the event type to use to get the description.
+        int eventTypeID = resultSet.getInt("sub_type");
         EventType eventType = eventManager.getEventType(eventTypeID).orElseThrow(()
                 -> new TskCoreException("Error mapping event type id " + eventTypeID + " to EventType."));//NON-NLS

-        //the event type to use to get the description.
-        int descEventTypeID = resultSet.getInt("sub_type");
-        EventType descEventType = eventManager.getEventType(descEventTypeID).orElseThrow(()
-                -> new TskCoreException("Error mapping event type id " + descEventTypeID + " to EventType."));//NON-NLS
-
-        String description = descEventType.getDescription(descriptionLOD,
-                resultSet.getString("full_description"),
-                resultSet.getString("med_description"),
-                resultSet.getString("short_description"));
-
-        return new EventCluster(interval, eventType, eventIDs, hashHits, tagged, description, descriptionLOD);
+        return new TimelineEvent(
+                resultSet.getLong("event_id"), // NON-NLS
+                resultSet.getLong("data_source_obj_id"), // NON-NLS
+                resultSet.getLong("file_obj_id"), // NON-NLS
+                resultSet.getLong("artifact_id"), // NON-NLS
+                resultSet.getLong("time"), // NON-NLS
+                eventType,
+                eventType.getDescription(
+                        resultSet.getString("full_description"), // NON-NLS
+                        resultSet.getString("med_description"), // NON-NLS
+                        resultSet.getString("short_description")), // NON-NLS
+                resultSet.getInt("hash_hit") != 0, //NON-NLS
+                resultSet.getInt("tagged") != 0);
     }

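eventHelper reduces to a plain row-to-object mapping. A standalone sketch of the same pattern against an in-memory SQLite database (assumes the org.xerial sqlite-jdbc driver on the classpath; the Event record and table are hypothetical stand-ins for TimelineEvent and the case database):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class EventHelperSketch {

    record Event(long id, long time, boolean hashHit, boolean tagged) {}

    static Event eventHelper(ResultSet resultSet) throws SQLException {
        return new Event(
                resultSet.getLong("event_id"),
                resultSet.getLong("time"),
                resultSet.getInt("hash_hit") != 0, // SQLite stores booleans as 0/1
                resultSet.getInt("tagged") != 0);
    }

    public static void main(String[] args) throws SQLException {
        try (Connection db = DriverManager.getConnection("jdbc:sqlite::memory:");
             Statement statement = db.createStatement()) {
            statement.execute("CREATE TABLE events (event_id INTEGER, time INTEGER, hash_hit INTEGER, tagged INTEGER)");
            statement.execute("INSERT INTO events VALUES (1, 1500000000, 1, 0)");
            try (ResultSet resultSet = statement.executeQuery("SELECT * FROM events ORDER BY time")) {
                while (resultSet.next()) {
                    System.out.println(eventHelper(resultSet));
                }
            }
        }
    }
}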
@@ -215,24 +224,20 @@ final public class DetailsViewModel {
     /**
      *
      * @return
      */
-    static private List<EventStripe> mergeClustersToStripes(Period timeUnitLength, List<EventCluster> eventClusters) {
+    static private List<EventStripe> mergeClustersToStripes(Period timeUnitLength, Map<EventType, SetMultimap<String, EventCluster>> eventClusters) {

-        // type -> (description -> events)
-        Map<EventType, SetMultimap<String, EventCluster>> typeMap = new HashMap<>();
-
-        for (EventCluster cluster : eventClusters) {
-            typeMap.computeIfAbsent(cluster.getEventType(), eventType -> HashMultimap.create())
-                    .put(cluster.getDescription(), cluster);
-        }
         //result list to return
         ArrayList<EventCluster> mergedClusters = new ArrayList<>();

         //For each (type, description) key, merge agg events
-        for (SetMultimap<String, EventCluster> descrMap : typeMap.values()) {
+        for (Map.Entry<EventType, SetMultimap<String, EventCluster>> typeMapEntry : eventClusters.entrySet()) {
+            EventType type = typeMapEntry.getKey();
+            SetMultimap<String, EventCluster> descrMap = typeMapEntry.getValue();
             //for each description ...
             for (String descr : descrMap.keySet()) {
+                Set<EventCluster> events = descrMap.get(descr);
                 //run through the sorted events, merging together adjacent events
-                Iterator<EventCluster> iterator = descrMap.get(descr).stream()
+                Iterator<EventCluster> iterator = events.stream()
                         .sorted(new DetailViewEvent.StartComparator())
                         .iterator();
                 EventCluster current = iterator.next();
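The loop that follows walks each description's clusters in start-time order and coalesces neighbours that fall close together. A standalone sketch of that merge pass, with Joda-Time intervals simplified to long start/end pairs and a hypothetical gapThreshold in place of the real timeUnitLength:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;

public class MergeSketch {

    record Cluster(long start, long end) {
        Cluster mergeWith(Cluster other) {
            return new Cluster(Math.min(start, other.start), Math.max(end, other.end));
        }
    }

    public static void main(String[] args) {
        List<Cluster> clusters = new ArrayList<>(List.of(
                new Cluster(0, 10), new Cluster(12, 20), new Cluster(500, 510)));
        clusters.sort(Comparator.comparingLong(Cluster::start));

        long gapThreshold = 100; // merge clusters separated by less than this
        List<Cluster> merged = new ArrayList<>();
        Iterator<Cluster> iterator = clusters.iterator();
        Cluster current = iterator.next();
        while (iterator.hasNext()) {
            Cluster next = iterator.next();
            if (next.start() - current.end() < gapThreshold) {
                current = current.mergeWith(next); // adjacent: coalesce
            } else {
                merged.add(current); // gap too big: start a new run
                current = next;
            }
        }
        merged.add(current);
        System.out.println(merged); // [Cluster[start=0, end=20], Cluster[start=500, end=510]]
    }
}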
@@ -268,88 +273,23 @@ final public class DetailsViewModel {
                 .collect(Collectors.toList());
     }

-    /**
-     * Get a column specification that will allow us to group by the requested
-     * period size. That is, with all info more granular than that requested
-     * dropped (replaced with zeros). For use in the select clause of a sql
-     * query.
-     *
-     * @param periodSize The ChronoUnit describing what granularity to use.
-     * @param timeZone
-     *
-     * @return
-     */
-    private String formatTimeFunctionHelper(ChronoUnit periodSize, DateTimeZone timeZone) {
-        switch (sleuthkitCase.getDatabaseType()) {
-            case SQLITE:
-                String strfTimeFormat = getSQLIteTimeFormat(periodSize);
-                String useLocalTime = timeZone.equals(DateTimeZone.getDefault()) ? ", 'localtime'" : ""; // NON-NLS
-                return "strftime('" + strfTimeFormat + "', time , 'unixepoch'" + useLocalTime + ")";
-            case POSTGRESQL:
-                String formatString = getPostgresTimeFormat(periodSize);
-                return "to_char(to_timestamp(time) AT TIME ZONE '" + timeZone.getID() + "', '" + formatString + "')";
-            default:
-                throw new UnsupportedOperationException("Unsupported DB type: " + sleuthkitCase.getDatabaseType().name());
-        }
-    }
+    private boolean passes(TimelineFilter filter, TimelineEvent event) {
+        if (filter instanceof TimelineFilter.EventTypeFilter) {
+            return true;
+        } else if (filter instanceof IntersectionFilter) {
+            return ((TimelineFilter.IntersectionFilter<?>) filter).getSubFilters().stream()
+                    .allMatch(subFilter -> passes(subFilter, event));
+        } else if (filter instanceof UnionFilter) {
+            return ((TimelineFilter.UnionFilter<?>) filter).getSubFilters().stream()
+                    .anyMatch(subFilter -> passes(subFilter, event));
+        } else if (filter instanceof DescriptionFilter) {
+            DescriptionFilter descrFilter = (DescriptionFilter) filter;
+            String eventDescription = event.getDescription(descrFilter.getDescriptionLoD());
+            return eventDescription.equalsIgnoreCase(descrFilter.getDescription());
+        } else {
+            return true;
+        }
+    }
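passes replaces the old SQL-side grouping filter with an in-memory recursive walk: intersection nodes require every sub-filter to match, union nodes require any. A standalone sketch of that recursion (the sealed Filter hierarchy is hypothetical shorthand for TimelineFilter):

import java.util.List;
import java.util.function.Predicate;

public class PassesSketch {

    sealed interface Filter permits Leaf, Intersection, Union {}
    record Leaf(Predicate<String> test) implements Filter {}
    record Intersection(List<Filter> subFilters) implements Filter {}
    record Union(List<Filter> subFilters) implements Filter {}

    static boolean passes(Filter filter, String eventDescription) {
        if (filter instanceof Intersection intersection) {
            // all children must match
            return intersection.subFilters().stream().allMatch(sub -> passes(sub, eventDescription));
        } else if (filter instanceof Union union) {
            // any child suffices
            return union.subFilters().stream().anyMatch(sub -> passes(sub, eventDescription));
        } else if (filter instanceof Leaf leaf) {
            return leaf.test().test(eventDescription);
        }
        return true; // unknown filter kinds pass, as in the patch
    }

    public static void main(String[] args) {
        Filter filter = new Intersection(List.of(
                new Leaf(desc -> desc.startsWith("C:/")),
                new Union(List.of(
                        new Leaf(desc -> desc.endsWith("Windows")),
                        new Leaf(desc -> desc.endsWith("Users"))))));
        System.out.println(passes(filter, "C:/Windows")); // true
    }
}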

-    /*
-     * Get a format string that will allow us to group by the requested period
-     * size. That is, with all info more granular than that requested dropped
-     * (replaced with zeros).
-     *
-     * @param timeUnit The ChronoUnit describing what granularity to build a
-     *                 strftime string for
-     *
-     * @return a String formatted according to the sqlite strftime spec
-     *
-     * @see https://www.sqlite.org/lang_datefunc.html
-     */
-    private static String getSQLIteTimeFormat(ChronoUnit timeUnit) {
-        switch (timeUnit) {
-            case YEARS:
-                return "%Y-01-01T00:00:00"; // NON-NLS
-            case MONTHS:
-                return "%Y-%m-01T00:00:00"; // NON-NLS
-            case DAYS:
-                return "%Y-%m-%dT00:00:00"; // NON-NLS
-            case HOURS:
-                return "%Y-%m-%dT%H:00:00"; // NON-NLS
-            case MINUTES:
-                return "%Y-%m-%dT%H:%M:00"; // NON-NLS
-            case SECONDS:
-            default: //seconds - should never happen
-                return "%Y-%m-%dT%H:%M:%S"; // NON-NLS
-        }
-    }
-
-    /**
-     * Get a format string that will allow us to group by the requested period
-     * size. That is, with all info more granular than that requested dropped
-     * (replaced with zeros).
-     *
-     * @param timeUnit The ChronoUnit describing what granularity to build a
-     *                 strftime string for
-     *
-     * @return a String formatted according to the Postgres
-     *         to_char(to_timestamp(time) ... ) spec
-     */
-    private static String getPostgresTimeFormat(ChronoUnit timeUnit) {
-        switch (timeUnit) {
-            case YEARS:
-                return "YYYY-01-01T00:00:00"; // NON-NLS
-            case MONTHS:
-                return "YYYY-MM-01T00:00:00"; // NON-NLS
-            case DAYS:
-                return "YYYY-MM-DDT00:00:00"; // NON-NLS
-            case HOURS:
-                return "YYYY-MM-DDTHH24:00:00"; // NON-NLS
-            case MINUTES:
-                return "YYYY-MM-DDTHH24:MI:00"; // NON-NLS
-            case SECONDS:
-            default: //seconds - should never happen
-                return "YYYY-MM-DDTHH24:MI:SS"; // NON-NLS
-        }
-    }
-
 }
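For reference, the removed helpers built database-side time buckets: a strftime (or to_char) expression that zeroes out everything more granular than the requested unit, so the old query could GROUP BY it. A standalone sketch using the SQLite format strings copied from the removed getSQLIteTimeFormat:

import java.time.temporal.ChronoUnit;

public class TimeBucketSketch {

    static String sqliteTimeFormat(ChronoUnit unit) {
        switch (unit) {
            case YEARS:   return "%Y-01-01T00:00:00";
            case MONTHS:  return "%Y-%m-01T00:00:00";
            case DAYS:    return "%Y-%m-%dT00:00:00";
            case HOURS:   return "%Y-%m-%dT%H:00:00";
            case MINUTES: return "%Y-%m-%dT%H:%M:00";
            default:      return "%Y-%m-%dT%H:%M:%S"; // seconds
        }
    }

    public static void main(String[] args) {
        // e.g. "strftime('%Y-%m-%dT00:00:00', time, 'unixepoch')" buckets rows by day
        System.out.println("strftime('" + sqliteTimeFormat(ChronoUnit.DAYS) + "', time, 'unixepoch')");
    }
}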
@@ -20,16 +20,20 @@ package org.sleuthkit.autopsy.timeline.ui.detailview.datamodel;

 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.ImmutableSortedSet;
 import com.google.common.collect.Sets;
+import static com.google.common.collect.Sets.union;
 import java.util.Collection;
+import static java.util.Collections.emptySet;
+import static java.util.Collections.singleton;
 import java.util.Comparator;
 import java.util.Objects;
 import java.util.Optional;
 import java.util.Set;
 import java.util.SortedSet;
 import org.joda.time.Interval;
 import org.sleuthkit.autopsy.timeline.utils.IntervalUtils;
 import org.sleuthkit.datamodel.DescriptionLoD;
 import org.sleuthkit.datamodel.timeline.EventType;
+import org.sleuthkit.datamodel.timeline.TimelineEvent;

 /**
  * Represents a set of other events clustered together. All the sub events
@@ -38,214 +42,224 @@ import org.sleuthkit.datamodel.timeline.EventType;
  */
 public class EventCluster implements MultiEvent<EventStripe> {

     final private EventStripe parent;

     /**
      * the smallest time interval containing all the clustered events
      */
     final private Interval span;

     /**
      * the type of all the clustered events
      */
     final private EventType type;

     /**
      * the common description of all the clustered events
      */
     final private String description;

     /**
      * the description level of detail that the events were clustered at.
      */
     private final DescriptionLoD lod;

     /**
      * the set of ids of the clustered events
      */
     final private ImmutableSet<Long> eventIDs;

     /**
      * the ids of the subset of clustered events that have at least one tag
      * applied to them
      */
     private final ImmutableSet<Long> tagged;

     /**
      * the ids of the subset of clustered events that have at least one hash set
      * hit
      */
     private final ImmutableSet<Long> hashHits;

     /**
      * merge two event clusters into one new event cluster.
      *
      * @param cluster1
      * @param cluster2
      *
      * @return a new event cluster that is the result of merging the given
      *         event clusters
      */
     public static EventCluster merge(EventCluster cluster1, EventCluster cluster2) {
         if (cluster1.getEventType() != cluster2.getEventType()) {
             throw new IllegalArgumentException("event clusters are not compatible: they have different types");
         }

         if (!cluster1.getDescription().equals(cluster2.getDescription())) {
             throw new IllegalArgumentException("event clusters are not compatible: they have different descriptions");
         }
-        Sets.SetView<Long> idsUnion
-                = Sets.union(cluster1.getEventIDs(), cluster2.getEventIDs());
-        Sets.SetView<Long> hashHitsUnion
-                = Sets.union(cluster1.getEventIDsWithHashHits(), cluster2.getEventIDsWithHashHits());
-        Sets.SetView<Long> taggedUnion
-                = Sets.union(cluster1.getEventIDsWithTags(), cluster2.getEventIDsWithTags());
+        Interval spanningInterval = IntervalUtils.span(cluster1.span, cluster2.span);

-        return new EventCluster(IntervalUtils.span(cluster1.span, cluster2.span),
+        Set<Long> idsUnion = union(cluster1.getEventIDs(), cluster2.getEventIDs());
+        Set<Long> hashHitsUnion = union(cluster1.getEventIDsWithHashHits(), cluster2.getEventIDsWithHashHits());
+        Set<Long> taggedUnion = union(cluster1.getEventIDsWithTags(), cluster2.getEventIDsWithTags());
+
+        return new EventCluster(spanningInterval,
                 cluster1.getEventType(), idsUnion, hashHitsUnion, taggedUnion,
                 cluster1.getDescription(), cluster1.lod);
     }

     private EventCluster(Interval spanningInterval, EventType type, Collection<Long> eventIDs,
             Collection<Long> hashHits, Collection<Long> tagged, String description, DescriptionLoD lod,
             EventStripe parent) {

         this.span = spanningInterval;
         this.type = type;
         this.hashHits = ImmutableSet.copyOf(hashHits);
         this.tagged = ImmutableSet.copyOf(tagged);
         this.description = description;
         this.eventIDs = ImmutableSet.copyOf(eventIDs);
         this.lod = lod;
         this.parent = parent;
     }

     public EventCluster(Interval spanningInterval, EventType type, Collection<Long> eventIDs,
             Collection<Long> hashHits, Collection<Long> tagged, String description, DescriptionLoD lod) {
         this(spanningInterval, type, eventIDs, hashHits, tagged, description, lod, null);
     }

+    public EventCluster(TimelineEvent event, EventType type, DescriptionLoD lod) {
+        this(new Interval(event.getStartMillis(), event.getEndMillis()),
+                type,
+                singleton(event.getEventID()),
+                event.isHashHit() ? singleton(event.getEventID()) : emptySet(),
+                event.isTagged() ? singleton(event.getEventID()) : emptySet(),
+                event.getDescription(lod),
+                lod);
+    }
+
     /**
      * get the EventStripe (if any) that contains this cluster
      *
      * @return an Optional containing the parent stripe of this cluster; empty
      *         if the cluster has no parent set.
      */
     @Override
     public Optional<EventStripe> getParent() {
         return Optional.ofNullable(parent);
     }

     /**
      * get the EventStripe (if any) that contains this cluster
      *
      * @return an Optional containing the parent stripe of this cluster; empty
      *         if the cluster has no parent set.
      */
     @Override
     public Optional<EventStripe> getParentStripe() {
         //since this cluster's parent must be an event stripe, just delegate to getParent();
         return getParent();
     }

     public Interval getSpan() {
         return span;
     }

     @Override
     public long getStartMillis() {
         return span.getStartMillis();
     }

     @Override
     public long getEndMillis() {
         return span.getEndMillis();
     }

     @Override
     public ImmutableSet<Long> getEventIDs() {
         return eventIDs;
     }

     @Override
     public ImmutableSet<Long> getEventIDsWithHashHits() {
         return hashHits;
     }

     @Override
     public ImmutableSet<Long> getEventIDsWithTags() {
         return tagged;
     }

     @Override
     public String getDescription() {
         return description;
     }

     @Override
     public EventType getEventType() {
         return type;
     }

     @Override
     public DescriptionLoD getDescriptionLoD() {
         return lod;
     }

     /**
      * return a new EventCluster identical to this one, except with the given
      * EventBundle as the parent.
      *
      * @param parent
      *
      * @return a new EventCluster identical to this one, except with the given
      *         EventBundle as the parent.
      */
     public EventCluster withParent(EventStripe parent) {
         return new EventCluster(span, type, eventIDs, hashHits, tagged, description, lod, parent);
     }

     @Override
     public SortedSet<EventCluster> getClusters() {
         return ImmutableSortedSet.orderedBy(Comparator.comparing(EventCluster::getStartMillis)).add(this).build();
     }

     @Override
     public String toString() {
         return "EventCluster{" + "description=" + description + ", eventIDs=" + eventIDs.size() + '}';
     }

     @Override
     public int hashCode() {
         int hash = 7;
         hash = 23 * hash + Objects.hashCode(this.type);
         hash = 23 * hash + Objects.hashCode(this.description);
         hash = 23 * hash + Objects.hashCode(this.lod);
         hash = 23 * hash + Objects.hashCode(this.eventIDs);
         return hash;
     }

     @Override
     public boolean equals(Object obj) {
         if (this == obj) {
             return true;
         }
         if (obj == null) {
             return false;
         }
         if (getClass() != obj.getClass()) {
             return false;
         }
         final EventCluster other = (EventCluster) obj;
         if (!Objects.equals(this.description, other.description)) {
             return false;
         }
         if (!Objects.equals(this.type, other.type)) {
             return false;
         }
         if (this.lod != other.lod) {
             return false;
         }
         return Objects.equals(this.eventIDs, other.eventIDs);
     }
 }
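The new single-event constructor above is what lets DetailsViewModel turn each fetched row into a one-event cluster before the merge pass. A standalone sketch of that build-then-merge idea (Cluster is a hypothetical stand-in for EventCluster):

import java.util.Set;
import java.util.TreeSet;

public class SingleEventClusterSketch {

    record Cluster(long startMillis, long endMillis, Set<Long> eventIDs) {
        // one-event cluster: the span collapses to the event's own time
        static Cluster ofEvent(long eventID, long timeMillis) {
            return new Cluster(timeMillis, timeMillis, Set.of(eventID));
        }

        // union of ids, smallest interval covering both spans
        static Cluster merge(Cluster a, Cluster b) {
            Set<Long> ids = new TreeSet<>(a.eventIDs());
            ids.addAll(b.eventIDs());
            return new Cluster(Math.min(a.startMillis(), b.startMillis()),
                    Math.max(a.endMillis(), b.endMillis()), ids);
        }
    }

    public static void main(String[] args) {
        Cluster first = Cluster.ofEvent(1L, 1000L);
        Cluster second = Cluster.ofEvent(2L, 2000L);
        System.out.println(Cluster.merge(first, second));
        // Cluster[startMillis=1000, endMillis=2000, eventIDs=[1, 2]]
    }
}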