Mirror of https://github.com/overcuriousity/autopsy-flatpak.git, synced 2025-07-19 11:07:43 +00:00
create some additional indexes; don't use group_concat uselessly
This commit is contained in:
parent 7a85c14b2d
commit cedeed1a9e
EventDB.java:

@@ -413,10 +413,7 @@ public class EventDB {
         try (ResultSet rs = getDataSourceIDsStmt.executeQuery()) {
             while (rs.next()) {
                 long datasourceID = rs.getLong("datasource_id");
-                //this relies on the fact that no tskObj has ID 0 but 0 is the default value for the datasource_id column in the events table.
-                if (datasourceID != 0) {
-                    hashSet.add(datasourceID);
-                }
+                hashSet.add(datasourceID);
             }
         } catch (SQLException ex) {
             LOGGER.log(Level.SEVERE, "Failed to get MAX time.", ex); // NON-NLS
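Note: the deleted zero-check is not lost. The same filtering moves into SQL in the prepared-statement hunk further down (WHERE datasource_id != 0), consistent with the deleted comment's observation that no TSK object has ID 0 and that 0 is merely the default value of the datasource_id column.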
@@ -583,6 +580,10 @@ public class EventDB {
 
         initializeTagsTable();
 
+        createIndex("events", Arrays.asList("datasource_id"));
+        createIndex("events", Arrays.asList("event_id", "hash_hit"));
+        createIndex("events", Arrays.asList("event_id", "tagged"));
+        createIndex("events", Arrays.asList("file_id"));
         createIndex("events", Arrays.asList("file_id"));
         createIndex("events", Arrays.asList("artifact_id"));
         createIndex("events", Arrays.asList("time"));
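For orientation, here is a minimal sketch of the createIndex(table, columns) helper that the added lines call. The name and argument shape come from the hunk; the body below is an assumption about a typical SQLite implementation, not code from this commit.

    import java.sql.Connection;
    import java.sql.SQLException;
    import java.sql.Statement;
    import java.util.List;

    class IndexHelper {
        // Hypothetical: build e.g. "CREATE INDEX IF NOT EXISTS
        // events_event_id_hash_hit ON events(event_id, hash_hit)".
        static void createIndex(Connection con, String tableName, List<String> columnList) throws SQLException {
            String columns = String.join(", ", columnList);
            String indexName = tableName + "_" + String.join("_", columnList);
            try (Statement stmt = con.createStatement()) {
                stmt.execute("CREATE INDEX IF NOT EXISTS " + indexName
                        + " ON " + tableName + "(" + columns + ")"); // NON-NLS
            }
        }
    }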
@@ -595,7 +596,7 @@ public class EventDB {
                 "INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hash_hit, tagged) " // NON-NLS
                 + "VALUES (?,?,?,?,?,?,?,?,?,?,?,?)"); // NON-NLS
         getHashSetNamesStmt = prepareStatement("SELECT hash_set_id, hash_set_name FROM hash_sets"); // NON-NLS
-        getDataSourceIDsStmt = prepareStatement("SELECT DISTINCT datasource_id FROM events"); // NON-NLS
+        getDataSourceIDsStmt = prepareStatement("SELECT DISTINCT datasource_id FROM events WHERE datasource_id != 0"); // NON-NLS
         getMaxTimeStmt = prepareStatement("SELECT Max(time) AS max FROM events"); // NON-NLS
         getMinTimeStmt = prepareStatement("SELECT Min(time) AS min FROM events"); // NON-NLS
         getEventByIDStmt = prepareStatement("SELECT * FROM events WHERE event_id = ?"); // NON-NLS
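Presumably the new index on datasource_id (added above) is what makes the revised getDataSourceIDsStmt cheap: SQLite can answer SELECT DISTINCT datasource_id ... WHERE datasource_id != 0 from the index alone, without scanning the events table.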
@@ -1041,7 +1042,7 @@ public class EventDB {
      * the supplied filter, aggregated according to the given event type
      * and description zoom levels
      */
-    List<EventCluster> getAggregatedEvents(ZoomParams params) {
+    List<EventCluster> getClusteredEvents(ZoomParams params) {
         //unpack params
         Interval timeRange = params.getTimeRange();
         RootFilter filter = params.getFilter();
@@ -1079,7 +1080,7 @@ public class EventDB {
         try (Statement createStatement = con.createStatement();
                 ResultSet rs = createStatement.executeQuery(query)) {
             while (rs.next()) {
-                events.add(aggregateEventHelper(rs, useSubTypes, descriptionLOD, filter.getTagsFilter()));
+                events.add(eventClusterHelper(rs, useSubTypes, descriptionLOD, filter.getTagsFilter()));
             }
         } catch (SQLException ex) {
             LOGGER.log(Level.SEVERE, "Failed to get aggregate events with query: " + query, ex); // NON-NLS
@@ -1087,11 +1088,11 @@ public class EventDB {
             DBLock.unlock();
         }
 
-        return mergeAggregateEvents(rangeInfo.getPeriodSize().getPeriod(), events);
+        return mergeEventClusters(rangeInfo.getPeriodSize().getPeriod(), events);
     }
 
     /**
-     * map a single row in a ResultSet to an AggregateEvent
+     * map a single row in a ResultSet to an EventCluster
      *
      * @param rs the result set whose current row should be mapped
      * @param useSubTypes use the sub_type column if true, else use the
@@ -1103,7 +1104,7 @@ public class EventDB {
      *
      * @throws SQLException
      */
-    private EventCluster aggregateEventHelper(ResultSet rs, boolean useSubTypes, DescriptionLOD descriptionLOD, TagsFilter filter) throws SQLException {
+    private EventCluster eventClusterHelper(ResultSet rs, boolean useSubTypes, DescriptionLOD descriptionLOD, TagsFilter filter) throws SQLException {
         Interval interval = new Interval(rs.getLong("min(time)") * 1000, rs.getLong("max(time)") * 1000, TimeLineController.getJodaTimeZone());// NON-NLS
         String eventIDsString = rs.getString("event_ids");// NON-NLS
         Set<Long> eventIDs = SQLHelper.unGroupConcat(eventIDsString, Long::valueOf);
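eventClusterHelper still uses SQLHelper.unGroupConcat to split the event_ids column, which is a genuine group_concat result. A hypothetical sketch of what such a helper looks like, assuming the usual comma-separated group_concat output (the real SQLHelper is not shown in this commit):

    import java.util.Collections;
    import java.util.Set;
    import java.util.function.Function;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    class SQLHelperSketch {
        // Split "1,2,3" from group_concat() and map each token, e.g. with Long::valueOf.
        static <X> Set<X> unGroupConcat(String groupConcat, Function<String, X> mapper) {
            return (groupConcat == null || groupConcat.isEmpty())
                    ? Collections.emptySet()
                    : Stream.of(groupConcat.split(","))
                            .map(mapper)
                            .collect(Collectors.toSet());
        }
    }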
@@ -1111,20 +1112,20 @@ public class EventDB {
         EventType type = useSubTypes ? RootEventType.allTypes.get(rs.getInt("sub_type")) : BaseTypes.values()[rs.getInt("base_type")];// NON-NLS
 
         Set<Long> hashHits = new HashSet<>();
-        String hashHitQuery = "SELECT group_concat(event_id) FROM events WHERE event_id IN (" + eventIDsString + ") AND hash_hit = 1";// NON-NLS
+        String hashHitQuery = "SELECT event_id FROM events WHERE event_id IN (" + eventIDsString + ") AND hash_hit = 1";// NON-NLS
         try (Statement stmt = con.createStatement();
                 ResultSet hashHitsRS = stmt.executeQuery(hashHitQuery)) {
             while (hashHitsRS.next()) {
-                hashHits = SQLHelper.unGroupConcat(hashHitsRS.getString("group_concat(event_id)"), Long::valueOf);// NON-NLS
+                hashHits.add(hashHitsRS.getLong("event_id"));
             }
         }
 
         Set<Long> tagged = new HashSet<>();
-        String taggedQuery = "SELECT group_concat(event_id) FROM events WHERE event_id IN (" + eventIDsString + ") AND tagged = 1";// NON-NLS
+        String taggedQuery = "SELECT event_id FROM events WHERE event_id IN (" + eventIDsString + ") AND tagged = 1";// NON-NLS
         try (Statement stmt = con.createStatement();
                 ResultSet taggedRS = stmt.executeQuery(taggedQuery)) {
             while (taggedRS.next()) {
-                tagged = SQLHelper.unGroupConcat(taggedRS.getString("group_concat(event_id)"), Long::valueOf);// NON-NLS
+                tagged.add(taggedRS.getLong("event_id"));
             }
         }
 
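The change in this hunk, restated as a standalone sketch: rather than having SQLite build one group_concat() string that Java immediately splits apart again (note also that the old loop bodies overwrote hashHits and tagged on every iteration instead of accumulating), the rewritten queries return bare event_id rows that are added one at a time. The scaffolding below is illustrative; the table and column names come from the hunk.

    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;
    import java.util.HashSet;
    import java.util.Set;

    class FlaggedEventLookup {
        // Collect the event ids from the given id list that have the flag column
        // set, reading plain rows instead of parsing a group_concat() string.
        static Set<Long> flaggedIds(Connection con, String eventIDsString, String flagColumn) throws SQLException {
            Set<Long> ids = new HashSet<>();
            String query = "SELECT event_id FROM events WHERE event_id IN ("
                    + eventIDsString + ") AND " + flagColumn + " = 1"; // NON-NLS
            try (Statement stmt = con.createStatement();
                    ResultSet rs = stmt.executeQuery(query)) {
                while (rs.next()) {
                    ids.add(rs.getLong("event_id"));
                }
            }
            return ids;
        }
    }

Since eventIDsString here is itself the output of a group_concat over trusted ids, the string concatenation into the IN list is tolerable; for user-supplied input a parameterized query would be preferable.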
@@ -1145,7 +1146,7 @@ public class EventDB {
      *
      * @return
      */
-    static private List<EventCluster> mergeAggregateEvents(Period timeUnitLength, List<EventCluster> preMergedEvents) {
+    static private List<EventCluster> mergeEventClusters(Period timeUnitLength, List<EventCluster> preMergedEvents) {
 
         //effectively map from type to (map from description to events)
         Map<EventType, SetMultimap< String, EventCluster>> typeMap = new HashMap<>();
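The context line above shows mergeEventClusters building a map from event type to a description-to-clusters multimap. A hedged sketch of how such a structure is typically populated with Guava; the accessor names getEventType() and getDescription() are assumptions, not confirmed by the hunk:

    import com.google.common.collect.HashMultimap;
    import com.google.common.collect.SetMultimap;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class ClusterGrouping {
        // Group clusters by type, then by description, so clusters that share
        // both can later be merged across adjacent time units.
        static Map<EventType, SetMultimap<String, EventCluster>> groupByTypeAndDescription(List<EventCluster> preMergedEvents) {
            Map<EventType, SetMultimap<String, EventCluster>> typeMap = new HashMap<>();
            for (EventCluster cluster : preMergedEvents) {
                typeMap.computeIfAbsent(cluster.getEventType(), t -> HashMultimap.create())
                        .put(cluster.getDescription(), cluster);
            }
            return typeMap;
        }
    }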
EventsRepository.java:

@@ -149,7 +149,7 @@ public class EventsRepository {
         aggregateEventsCache = CacheBuilder.newBuilder()
                 .maximumSize(1000L)
                 .expireAfterAccess(10, TimeUnit.MINUTES
-                ).build(CacheLoader.from(eventDB::getAggregatedEvents));
+                ).build(CacheLoader.from(eventDB::getClusteredEvents));
         maxCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMaxTime));
         minCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMinTime));
         this.modelInstance = new FilteredEventsModel(this, currentStateProperty);
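The rename's only caller-visible effect is here: the Guava loading cache now computes misses through getClusteredEvents. Reduced to a minimal sketch, assuming the project's ZoomParams, EventCluster, and EventDB types are on the classpath:

    import com.google.common.cache.CacheBuilder;
    import com.google.common.cache.CacheLoader;
    import com.google.common.cache.LoadingCache;
    import java.util.List;
    import java.util.concurrent.TimeUnit;

    class ClusterCacheExample {
        static LoadingCache<ZoomParams, List<EventCluster>> build(EventDB eventDB) {
            // Entries are evicted 10 minutes after last access, at most 1000 are
            // kept; misses are computed by the renamed EventDB.getClusteredEvents.
            return CacheBuilder.newBuilder()
                    .maximumSize(1000L)
                    .expireAfterAccess(10, TimeUnit.MINUTES)
                    .build(CacheLoader.from(eventDB::getClusteredEvents));
        }
    }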