revert experimental sql query timeout

This commit is contained in:
jmillman 2015-12-01 15:55:25 -05:00
parent 6f2646d061
commit 67dbaa4cdc
2 changed files with 71 additions and 80 deletions

View File

@ -26,7 +26,6 @@ import java.sql.DriverManager;
import java.sql.PreparedStatement; import java.sql.PreparedStatement;
import java.sql.ResultSet; import java.sql.ResultSet;
import java.sql.SQLException; import java.sql.SQLException;
import java.sql.SQLTimeoutException;
import java.sql.Statement; import java.sql.Statement;
import java.sql.Types; import java.sql.Types;
import java.util.ArrayList; import java.util.ArrayList;
@ -53,7 +52,6 @@ import org.apache.commons.lang3.tuple.ImmutablePair;
import org.joda.time.DateTimeZone; import org.joda.time.DateTimeZone;
import org.joda.time.Interval; import org.joda.time.Interval;
import org.joda.time.Period; import org.joda.time.Period;
import org.openide.util.Exceptions;
import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.Version; import org.sleuthkit.autopsy.coreutils.Version;
@ -1113,18 +1111,11 @@ public class EventDB {
List<EventCluster> events = new ArrayList<>(); List<EventCluster> events = new ArrayList<>();
DBLock.lock(); DBLock.lock();
try (Statement createStatement = con.createStatement();) { try (Statement createStatement = con.createStatement();
createStatement.setQueryTimeout(1); ResultSet rs = createStatement.executeQuery(query)) {
try (ResultSet rs = createStatement.executeQuery(query)) { while (rs.next()) {
events.add(eventClusterHelper(rs, useSubTypes, descriptionLOD, filter.getTagsFilter()));
while (rs.next()) {
events.add(eventClusterHelper(rs, useSubTypes, descriptionLOD, filter.getTagsFilter()));
}
} }
} catch (SQLTimeoutException timeout) {
Exceptions.printStackTrace(timeout);
} catch (SQLException ex) { } catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "Failed to get events with query: " + query, ex); // NON-NLS LOGGER.log(Level.SEVERE, "Failed to get events with query: " + query, ex); // NON-NLS
} finally { } finally {

View File

@ -94,41 +94,41 @@ import org.sleuthkit.datamodel.TskData;
* *
*/ */
public class EventsRepository { public class EventsRepository {
private final static Logger LOGGER = Logger.getLogger(EventsRepository.class.getName()); private final static Logger LOGGER = Logger.getLogger(EventsRepository.class.getName());
private final Executor workerExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat("eventrepository-worker-%d").build()); private final Executor workerExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat("eventrepository-worker-%d").build());
private DBPopulationWorker dbWorker; private DBPopulationWorker dbWorker;
private final EventDB eventDB; private final EventDB eventDB;
private final Case autoCase; private final Case autoCase;
private final FilteredEventsModel modelInstance; private final FilteredEventsModel modelInstance;
private final LoadingCache<Object, Long> maxCache; private final LoadingCache<Object, Long> maxCache;
private final LoadingCache<Object, Long> minCache; private final LoadingCache<Object, Long> minCache;
private final LoadingCache<Long, TimeLineEvent> idToEventCache; private final LoadingCache<Long, TimeLineEvent> idToEventCache;
private final LoadingCache<ZoomParams, Map<EventType, Long>> eventCountsCache; private final LoadingCache<ZoomParams, Map<EventType, Long>> eventCountsCache;
private final LoadingCache<ZoomParams, List<EventStripe>> eventStripeCache; private final LoadingCache<ZoomParams, List<EventStripe>> eventStripeCache;
private final ObservableMap<Long, String> datasourcesMap = FXCollections.observableHashMap(); private final ObservableMap<Long, String> datasourcesMap = FXCollections.observableHashMap();
private final ObservableMap<Long, String> hashSetMap = FXCollections.observableHashMap(); private final ObservableMap<Long, String> hashSetMap = FXCollections.observableHashMap();
private final ObservableList<TagName> tagNames = FXCollections.observableArrayList(); private final ObservableList<TagName> tagNames = FXCollections.observableArrayList();
public Case getAutoCase() { public Case getAutoCase() {
return autoCase; return autoCase;
} }
public ObservableList<TagName> getTagNames() { public ObservableList<TagName> getTagNames() {
return tagNames; return tagNames;
} }
synchronized public ObservableMap<Long, String> getDatasourcesMap() { synchronized public ObservableMap<Long, String> getDatasourcesMap() {
return datasourcesMap; return datasourcesMap;
} }
synchronized public ObservableMap<Long, String> getHashSetMap() { synchronized public ObservableMap<Long, String> getHashSetMap() {
return hashSetMap; return hashSetMap;
} }
public Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter) { public Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter) {
return eventDB.getBoundingEventsInterval(timeRange, filter); return eventDB.getBoundingEventsInterval(timeRange, filter);
} }
@ -140,7 +140,7 @@ public class EventsRepository {
public FilteredEventsModel getEventsModel() { public FilteredEventsModel getEventsModel() {
return modelInstance; return modelInstance;
} }
public EventsRepository(Case autoCase, ReadOnlyObjectProperty<ZoomParams> currentStateProperty) { public EventsRepository(Case autoCase, ReadOnlyObjectProperty<ZoomParams> currentStateProperty) {
this.autoCase = autoCase; this.autoCase = autoCase;
//TODO: we should check that case is open, or get passed a case object/directory -jm //TODO: we should check that case is open, or get passed a case object/directory -jm
@ -178,42 +178,42 @@ public class EventsRepository {
return minCache.getUnchecked("min"); // NON-NLS return minCache.getUnchecked("min"); // NON-NLS
// return eventDB.getMinTime(); // return eventDB.getMinTime();
} }
private void recordLastArtifactID(long lastArtfID) { private void recordLastArtifactID(long lastArtfID) {
eventDB.recordLastArtifactID(lastArtfID); eventDB.recordLastArtifactID(lastArtfID);
} }
private void recordWasIngestRunning(Boolean wasIngestRunning) { private void recordWasIngestRunning(Boolean wasIngestRunning) {
eventDB.recordWasIngestRunning(wasIngestRunning); eventDB.recordWasIngestRunning(wasIngestRunning);
} }
private void recordLastObjID(Long lastObjID) { private void recordLastObjID(Long lastObjID) {
eventDB.recordLastObjID(lastObjID); eventDB.recordLastObjID(lastObjID);
} }
public boolean getWasIngestRunning() { public boolean getWasIngestRunning() {
return eventDB.getWasIngestRunning(); return eventDB.getWasIngestRunning();
} }
public Long getLastObjID() { public Long getLastObjID() {
return eventDB.getLastObjID(); return eventDB.getLastObjID();
} }
public long getLastArtfactID() { public long getLastArtfactID() {
return eventDB.getLastArtfactID(); return eventDB.getLastArtfactID();
} }
public TimeLineEvent getEventById(Long eventID) { public TimeLineEvent getEventById(Long eventID) {
return idToEventCache.getUnchecked(eventID); return idToEventCache.getUnchecked(eventID);
} }
synchronized public Set<TimeLineEvent> getEventsById(Collection<Long> eventIDs) { synchronized public Set<TimeLineEvent> getEventsById(Collection<Long> eventIDs) {
return eventIDs.stream() return eventIDs.stream()
.map(idToEventCache::getUnchecked) .map(idToEventCache::getUnchecked)
.collect(Collectors.toSet()); .collect(Collectors.toSet());
} }
synchronized public List<EventStripe> getEventStripes(ZoomParams params) { synchronized public List<EventStripe> getEventStripes(ZoomParams params) {
try { try {
return eventStripeCache.get(params); return eventStripeCache.get(params);
@ -222,11 +222,11 @@ public class EventsRepository {
return Collections.emptyList(); return Collections.emptyList();
} }
} }
synchronized public Map<EventType, Long> countEvents(ZoomParams params) { synchronized public Map<EventType, Long> countEvents(ZoomParams params) {
return eventCountsCache.getUnchecked(params); return eventCountsCache.getUnchecked(params);
} }
private void invalidateCaches() { private void invalidateCaches() {
minCache.invalidateAll(); minCache.invalidateAll();
maxCache.invalidateAll(); maxCache.invalidateAll();
@ -234,15 +234,15 @@ public class EventsRepository {
eventStripeCache.invalidateAll(); eventStripeCache.invalidateAll();
idToEventCache.invalidateAll(); idToEventCache.invalidateAll();
} }
public Set<Long> getEventIDs(Interval timeRange, RootFilter filter) { public Set<Long> getEventIDs(Interval timeRange, RootFilter filter) {
return eventDB.getEventIDs(timeRange, filter); return eventDB.getEventIDs(timeRange, filter);
} }
public Interval getSpanningInterval(Collection<Long> eventIDs) { public Interval getSpanningInterval(Collection<Long> eventIDs) {
return eventDB.getSpanningInterval(eventIDs); return eventDB.getSpanningInterval(eventIDs);
} }
public boolean hasNewColumns() { public boolean hasNewColumns() {
return eventDB.hasNewColumns(); return eventDB.hasNewColumns();
} }
@ -266,7 +266,7 @@ public class EventsRepository {
* @param skCase * @param skCase
*/ */
synchronized private void populateFilterData(SleuthkitCase skCase) { synchronized private void populateFilterData(SleuthkitCase skCase) {
for (Map.Entry<Long, String> hashSet : eventDB.getHashSetNames().entrySet()) { for (Map.Entry<Long, String> hashSet : eventDB.getHashSetNames().entrySet()) {
hashSetMap.putIfAbsent(hashSet.getKey(), hashSet.getValue()); hashSetMap.putIfAbsent(hashSet.getKey(), hashSet.getValue());
} }
@ -278,7 +278,7 @@ public class EventsRepository {
LOGGER.log(Level.SEVERE, "Failed to get datasource by ID.", ex); LOGGER.log(Level.SEVERE, "Failed to get datasource by ID.", ex);
} }
} }
try { try {
//should this only be tags applied to files or event bearing artifacts? //should this only be tags applied to files or event bearing artifacts?
tagNames.setAll(skCase.getTagNamesInUse()); tagNames.setAll(skCase.getTagNamesInUse());
@ -286,7 +286,7 @@ public class EventsRepository {
LOGGER.log(Level.SEVERE, "Failed to get tag names in use.", ex); LOGGER.log(Level.SEVERE, "Failed to get tag names in use.", ex);
} }
} }
synchronized public Set<Long> addTag(long objID, Long artifactID, Tag tag, EventDB.EventTransaction trans) { synchronized public Set<Long> addTag(long objID, Long artifactID, Tag tag, EventDB.EventTransaction trans) {
Set<Long> updatedEventIDs = eventDB.addTag(objID, artifactID, tag, trans); Set<Long> updatedEventIDs = eventDB.addTag(objID, artifactID, tag, trans);
if (!updatedEventIDs.isEmpty()) { if (!updatedEventIDs.isEmpty()) {
@ -294,7 +294,7 @@ public class EventsRepository {
} }
return updatedEventIDs; return updatedEventIDs;
} }
synchronized public Set<Long> deleteTag(long objID, Long artifactID, long tagID, boolean tagged) { synchronized public Set<Long> deleteTag(long objID, Long artifactID, long tagID, boolean tagged) {
Set<Long> updatedEventIDs = eventDB.deleteTag(objID, artifactID, tagID, tagged); Set<Long> updatedEventIDs = eventDB.deleteTag(objID, artifactID, tagID, tagged);
if (!updatedEventIDs.isEmpty()) { if (!updatedEventIDs.isEmpty()) {
@ -302,7 +302,7 @@ public class EventsRepository {
} }
return updatedEventIDs; return updatedEventIDs;
} }
synchronized private void invalidateCaches(Set<Long> updatedEventIDs) { synchronized private void invalidateCaches(Set<Long> updatedEventIDs) {
eventCountsCache.invalidateAll(); eventCountsCache.invalidateAll();
eventStripeCache.invalidateAll(); eventStripeCache.invalidateAll();
@ -342,21 +342,21 @@ public class EventsRepository {
recordLastArtifactID(lastArtfID); recordLastArtifactID(lastArtfID);
recordWasIngestRunning(injestRunning); recordWasIngestRunning(injestRunning);
} }
public boolean areFiltersEquivalent(RootFilter f1, RootFilter f2) { public boolean areFiltersEquivalent(RootFilter f1, RootFilter f2) {
return SQLHelper.getSQLWhere(f1).equals(SQLHelper.getSQLWhere(f2)); return SQLHelper.getSQLWhere(f1).equals(SQLHelper.getSQLWhere(f2));
} }
@ThreadConfined(type = ThreadConfined.ThreadType.JFX) @ThreadConfined(type = ThreadConfined.ThreadType.JFX)
public boolean isRebuilding() { public boolean isRebuilding() {
return dbWorker.isRunning(); return dbWorker.isRunning();
} }
@ThreadConfined(type = ThreadConfined.ThreadType.JFX) @ThreadConfined(type = ThreadConfined.ThreadType.JFX)
public CancellationProgressTask<Void> rebuildRepository() { public CancellationProgressTask<Void> rebuildRepository() {
return rebuildRepository(DBPopulationMode.FULL); return rebuildRepository(DBPopulationMode.FULL);
} }
@ThreadConfined(type = ThreadConfined.ThreadType.JFX) @ThreadConfined(type = ThreadConfined.ThreadType.JFX)
public CancellationProgressTask<Void> rebuildTags() { public CancellationProgressTask<Void> rebuildTags() {
return rebuildRepository(DBPopulationMode.TAGS_ONLY); return rebuildRepository(DBPopulationMode.TAGS_ONLY);
@ -376,9 +376,9 @@ public class EventsRepository {
workerExecutor.execute(dbWorker); workerExecutor.execute(dbWorker);
return dbWorker; return dbWorker;
} }
private enum DBPopulationMode { private enum DBPopulationMode {
FULL, FULL,
TAGS_ONLY; TAGS_ONLY;
} }
@ -388,38 +388,38 @@ public class EventsRepository {
* the alternatives I can think of seem even worse. -jm * the alternatives I can think of seem even worse. -jm
*/ */
private class DBPopulationWorker extends CancellationProgressTask<Void> { private class DBPopulationWorker extends CancellationProgressTask<Void> {
private final ReadOnlyBooleanWrapper cancellable = new ReadOnlyBooleanWrapper(true); private final ReadOnlyBooleanWrapper cancellable = new ReadOnlyBooleanWrapper(true);
private final DBPopulationMode dbPopulationMode; private final DBPopulationMode dbPopulationMode;
private final SleuthkitCase skCase; private final SleuthkitCase skCase;
private final TagsManager tagsManager; private final TagsManager tagsManager;
private ProgressHandle progressHandle; private ProgressHandle progressHandle;
@Override @Override
public ReadOnlyBooleanProperty cancellableProperty() { public ReadOnlyBooleanProperty cancellableProperty() {
return cancellable.getReadOnlyProperty(); return cancellable.getReadOnlyProperty();
} }
@Override @Override
public boolean requestCancel() { public boolean requestCancel() {
Platform.runLater(() -> cancellable.set(false)); Platform.runLater(() -> cancellable.set(false));
return super.requestCancel(); return super.requestCancel();
} }
@Override @Override
protected void updateTitle(String title) { protected void updateTitle(String title) {
super.updateTitle(title); super.updateTitle(title);
progressHandle.setDisplayName(title); progressHandle.setDisplayName(title);
} }
@Override @Override
protected void updateMessage(String message) { protected void updateMessage(String message) {
super.updateMessage(message); super.updateMessage(message);
progressHandle.progress(message); progressHandle.progress(message);
} }
@Override @Override
protected void updateProgress(double workDone, double max) { protected void updateProgress(double workDone, double max) {
super.updateProgress(workDone, max); super.updateProgress(workDone, max);
@ -427,7 +427,7 @@ public class EventsRepository {
progressHandle.progress((int) workDone); progressHandle.progress((int) workDone);
} }
} }
@Override @Override
protected void updateProgress(long workDone, long max) { protected void updateProgress(long workDone, long max) {
super.updateProgress(workDone, max); super.updateProgress(workDone, max);
@ -436,13 +436,13 @@ public class EventsRepository {
progressHandle.progress((int) workDone); progressHandle.progress((int) workDone);
} }
} }
DBPopulationWorker(DBPopulationMode mode) { DBPopulationWorker(DBPopulationMode mode) {
skCase = autoCase.getSleuthkitCase(); skCase = autoCase.getSleuthkitCase();
tagsManager = autoCase.getServices().getTagsManager(); tagsManager = autoCase.getServices().getTagsManager();
this.dbPopulationMode = mode; this.dbPopulationMode = mode;
} }
void restartProgressHandle(String title, String message, Double workDone, double total, Boolean cancellable) { void restartProgressHandle(String title, String message, Double workDone, double total, Boolean cancellable) {
if (progressHandle != null) { if (progressHandle != null) {
progressHandle.finish(); progressHandle.finish();
@ -450,7 +450,7 @@ public class EventsRepository {
progressHandle = cancellable progressHandle = cancellable
? ProgressHandleFactory.createHandle(title, this::requestCancel) ? ProgressHandleFactory.createHandle(title, this::requestCancel)
: ProgressHandleFactory.createHandle(title); : ProgressHandleFactory.createHandle(title);
if (workDone < 0) { if (workDone < 0) {
progressHandle.start(); progressHandle.start();
} else { } else {
@ -460,7 +460,7 @@ public class EventsRepository {
updateMessage(message); updateMessage(message);
updateProgress(workDone, total); updateProgress(workDone, total);
} }
@Override @Override
@NbBundle.Messages({"progressWindow.msg.refreshingFileTags=Refreshing file tags", @NbBundle.Messages({"progressWindow.msg.refreshingFileTags=Refreshing file tags",
"progressWindow.msg.refreshingResultTags=Refreshing result tags", "progressWindow.msg.refreshingResultTags=Refreshing result tags",
@ -473,7 +473,7 @@ public class EventsRepository {
long lastObjId = skCase.getLastObjectId(); long lastObjId = skCase.getLastObjectId();
long lastArtfID = TimeLineController.getCaseLastArtifactID(skCase); long lastArtfID = TimeLineController.getCaseLastArtifactID(skCase);
boolean injestRunning = IngestManager.getInstance().isIngestRunning(); boolean injestRunning = IngestManager.getInstance().isIngestRunning();
if (dbPopulationMode == DBPopulationMode.FULL) { if (dbPopulationMode == DBPopulationMode.FULL) {
//drop old db, and add back MAC and artifact events //drop old db, and add back MAC and artifact events
LOGGER.log(Level.INFO, "Beginning population of timeline db."); // NON-NLS LOGGER.log(Level.INFO, "Beginning population of timeline db."); // NON-NLS
@ -483,7 +483,7 @@ public class EventsRepository {
//grab ids of all files //grab ids of all files
List<Long> fileIDs = skCase.findAllFileIdsWhere("name != '.' AND name != '..'"); List<Long> fileIDs = skCase.findAllFileIdsWhere("name != '.' AND name != '..'");
final int numFiles = fileIDs.size(); final int numFiles = fileIDs.size();
trans = eventDB.beginTransaction(); trans = eventDB.beginTransaction();
insertMACTimeEvents(numFiles, fileIDs, trans); insertMACTimeEvents(numFiles, fileIDs, trans);
insertArtifactDerivedEvents(trans); insertArtifactDerivedEvents(trans);
@ -495,19 +495,19 @@ public class EventsRepository {
LOGGER.log(Level.INFO, "dropping old tags"); // NON-NLS LOGGER.log(Level.INFO, "dropping old tags"); // NON-NLS
eventDB.reInitializeTags(); eventDB.reInitializeTags();
} }
LOGGER.log(Level.INFO, "updating content tags"); // NON-NLS LOGGER.log(Level.INFO, "updating content tags"); // NON-NLS
List<ContentTag> contentTags = tagsManager.getAllContentTags(); List<ContentTag> contentTags = tagsManager.getAllContentTags();
int currentWorkTotal = contentTags.size(); int currentWorkTotal = contentTags.size();
restartProgressHandle(Bundle.progressWindow_msg_refreshingFileTags(), "", 0D, currentWorkTotal, true); restartProgressHandle(Bundle.progressWindow_msg_refreshingFileTags(), "", 0D, currentWorkTotal, true);
insertContentTags(currentWorkTotal, contentTags, trans); insertContentTags(currentWorkTotal, contentTags, trans);
LOGGER.log(Level.INFO, "updating artifact tags"); // NON-NLS LOGGER.log(Level.INFO, "updating artifact tags"); // NON-NLS
List<BlackboardArtifactTag> artifactTags = tagsManager.getAllBlackboardArtifactTags(); List<BlackboardArtifactTag> artifactTags = tagsManager.getAllBlackboardArtifactTags();
currentWorkTotal = artifactTags.size(); currentWorkTotal = artifactTags.size();
restartProgressHandle(Bundle.progressWindow_msg_refreshingResultTags(), "", 0D, currentWorkTotal, true); restartProgressHandle(Bundle.progressWindow_msg_refreshingResultTags(), "", 0D, currentWorkTotal, true);
insertArtifactTags(currentWorkTotal, artifactTags, trans); insertArtifactTags(currentWorkTotal, artifactTags, trans);
LOGGER.log(Level.INFO, "committing db"); // NON-NLS LOGGER.log(Level.INFO, "committing db"); // NON-NLS
Platform.runLater(() -> cancellable.set(false)); Platform.runLater(() -> cancellable.set(false));
restartProgressHandle(Bundle.progressWindow_msg_commitingDb(), "", -1D, 1, false); restartProgressHandle(Bundle.progressWindow_msg_commitingDb(), "", -1D, 1, false);
@ -515,18 +515,18 @@ public class EventsRepository {
if (isCancelRequested() == false) { if (isCancelRequested() == false) {
recordDBPopulationState(lastObjId, lastArtfID, injestRunning); recordDBPopulationState(lastObjId, lastArtfID, injestRunning);
} }
eventDB.analyze(); eventDB.analyze();
populateFilterData(skCase); populateFilterData(skCase);
invalidateCaches(); invalidateCaches();
progressHandle.finish(); progressHandle.finish();
if (isCancelRequested()) { if (isCancelRequested()) {
cancel(); cancel();
} }
return null; return null;
} }
private void insertArtifactTags(int currentWorkTotal, List<BlackboardArtifactTag> artifactTags, EventDB.EventTransaction trans) { private void insertArtifactTags(int currentWorkTotal, List<BlackboardArtifactTag> artifactTags, EventDB.EventTransaction trans) {
for (int i = 0; i < currentWorkTotal; i++) { for (int i = 0; i < currentWorkTotal; i++) {
if (isCancelRequested()) { if (isCancelRequested()) {
@ -537,7 +537,7 @@ public class EventsRepository {
eventDB.addTag(artifactTag.getContent().getId(), artifactTag.getArtifact().getArtifactID(), artifactTag, trans); eventDB.addTag(artifactTag.getContent().getId(), artifactTag.getArtifact().getArtifactID(), artifactTag, trans);
} }
} }
private void insertContentTags(int currentWorkTotal, List<ContentTag> contentTags, EventDB.EventTransaction trans) { private void insertContentTags(int currentWorkTotal, List<ContentTag> contentTags, EventDB.EventTransaction trans) {
for (int i = 0; i < currentWorkTotal; i++) { for (int i = 0; i < currentWorkTotal; i++) {
if (isCancelRequested()) { if (isCancelRequested()) {
@ -548,7 +548,7 @@ public class EventsRepository {
eventDB.addTag(contentTag.getContent().getId(), null, contentTag, trans); eventDB.addTag(contentTag.getContent().getId(), null, contentTag, trans);
} }
} }
private void insertArtifactDerivedEvents(EventDB.EventTransaction trans) { private void insertArtifactDerivedEvents(EventDB.EventTransaction trans) {
//insert artifact based events //insert artifact based events
//TODO: use (not-yet existing api) to grab all artifacts with timestamps, rather than the hardcoded lists in EventType -jm //TODO: use (not-yet existing api) to grab all artifacts with timestamps, rather than the hardcoded lists in EventType -jm
@ -562,7 +562,7 @@ public class EventsRepository {
} }
} }
} }
@NbBundle.Messages("progressWindow.msg.populateMacEventsFiles=Populating MAC time events for files") @NbBundle.Messages("progressWindow.msg.populateMacEventsFiles=Populating MAC time events for files")
private void insertMACTimeEvents(final int numFiles, List<Long> fileIDs, EventDB.EventTransaction trans) { private void insertMACTimeEvents(final int numFiles, List<Long> fileIDs, EventDB.EventTransaction trans) {
restartProgressHandle(Bundle.progressWindow_msg_populateMacEventsFiles(), "", 0D, numFiles, true); restartProgressHandle(Bundle.progressWindow_msg_populateMacEventsFiles(), "", 0D, numFiles, true);
@ -573,7 +573,7 @@ public class EventsRepository {
long fID = fileIDs.get(i); long fID = fileIDs.get(i);
try { try {
AbstractFile f = skCase.getAbstractFileById(fID); AbstractFile f = skCase.getAbstractFileById(fID);
if (isNull(f)) { if (isNull(f)) {
LOGGER.log(Level.WARNING, "Failed to get data for file : {0}", fID); // NON-NLS LOGGER.log(Level.WARNING, "Failed to get data for file : {0}", fID); // NON-NLS
} else { } else {
@ -586,7 +586,7 @@ public class EventsRepository {
} }
} }
} }
private void insertEventsForFile(AbstractFile f, EventDB.EventTransaction trans) throws TskCoreException { private void insertEventsForFile(AbstractFile f, EventDB.EventTransaction trans) throws TskCoreException {
//gather time stamps into map //gather time stamps into map
EnumMap<FileSystemTypes, Long> timeMap = new EnumMap<>(FileSystemTypes.class); EnumMap<FileSystemTypes, Long> timeMap = new EnumMap<>(FileSystemTypes.class);
@ -606,16 +606,16 @@ public class EventsRepository {
final String parentPath = f.getParentPath(); final String parentPath = f.getParentPath();
long datasourceID = f.getDataSource().getId(); long datasourceID = f.getDataSource().getId();
String datasourceName = StringUtils.substringBeforeLast(uniquePath, parentPath); String datasourceName = StringUtils.substringBeforeLast(uniquePath, parentPath);
String rootFolder = StringUtils.substringBefore(StringUtils.substringAfter(parentPath, "/"), "/"); String rootFolder = StringUtils.substringBefore(StringUtils.substringAfter(parentPath, "/"), "/");
String shortDesc = datasourceName + "/" + StringUtils.defaultString(rootFolder); String shortDesc = datasourceName + "/" + StringUtils.defaultString(rootFolder);
shortDesc = shortDesc.endsWith("/") ? shortDesc : shortDesc + "/"; shortDesc = shortDesc.endsWith("/") ? shortDesc : shortDesc + "/";
String medDesc = datasourceName + parentPath; String medDesc = datasourceName + parentPath;
final TskData.FileKnown known = f.getKnown(); final TskData.FileKnown known = f.getKnown();
Set<String> hashSets = f.getHashSetNames(); Set<String> hashSets = f.getHashSetNames();
List<ContentTag> tags = tagsManager.getContentTagsByContent(f); List<ContentTag> tags = tagsManager.getContentTagsByContent(f);
for (Map.Entry<FileSystemTypes, Long> timeEntry : timeMap.entrySet()) { for (Map.Entry<FileSystemTypes, Long> timeEntry : timeMap.entrySet()) {
if (timeEntry.getValue() > 0) { if (timeEntry.getValue() > 0) {
// if the time is legitimate ( greater than zero ) insert it // if the time is legitimate ( greater than zero ) insert it
@ -626,7 +626,7 @@ public class EventsRepository {
} }
} }
} }
@Override @Override
@NbBundle.Messages("msgdlg.problem.text=There was a problem populating the timeline." @NbBundle.Messages("msgdlg.problem.text=There was a problem populating the timeline."
+ " Not all events may be present or accurate.") + " Not all events may be present or accurate.")
@ -670,7 +670,7 @@ public class EventsRepository {
LOGGER.log(Level.SEVERE, "There was a problem getting events with sub type " + type.toString() + ".", ex); // NON-NLS LOGGER.log(Level.SEVERE, "There was a problem getting events with sub type " + type.toString() + ".", ex); // NON-NLS
} }
} }
private void insertEventForArtifact(final ArtifactEventType type, BlackboardArtifact bbart, EventDB.EventTransaction trans) throws TskCoreException { private void insertEventForArtifact(final ArtifactEventType type, BlackboardArtifact bbart, EventDB.EventTransaction trans) throws TskCoreException {
ArtifactEventType.AttributeEventDescription eventDescription = ArtifactEventType.buildEventDescription(type, bbart); ArtifactEventType.AttributeEventDescription eventDescription = ArtifactEventType.buildEventDescription(type, bbart);