Clearing and filling the hashmap are now wrapped in the same synchronized block

This commit is contained in:
William Schaefer 2016-12-14 14:18:03 -05:00
parent db904a6080
commit add50b1628

View File

@ -75,9 +75,7 @@ public final class FileTypesByMimeType extends Observable implements AutopsyVisi
private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> { private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> {
String eventType = evt.getPropertyName(); String eventType = evt.getPropertyName();
if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
|| eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString()) || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
// || eventType.equals(Case.Events.DATA_SOURCE_ADDED.toString())
|| eventType.equals(IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString())) {
/** /**
* Checking for a current case is a stop gap measure until a * Checking for a current case is a stop gap measure until a
* different way of handling the closing of cases is worked out. * different way of handling the closing of cases is worked out.
@ -123,7 +121,6 @@ public final class FileTypesByMimeType extends Observable implements AutopsyVisi
allDistinctMimeTypesQuery.append(TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL.ordinal()).append("))"); allDistinctMimeTypesQuery.append(TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL.ordinal()).append("))");
synchronized (existingMimeTypes) { synchronized (existingMimeTypes) {
existingMimeTypes.clear(); existingMimeTypes.clear();
}
if (skCase == null) { if (skCase == null) {
@ -131,7 +128,6 @@ public final class FileTypesByMimeType extends Observable implements AutopsyVisi
} }
try (SleuthkitCase.CaseDbQuery dbQuery = skCase.executeQuery(allDistinctMimeTypesQuery.toString())) { try (SleuthkitCase.CaseDbQuery dbQuery = skCase.executeQuery(allDistinctMimeTypesQuery.toString())) {
ResultSet resultSet = dbQuery.getResultSet(); ResultSet resultSet = dbQuery.getResultSet();
synchronized (existingMimeTypes) {
while (resultSet.next()) { while (resultSet.next()) {
final String mime_type = resultSet.getString("mime_type"); //NON-NLS final String mime_type = resultSet.getString("mime_type"); //NON-NLS
if (!mime_type.isEmpty()) { if (!mime_type.isEmpty()) {
@ -144,17 +140,18 @@ public final class FileTypesByMimeType extends Observable implements AutopsyVisi
} }
} }
} }
}
} catch (TskCoreException | SQLException ex) { } catch (TskCoreException | SQLException ex) {
LOGGER.log(Level.WARNING, "Unable to populate File Types by MIME Type tree view from DB: ", ex); //NON-NLS LOGGER.log(Level.WARNING, "Unable to populate File Types by MIME Type tree view from DB: ", ex); //NON-NLS
} }
}
setChanged(); setChanged();
notifyObservers(); notifyObservers();
} }
FileTypesByMimeType(SleuthkitCase skCase) { FileTypesByMimeType(SleuthkitCase skCase) {
IngestManager.getInstance().addIngestJobEventListener(pcl); IngestManager.getInstance().addIngestJobEventListener(pcl);
IngestManager.getInstance().addIngestModuleEventListener(pcl);
this.skCase = skCase; this.skCase = skCase;
populateHashMap(); populateHashMap();
} }
@ -178,6 +175,7 @@ public final class FileTypesByMimeType extends Observable implements AutopsyVisi
isEmptyMimeNode = true; isEmptyMimeNode = true;
} }
return isEmptyMimeNode; return isEmptyMimeNode;
} }
/** /**