Merge pull request #2259 from eugene7646/concurrent_mod_1839

1839 Fixed ConcurrentModificationException in several Autopsy datamodel objects
Richard Cordovano, 2016-06-28 17:47:00 -04:00 (committed by GitHub)
commit 5cb6afb67a
4 changed files with 130 additions and 91 deletions
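
All four classes below share the same structure: an Observable results holder owns a LinkedHashMap that a background worker repopulates while node/UI threads read it, and the fix guards every read and write by synchronizing on the map itself. A minimal standalone sketch of that pattern (illustrative only, not Autopsy code; all names are made up):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

// Minimal sketch of the race this PR fixes: iterating the map (or a view such
// as keySet()) while another thread clears and repopulates it throws
// ConcurrentModificationException; synchronizing on the map serializes both sides.
public class SynchronizedResultsSketch {

    private final Map<String, Set<Long>> results = new LinkedHashMap<>();

    // Reader path: take the map's monitor before touching it.
    public int resultCount() {
        synchronized (results) {
            return results.size();
        }
    }

    // Writer path: clear and repopulate under the same monitor.
    public void update(Map<String, Set<Long>> fresh) {
        synchronized (results) {
            results.clear();
            results.putAll(fresh);
        }
    }
}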

EmailExtracted.java

@@ -72,6 +72,7 @@ public class EmailExtracted implements AutopsyVisitableItem {
     private final class EmailResults extends Observable {

+        // NOTE: the map can be accessed by multiple worker threads and needs to be synchronized
         private final Map<String, Map<String, List<Long>>> accounts = new LinkedHashMap<>();

         EmailResults() {
@@ -79,20 +80,28 @@ public class EmailExtracted implements AutopsyVisitableItem {
         }

         public Set<String> getAccounts() {
-            return accounts.keySet();
+            synchronized (accounts) {
+                return accounts.keySet();
+            }
         }

         public Set<String> getFolders(String account) {
-            return accounts.get(account).keySet();
+            synchronized (accounts) {
+                return accounts.get(account).keySet();
+            }
         }

         public List<Long> getArtifactIds(String account, String folder) {
-            return accounts.get(account).get(folder);
+            synchronized (accounts) {
+                return accounts.get(account).get(folder);
+            }
         }

         @SuppressWarnings("deprecation")
         public void update() {
-            accounts.clear();
+            synchronized (accounts) {
+                accounts.clear();
+            }
             if (skCase == null) {
                 return;
             }
@@ -107,24 +116,26 @@ public class EmailExtracted implements AutopsyVisitableItem {
             try (CaseDbQuery dbQuery = skCase.executeQuery(query)) {
                 ResultSet resultSet = dbQuery.getResultSet();
-                while (resultSet.next()) {
-                    final String path = resultSet.getString("value_text"); //NON-NLS
-                    final long artifactId = resultSet.getLong("artifact_id"); //NON-NLS
-                    final Map<String, String> parsedPath = parsePath(path);
-                    final String account = parsedPath.get(MAIL_ACCOUNT);
-                    final String folder = parsedPath.get(MAIL_FOLDER);
-                    Map<String, List<Long>> folders = accounts.get(account);
-                    if (folders == null) {
-                        folders = new LinkedHashMap<>();
-                        accounts.put(account, folders);
-                    }
-                    List<Long> messages = folders.get(folder);
-                    if (messages == null) {
-                        messages = new ArrayList<>();
-                        folders.put(folder, messages);
-                    }
-                    messages.add(artifactId);
-                }
+                synchronized (accounts) {
+                    while (resultSet.next()) {
+                        final String path = resultSet.getString("value_text"); //NON-NLS
+                        final long artifactId = resultSet.getLong("artifact_id"); //NON-NLS
+                        final Map<String, String> parsedPath = parsePath(path);
+                        final String account = parsedPath.get(MAIL_ACCOUNT);
+                        final String folder = parsedPath.get(MAIL_FOLDER);
+                        Map<String, List<Long>> folders = accounts.get(account);
+                        if (folders == null) {
+                            folders = new LinkedHashMap<>();
+                            accounts.put(account, folders);
+                        }
+                        List<Long> messages = folders.get(folder);
+                        if (messages == null) {
+                            messages = new ArrayList<>();
+                            folders.put(folder, messages);
+                        }
+                        messages.add(artifactId);
+                    }
+                }
             } catch (TskCoreException | SQLException ex) {
                 logger.log(Level.WARNING, "Cannot initialize email extraction: ", ex); //NON-NLS

HashsetHits.java

@@ -78,7 +78,7 @@ public class HashsetHits implements AutopsyVisitableItem {
     private class HashsetResults extends Observable {

-        // maps hashset name to list of artifacts for that set
+        // NOTE: the map can be accessed by multiple worker threads and needs to be synchronized
         private final Map<String, Set<Long>> hashSetHitsMap = new LinkedHashMap<>();

         HashsetResults() {
@@ -86,18 +86,25 @@ public class HashsetHits implements AutopsyVisitableItem {
         }

         List<String> getSetNames() {
-            List<String> names = new ArrayList<>(hashSetHitsMap.keySet());
+            List<String> names;
+            synchronized (hashSetHitsMap) {
+                names = new ArrayList<>(hashSetHitsMap.keySet());
+            }
             Collections.sort(names);
             return names;
         }

         Set<Long> getArtifactIds(String hashSetName) {
-            return hashSetHitsMap.get(hashSetName);
+            synchronized (hashSetHitsMap) {
+                return hashSetHitsMap.get(hashSetName);
+            }
         }

         @SuppressWarnings("deprecation")
         final void update() {
-            hashSetHitsMap.clear();
+            synchronized (hashSetHitsMap) {
+                hashSetHitsMap.clear();
+            }
             if (skCase == null) {
                 return;
@@ -113,13 +120,15 @@ public class HashsetHits implements AutopsyVisitableItem {
             try (CaseDbQuery dbQuery = skCase.executeQuery(query)) {
                 ResultSet resultSet = dbQuery.getResultSet();
-                while (resultSet.next()) {
-                    String setName = resultSet.getString("value_text"); //NON-NLS
-                    long artifactId = resultSet.getLong("artifact_id"); //NON-NLS
-                    if (!hashSetHitsMap.containsKey(setName)) {
-                        hashSetHitsMap.put(setName, new HashSet<Long>());
-                    }
-                    hashSetHitsMap.get(setName).add(artifactId);
-                }
+                synchronized (hashSetHitsMap) {
+                    while (resultSet.next()) {
+                        String setName = resultSet.getString("value_text"); //NON-NLS
+                        long artifactId = resultSet.getLong("artifact_id"); //NON-NLS
+                        if (!hashSetHitsMap.containsKey(setName)) {
+                            hashSetHitsMap.put(setName, new HashSet<Long>());
+                        }
+                        hashSetHitsMap.get(setName).add(artifactId);
+                    }
+                }
             } catch (TskCoreException | SQLException ex) {
                 logger.log(Level.WARNING, "SQL Exception occurred: ", ex); //NON-NLS

InterestingHits.java

@@ -64,20 +64,28 @@ public class InterestingHits implements AutopsyVisitableItem {
     private class InterestingResults extends Observable {

+        // NOTE: the map can be accessed by multiple worker threads and needs to be synchronized
         private final Map<String, Set<Long>> interestingItemsMap = new LinkedHashMap<>();

         public List<String> getSetNames() {
-            List<String> setNames = new ArrayList<>(interestingItemsMap.keySet());
+            List<String> setNames;
+            synchronized (interestingItemsMap) {
+                setNames = new ArrayList<>(interestingItemsMap.keySet());
+            }
             Collections.sort(setNames);
             return setNames;
         }

         public Set<Long> getArtifactIds(String setName) {
-            return interestingItemsMap.get(setName);
+            synchronized (interestingItemsMap) {
+                return interestingItemsMap.get(setName);
+            }
         }

         public void update() {
-            interestingItemsMap.clear();
+            synchronized (interestingItemsMap) {
+                interestingItemsMap.clear();
+            }
             loadArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
             loadArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT);
             setChanged();
@@ -103,14 +111,16 @@ public class InterestingHits implements AutopsyVisitableItem {
                     + " AND blackboard_artifacts.artifact_type_id=" + artId; //NON-NLS
             try (CaseDbQuery dbQuery = skCase.executeQuery(query)) {
-                ResultSet resultSet = dbQuery.getResultSet();
-                while (resultSet.next()) {
-                    String value = resultSet.getString("value_text"); //NON-NLS
-                    long artifactId = resultSet.getLong("artifact_id"); //NON-NLS
-                    if (!interestingItemsMap.containsKey(value)) {
-                        interestingItemsMap.put(value, new HashSet<>());
-                    }
-                    interestingItemsMap.get(value).add(artifactId);
-                }
+                synchronized (interestingItemsMap) {
+                    ResultSet resultSet = dbQuery.getResultSet();
+                    while (resultSet.next()) {
+                        String value = resultSet.getString("value_text"); //NON-NLS
+                        long artifactId = resultSet.getLong("artifact_id"); //NON-NLS
+                        if (!interestingItemsMap.containsKey(value)) {
+                            interestingItemsMap.put(value, new HashSet<>());
+                        }
+                        interestingItemsMap.get(value).add(artifactId);
+                    }
+                }
             } catch (TskCoreException | SQLException ex) {
                 logger.log(Level.WARNING, "SQL Exception occurred: ", ex); //NON-NLS

KeywordHits.java

@@ -73,83 +73,92 @@ public class KeywordHits implements AutopsyVisitableItem {
     private final class KeywordResults extends Observable {

         // Map from listName/Type to Map of keyword to set of artifact Ids
-        private final Map<String, Map<String, Set<Long>>> topLevelMap;
+        // NOTE: the map can be accessed by multiple worker threads and needs to be synchronized
+        private final Map<String, Map<String, Set<Long>>> topLevelMap = new LinkedHashMap<>();

         KeywordResults() {
-            topLevelMap = new LinkedHashMap<>();
             update();
         }

         List<String> getListNames() {
-            List<String> names = new ArrayList<>(topLevelMap.keySet());
-            // this causes the "Single ..." terms to be in the middle of the results,
-            // which is weird. Make a custom comparator or do something else to make them on top
-            //Collections.sort(names);
-            return names;
+            synchronized (topLevelMap) {
+                List<String> names = new ArrayList<>(topLevelMap.keySet());
+                // this causes the "Single ..." terms to be in the middle of the results,
+                // which is weird. Make a custom comparator or do something else to make them on top
+                //Collections.sort(names);
+                return names;
+            }
         }

         List<String> getKeywords(String listName) {
-            List<String> keywords = new ArrayList<>(topLevelMap.get(listName).keySet());
+            List<String> keywords;
+            synchronized (topLevelMap) {
+                keywords = new ArrayList<>(topLevelMap.get(listName).keySet());
+            }
             Collections.sort(keywords);
             return keywords;
         }

         Set<Long> getArtifactIds(String listName, String keyword) {
-            return topLevelMap.get(listName).get(keyword);
+            synchronized (topLevelMap) {
+                return topLevelMap.get(listName).get(keyword);
+            }
         }

         // populate maps based on artifactIds
         void populateMaps(Map<Long, Map<Long, String>> artifactIds) {
-            topLevelMap.clear();
+            synchronized (topLevelMap) {
+                topLevelMap.clear();

-            // map of list name to keyword to artifact IDs
-            Map<String, Map<String, Set<Long>>> listsMap = new LinkedHashMap<>();
+                // map of list name to keyword to artifact IDs
+                Map<String, Map<String, Set<Long>>> listsMap = new LinkedHashMap<>();

-            // Map from literal keyword to artifact IDs
-            Map<String, Set<Long>> literalMap = new LinkedHashMap<>();
+                // Map from literal keyword to artifact IDs
+                Map<String, Set<Long>> literalMap = new LinkedHashMap<>();

-            // Map from regex keyword to artifact IDs
-            Map<String, Set<Long>> regexMap = new LinkedHashMap<>();
+                // Map from regex keyword to artifact IDs
+                Map<String, Set<Long>> regexMap = new LinkedHashMap<>();

-            // top-level nodes
-            topLevelMap.put(SIMPLE_LITERAL_SEARCH, literalMap);
-            topLevelMap.put(SIMPLE_REGEX_SEARCH, regexMap);
+                // top-level nodes
+                topLevelMap.put(SIMPLE_LITERAL_SEARCH, literalMap);
+                topLevelMap.put(SIMPLE_REGEX_SEARCH, regexMap);

-            for (Map.Entry<Long, Map<Long, String>> art : artifactIds.entrySet()) {
-                long id = art.getKey();
-                Map<Long, String> attributes = art.getValue();
+                for (Map.Entry<Long, Map<Long, String>> art : artifactIds.entrySet()) {
+                    long id = art.getKey();
+                    Map<Long, String> attributes = art.getValue();

-                // I think we can use attributes.remove(...) here?
-                String listName = attributes.get(Long.valueOf(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()));
-                String word = attributes.get(Long.valueOf(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID()));
-                String reg = attributes.get(Long.valueOf(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP.getTypeID()));
+                    // I think we can use attributes.remove(...) here?
+                    String listName = attributes.get(Long.valueOf(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()));
+                    String word = attributes.get(Long.valueOf(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID()));
+                    String reg = attributes.get(Long.valueOf(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP.getTypeID()));

-                // part of a list
-                if (listName != null) {
-                    if (listsMap.containsKey(listName) == false) {
-                        listsMap.put(listName, new LinkedHashMap<String, Set<Long>>());
-                    }
-                    Map<String, Set<Long>> listMap = listsMap.get(listName);
-                    if (listMap.containsKey(word) == false) {
-                        listMap.put(word, new HashSet<Long>());
-                    }
-                    listMap.get(word).add(id);
-                } // regular expression, single term
-                else if (reg != null) {
-                    if (regexMap.containsKey(reg) == false) {
-                        regexMap.put(reg, new HashSet<Long>());
-                    }
-                    regexMap.get(reg).add(id);
-                } // literal, single term
-                else {
-                    if (literalMap.containsKey(word) == false) {
-                        literalMap.put(word, new HashSet<Long>());
-                    }
-                    literalMap.get(word).add(id);
-                }
-            }
-            topLevelMap.putAll(listsMap);
+                    // part of a list
+                    if (listName != null) {
+                        if (listsMap.containsKey(listName) == false) {
+                            listsMap.put(listName, new LinkedHashMap<String, Set<Long>>());
+                        }
+                        Map<String, Set<Long>> listMap = listsMap.get(listName);
+                        if (listMap.containsKey(word) == false) {
+                            listMap.put(word, new HashSet<Long>());
+                        }
+                        listMap.get(word).add(id);
+                    } // regular expression, single term
+                    else if (reg != null) {
+                        if (regexMap.containsKey(reg) == false) {
+                            regexMap.put(reg, new HashSet<Long>());
+                        }
+                        regexMap.get(reg).add(id);
+                    } // literal, single term
+                    else {
+                        if (literalMap.containsKey(word) == false) {
+                            literalMap.put(word, new HashSet<Long>());
+                        }
+                        literalMap.get(word).add(id);
+                    }
+                }
+                topLevelMap.putAll(listsMap);
+            }
             setChanged();
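
populateMaps() above triages every keyword-hit artifact into one of three top-level buckets: a named keyword list (TSK_SET_NAME present), the single-regular-expression bucket (TSK_KEYWORD_REGEXP present), or the single-literal bucket, and the entire rebuild, including the final putAll of the list results, now runs under one lock. A condensed sketch with plain strings in place of the BlackboardAttribute lookups (bucket display names are placeholders, not the real bundle strings):

import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

// Illustrative stand-in for KeywordResults; not the real class.
class KeywordTriageSketch {

    static final String SIMPLE_LITERAL_SEARCH = "Single Literal Keyword Search"; // placeholder name
    static final String SIMPLE_REGEX_SEARCH = "Single Regular Expression Search"; // placeholder name

    private final Map<String, Map<String, Set<Long>>> topLevelMap = new LinkedHashMap<>();

    void populate(Map<Long, String[]> hits) { // id -> {listName, word, regex}; entries may be null
        synchronized (topLevelMap) {
            topLevelMap.clear();
            Map<String, Map<String, Set<Long>>> listsMap = new LinkedHashMap<>();
            Map<String, Set<Long>> literalMap = new LinkedHashMap<>();
            Map<String, Set<Long>> regexMap = new LinkedHashMap<>();
            topLevelMap.put(SIMPLE_LITERAL_SEARCH, literalMap);
            topLevelMap.put(SIMPLE_REGEX_SEARCH, regexMap);
            for (Map.Entry<Long, String[]> hit : hits.entrySet()) {
                long id = hit.getKey();
                String listName = hit.getValue()[0];
                String word = hit.getValue()[1];
                String regex = hit.getValue()[2];
                if (listName != null) {      // hit from a named keyword list
                    listsMap.computeIfAbsent(listName, n -> new LinkedHashMap<>())
                            .computeIfAbsent(word, w -> new HashSet<>()).add(id);
                } else if (regex != null) {  // ad-hoc regular expression search
                    regexMap.computeIfAbsent(regex, r -> new HashSet<>()).add(id);
                } else {                     // ad-hoc literal search
                    literalMap.computeIfAbsent(word, w -> new HashSet<>()).add(id);
                }
            }
            topLevelMap.putAll(listsMap); // publish the named lists alongside the two ad-hoc buckets
        }
    }
}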