SearchRunner now also uses writeAllHitsToBlackBoard()

commit 766ded2d75
parent dcbade813e
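
In summary (based on the hunks below): QueryResults.writeAllHitsToBlackBoard() now guards its ProgressHandle calls with null checks and documents that both progress and subProgress may be null, a new termHit variable selects between hitTerm.getQuery() and hitTerm.toString() depending on notifyInbox, and SearchRunner replaces its hand-rolled per-hit loop with a single call to writeAllHitsToBlackBoard().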
@@ -103,17 +103,22 @@ class QueryResults {
      * Creates a blackboard artifact for each keyword hit
      * @param query
      * @param listName
-     * @param progress
+     * @param progress can be null
+     * @param subProgress can be null
      * @param notifyInbox flag indicating whether or not to call writeInboxMessage() for each hit
      * @return list of new artifacts
      */
     public Collection<BlackboardArtifact> writeAllHitsToBlackBoard(KeywordSearchQuery query, String listName, ProgressHandle progress, ProgressContributor subProgress, SwingWorker<Object, Void> worker, boolean notifyInbox) {
         final Collection<BlackboardArtifact> newArtifacts = new ArrayList<>();
+        if (progress != null) {
             progress.start(getKeywords().size());
+        }
         int unitProgress = 0;
 
         for (final Keyword hitTerm : getKeywords()) {
+            if (progress != null) {
                 progress.progress(hitTerm.toString(), unitProgress);
+            }
 
             if (worker.isCancelled()) {
                 logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: {0}", hitTerm.getQuery()); //NON-NLS
@@ -133,8 +138,9 @@ class QueryResults {
             Map<AbstractFile, Integer> flattened = getUniqueFiles(hitTerm);
 
             for (AbstractFile hitFile : flattened.keySet()) {
+                String termHit = notifyInbox ? hitTerm.getQuery() : hitTerm.toString();
                 int chunkId = flattened.get(hitFile);
-                final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(hitTerm.toString());
+                final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(termHit);
                 String snippet;
                 try {
                     snippet = LuceneQuery.querySnippet(snippetQuery, hitFile.getId(), chunkId, !query.isLiteral(), true);
@@ -147,7 +153,8 @@ class QueryResults {
                     continue;
                 }
                 if (snippet != null) {
-                    KeywordWriteResult written = query.writeToBlackBoard(hitTerm.toString(), hitFile, snippet, listName);
+                    KeywordWriteResult written = query.writeToBlackBoard(termHit, hitFile, snippet, listName);
+
                     if (written != null) {
                         newArtifacts.add(written.getArtifact());
                         if (notifyInbox) {
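A minimal sketch (not part of this commit, and not runnable outside Autopsy's keywordsearch module) of the call pattern the null guards above enable: a caller that has no ProgressHandle of its own can now pass null for it and supply a per-list ProgressContributor instead, which is what SearchRunner does in the hunks below. The fragment assumes it lives inside a SwingWorker subclass and that keywordSearchQuery, listName, subProgresses, keywordsSearched, list, keywordQuery, and logger come from the surrounding SearchRunner context shown in this diff.

    // Sketch of the new call pattern; mirrors the SearchRunner change later in this diff.
    try {
        QueryResults queryResults = keywordSearchQuery.performQuery();
        Collection<BlackboardArtifact> newArtifacts = queryResults.writeAllHitsToBlackBoard(
                keywordSearchQuery,                // the query that produced the hits
                listName,                          // keyword list name stored on each artifact
                null,                              // ProgressHandle: may now be null
                subProgresses[keywordsSearched],   // ProgressContributor ("can be null" per the javadoc)
                this,                              // the enclosing SwingWorker, polled via isCancelled()
                list.getIngestMessages());         // notifyInbox: post an inbox message per hit
    } catch (NoOpenCoreException ex) {
        logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), ex); //NON-NLS
    }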
@@ -440,11 +440,11 @@ public final class SearchRunner {
                     final KeywordQueryFilter dataSourceFilter = new KeywordQueryFilter(KeywordQueryFilter.FilterType.DATA_SOURCE, job.getDataSourceId());
                     keywordSearchQuery.addFilter(dataSourceFilter);
 
-                    QueryResults queryResult;
+                    QueryResults queryResults;
 
                     // Do the actual search
                     try {
-                        queryResult = keywordSearchQuery.performQuery();
+                        queryResults = keywordSearchQuery.performQuery();
                     } catch (NoOpenCoreException ex) {
                         logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), ex); //NON-NLS
                         //no reason to continue with next query if recovery failed
@@ -461,7 +461,7 @@ public final class SearchRunner {
 
                     // calculate new results by substracting results already obtained in this ingest
                     // this creates a map of each keyword to the list of unique files that have that hit.
-                    QueryResults newResults = filterResults(queryResult, isRegex);
+                    QueryResults newResults = filterResults(queryResults, isRegex);
 
                     if (!newResults.getKeywords().isEmpty()) {
 
@@ -480,64 +480,9 @@ public final class SearchRunner {
                         }
                         subProgresses[keywordsSearched].progress(listName + ": " + queryDisplayStr, unitProgress);
 
-                        // cycle through the keywords returned -- only one unless it was a regexp
-                        for (final Keyword hitTerm : newResults.getKeywords()) {
-                            //checking for cancellation between results
-                            if (this.isCancelled()) {
-                                logger.log(Level.INFO, "Cancel detected, bailing before new hit processed for query: {0}", keywordQuery.getQuery()); //NON-NLS
-                                return null;
-                            }
-
-                            // update progress display
-                            String hitDisplayStr = hitTerm.getQuery();
-                            if (hitDisplayStr.length() > 50) {
-                                hitDisplayStr = hitDisplayStr.substring(0, 49) + "...";
-                            }
-                            subProgresses[keywordsSearched].progress(listName + ": " + hitDisplayStr, unitProgress);
-
-                            // this returns the unique files in the set with the first chunk that has a hit
-                            Map<AbstractFile, Integer> contentHitsFlattened = newResults.getUniqueFiles(hitTerm);
-                            for (final AbstractFile hitFile : contentHitsFlattened.keySet()) {
-
-                                // get the snippet for the first hit in the file
-                                String snippet;
-                                final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(hitTerm.getQuery());
-                                int chunkId = contentHitsFlattened.get(hitFile);
-                                try {
-                                    snippet = LuceneQuery.querySnippet(snippetQuery, hitFile.getId(), chunkId, isRegex, true);
-                                } catch (NoOpenCoreException e) {
-                                    logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e); //NON-NLS
-                                    //no reason to continue
-                                    return null;
-                                } catch (Exception e) {
-                                    logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e); //NON-NLS
-                                    continue;
-                                }
-
-                                // write the blackboard artifact for this keyword in this file
-                                KeywordWriteResult written = keywordSearchQuery.writeToBlackBoard(hitTerm.getQuery(), hitFile, snippet, listName);
-                                if (written == null) {
-                                    logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: {0}, hit: {1}", new Object[]{hitFile, hitTerm.toString()}); //NON-NLS
-                                    continue;
-                                }
-
-                                newArtifacts.add(written.getArtifact());
-
-                                // Inbox messages
-                                if (list.getIngestMessages()) {
-                                    newResults.writeInboxMessage(keywordSearchQuery, written, hitFile);
-                                }
-
-                            } //for each file hit
-
-                            ++unitProgress;
-
-                        }//for each hit term
-
-                        //update artifact browser
-                        if (!newArtifacts.isEmpty()) {
-                            services.fireModuleDataEvent(new ModuleDataEvent(KeywordSearchModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT, newArtifacts));
-                        }
+                        // Create blackboard artifacts
+                        newArtifacts = queryResults.writeAllHitsToBlackBoard(keywordSearchQuery, listName, null, subProgresses[keywordsSearched], this, list.getIngestMessages());
+
                     } //if has results
 
                     //reset the status text before it goes away
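
The block removed above (the cancellation check, per-hit progress updates, snippet lookup via LuceneQuery.querySnippet(), writeToBlackBoard() calls, and inbox notifications) is the per-keyword, per-file loop that SearchRunner previously ran itself; after this commit that loop is delegated to QueryResults.writeAllHitsToBlackBoard(), shown in the QueryResults hunks earlier in this diff.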