Mirror of https://github.com/overcuriousity/autopsy-flatpak.git, synced 2025-07-17 18:17:43 +00:00
- KeywordHit now contains a Content member instead of an AbstractFile, and getFile() has been replaced by getContent().
- Updated KeywordSearchResultFactory, LuceneQuery and TermComponentQuery to use KeywordHit.getContent().
- Modified KeywordHits.java to not attempt to create MAC time columns for hits that do not have an AbstractFile.
- Updated SolrSearchService.indexArtifact() to get the underlying image id for artifacts that do not have an associated AbstractFile.
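
The change that drives the rest of this commit is the switch from AbstractFile to Content in KeywordHit: call sites that previously assumed a file now work with a Content and downcast only when file-specific details (parent path, MAC times) are actually needed. The snippet below is a minimal sketch of that pattern for reference only; it is not code from the commit, and the class and method names are illustrative.

    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.Content;

    // Hypothetical helper mirroring the pattern used by the updated call sites.
    class HitLocationExample {

        // Build a display location for a keyword hit's content.
        static String locationOf(Content content) {
            if (content instanceof AbstractFile) {
                // File hits still expose a full path.
                AbstractFile file = (AbstractFile) content;
                return file.getParentPath() + file.getName();
            }
            // Hits on other content (e.g. artifacts generated for the image
            // itself) only have a display name.
            return content.getName();
        }
    }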
parent 65e2246209
commit d0663d8fca
KeywordHits.java

@@ -503,6 +503,12 @@ public class KeywordHits implements AutopsyVisitableItem {
             return n;
         }

+        // It is possible to get a keyword hit on artifacts generated
+        // for the underlying image in which case MAC times are not
+        // available/applicable/useful.
+        if (file == null)
+            return n;
+
         n.addNodeProperty(new NodeProperty<>(
                 NbBundle.getMessage(this.getClass(), "KeywordHits.createNodeForKey.modTime.name"),
                 NbBundle.getMessage(this.getClass(),

KeywordHit.java

@@ -21,6 +21,7 @@ package org.sleuthkit.autopsy.keywordsearch;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.BlackboardArtifact;
+import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.datamodel.SleuthkitCase;
 import org.sleuthkit.datamodel.TskCoreException;

@@ -36,7 +37,7 @@ class KeywordHit {
     private final long solrObjectId;
     private final int chunkId;
     private final String snippet;
-    private final AbstractFile file;
+    private final Content content;
     private final BlackboardArtifact artifact;

     KeywordHit(String solrDocumentId, String snippet) throws TskCoreException {
@@ -47,10 +48,10 @@ class KeywordHit {

         /**
          * Parse the Solr document id to get the Solr object id and chunk id.
-         * For a file hit, the Solr object id is the file's obj_id from the case
-         * database. For an artifact hit, the Solr object id is the artifact_id
-         * from the case database summed with a magic number to set the highest
-         * order bit. For every object (file or artifact) there will at least
+         * The Solr object id will either be a file id or an artifact id from
+         * the case database.
+         *
+         * For every object (file or artifact) there will at least
          * two Solr documents. One contains object metadata (chunk #1) and the
          * second and subsequent documents contain chunks of the text.
          */
@@ -77,7 +78,7 @@ class KeywordHit {
             this.artifact = null;
             fileId = this.solrObjectId;
         }
-        this.file = caseDb.getAbstractFileById(fileId);
+        this.content = caseDb.getContentById(fileId);

        /**
         * Store the text snippet.
@@ -109,8 +110,8 @@ class KeywordHit {
         return this.snippet;
     }

-    AbstractFile getFile() {
-        return this.file;
+    Content getContent() {
+        return this.content;
     }

     boolean isArtifactHit() {

KeywordSearchResultFactory.java

@@ -171,9 +171,14 @@ class KeywordSearchResultFactory extends ChildFactory<KeyValueQueryContent> {
          * Get file properties.
          */
         Map<String, Object> properties = new LinkedHashMap<>();
-        AbstractFile file = hit.getFile();
-        AbstractFsContentNode.fillPropertyMap(properties, file);
+        Content file = hit.getContent();
+        if (file instanceof AbstractFile) {
+            AbstractFsContentNode.fillPropertyMap(properties, (AbstractFile)file);
+        }
+        else {
+            properties.put(AbstractAbstractFileNode.AbstractFilePropertyType.LOCATION.toString(), file.getName());
+        }

         /**
          * Add a snippet property, if available.
          */
@@ -231,7 +236,7 @@ class KeywordSearchResultFactory extends ChildFactory<KeyValueQueryContent> {
      * @param file
      * @return
      */
-    private String getHighlightQuery(KeywordSearchQuery query, boolean literal_query, QueryResults queryResults, AbstractFile file) {
+    private String getHighlightQuery(KeywordSearchQuery query, boolean literal_query, QueryResults queryResults, Content content) {
         String highlightQueryEscaped;
         if (literal_query) {
             //literal, treat as non-regex, non-term component query
@@ -250,7 +255,7 @@ class KeywordSearchResultFactory extends ChildFactory<KeyValueQueryContent> {
         List<String> hitTerms = new ArrayList<>();
         for (Keyword keyword : queryResults.getKeywords()) {
             for (KeywordHit hit : queryResults.getResults(keyword)) {
-                if (hit.getFile().equals(file)) {
+                if (hit.getContent().equals(content)) {
                     hitTerms.add(keyword.toString());
                     break; //go to next term
                 }

LuceneQuery.java

@@ -148,7 +148,7 @@ class LuceneQuery implements KeywordSearchQuery {
         BlackboardArtifact bba;
         KeywordCachedArtifact writeResult;
         try {
-            bba = hit.getFile().newArtifact(ARTIFACT_TYPE.TSK_KEYWORD_HIT);
+            bba = hit.getContent().newArtifact(ARTIFACT_TYPE.TSK_KEYWORD_HIT);
             writeResult = new KeywordCachedArtifact(bba);
         } catch (Exception e) {
             logger.log(Level.WARNING, "Error adding bb artifact for keyword hit", e); //NON-NLS

QueryResults.java

@@ -38,6 +38,7 @@ import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.BlackboardArtifact;
 import org.sleuthkit.datamodel.BlackboardAttribute;
+import org.sleuthkit.datamodel.Content;

 /**
  * Stores the results from running a Solr query (which could contain multiple
@@ -148,10 +149,10 @@ class QueryResults {
                 if (writeResult != null) {
                     newArtifacts.add(writeResult.getArtifact());
                     if (notifyInbox) {
-                        writeSingleFileInboxMessage(writeResult, hit.getFile()); // RJCTODO: Consider rewriting this message post code
+                        writeSingleFileInboxMessage(writeResult, hit.getContent()); // RJCTODO: Consider rewriting this message post code
                     }
                 } else {
-                    logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: {0}, hit: {1}", new Object[]{hit.getFile(), keyword.toString()}); //NON-NLS
+                    logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: {0}, hit: {1}", new Object[]{hit.getContent(), keyword.toString()}); //NON-NLS
                 }
             }
         }
@@ -185,7 +186,7 @@ class QueryResults {
      * @param written
      * @param hitFile
      */
-    private void writeSingleFileInboxMessage(KeywordCachedArtifact written, AbstractFile hitFile) {
+    private void writeSingleFileInboxMessage(KeywordCachedArtifact written, Content hitContent) {
         StringBuilder subjectSb = new StringBuilder();
         StringBuilder detailsSb = new StringBuilder();

@@ -222,7 +223,13 @@ class QueryResults {
         //file
         detailsSb.append("<tr>"); //NON-NLS
         detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.fileThLbl"));
-        detailsSb.append("<td>").append(hitFile.getParentPath()).append(hitFile.getName()).append("</td>"); //NON-NLS
+        if (hitContent instanceof AbstractFile) {
+            AbstractFile hitFile = (AbstractFile)hitContent;
+            detailsSb.append("<td>").append(hitFile.getParentPath()).append(hitFile.getName()).append("</td>"); //NON-NLS
+        }
+        else {
+            detailsSb.append("<td>").append(hitContent.getName()).append("</td>"); //NON-NLS
+        }
         detailsSb.append("</tr>"); //NON-NLS

         //list

SolrSearchService.java

@@ -43,6 +43,13 @@ public class SolrSearchService implements KeywordSearchService {
         if (artifact == null)
             return;

+        // We only support artifact indexing for Autopsy versions that use
+        // the negative range for artifact ids.
+        long artifactId = artifact.getArtifactID();
+
+        if (artifactId > 0)
+            return;
+
         Case currentCase = Case.getCurrentCase();
         if (currentCase == null)
             return;
@@ -51,21 +58,16 @@ public class SolrSearchService implements KeywordSearchService {
         if (sleuthkitCase == null)
             return;

+        Content dataSource;
         AbstractFile abstractFile = sleuthkitCase.getAbstractFileById(artifact.getObjectID());
-        if (abstractFile == null)
-            return;
+        if (abstractFile != null)
+            dataSource = abstractFile.getDataSource();
+        else
+            dataSource = sleuthkitCase.getContentById(artifact.getObjectID());

-        Content dataSource = abstractFile.getDataSource();
         if (dataSource == null)
             return;

-        long artifactId = artifact.getArtifactID();
-
-        // We only support artifact indexing for Autopsy versions that use
-        // the negative range for artifact ids.
-        if (artifactId > 0)
-            return;
-
         // Concatenate the string values of all attributes into a single
         // "content" string to be indexed.
         StringBuilder artifactContents = new StringBuilder();

TermComponentQuery.java

@@ -174,7 +174,7 @@ class TermComponentQuery implements KeywordSearchQuery {
         KeywordCachedArtifact writeResult;
         Collection<BlackboardAttribute> attributes = new ArrayList<>();
         try {
-            bba = hit.getFile().newArtifact(ARTIFACT_TYPE.TSK_KEYWORD_HIT);
+            bba = hit.getContent().newArtifact(ARTIFACT_TYPE.TSK_KEYWORD_HIT);
             writeResult = new KeywordCachedArtifact(bba);
         } catch (Exception e) {
             logger.log(Level.WARNING, "Error adding bb artifact for keyword hit", e); //NON-NLS
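
Taken together, the two SolrSearchService hunks change how indexArtifact() finds the data source for an artifact before indexing: unsupported (positive) artifact ids are rejected up front, and artifacts with no associated AbstractFile fall back to the content referenced by the artifact's object id. The sketch below condenses that flow as reconstructed from the diff; it is not a copy of indexArtifact(), the wrapper class and method are hypothetical, and the surrounding null checks and the actual indexing code are omitted.

    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.BlackboardArtifact;
    import org.sleuthkit.datamodel.Content;
    import org.sleuthkit.datamodel.SleuthkitCase;
    import org.sleuthkit.datamodel.TskCoreException;

    // Hypothetical helper condensing the data-source resolution shown in the diff above.
    class ArtifactDataSourceExample {

        // Returns the data source the artifact should be indexed against,
        // or null when the artifact is not supported for indexing.
        static Content resolveDataSource(SleuthkitCase sleuthkitCase, BlackboardArtifact artifact) throws TskCoreException {
            // Artifact indexing is only supported for Autopsy versions that
            // use the negative range for artifact ids.
            if (artifact.getArtifactID() > 0) {
                return null;
            }
            AbstractFile abstractFile = sleuthkitCase.getAbstractFileById(artifact.getObjectID());
            if (abstractFile != null) {
                return abstractFile.getDataSource();
            }
            // No associated file: the object id refers to the underlying
            // content (e.g. the image) directly.
            return sleuthkitCase.getContentById(artifact.getObjectID());
        }
    }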