Mirror of https://github.com/overcuriousity/autopsy-flatpak.git (synced 2025-07-14 17:06:16 +00:00)
Updated logic to properly handle artifact and content nodes.
parent 800d98497b
commit c49e33e5b6
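At a high level, the commit replaces the raw QueryResults and Content objects that ad-hoc keyword search rows used to place in the node lookup with a single QueryContent wrapper (Solr object ID, content, and query results) created by KeywordSearchResultFactory, published through KeywordSearchFilterNode, and read back by ExtractedContentViewer. The sketch below illustrates only that hand-off pattern; it is not the Autopsy code. It substitutes a plain Map for the NetBeans Lookup/ProxyLookup API and hypothetical stand-in types (FakeContent, FakeQueryResults) for the Sleuth Kit classes, so everything except the QueryContent shape and its getSolrObjectId()/getContent()/getResults() accessors is assumed for illustration.

import java.util.HashMap;
import java.util.Map;

// Minimal, self-contained illustration of the lookup hand-off introduced by this
// commit. The real code uses NetBeans Lookup/ProxyLookup and Autopsy's Content,
// QueryResults, and HighlightedText types; the stand-ins below are hypothetical.
public class QueryContentHandoffSketch {

    // Stand-ins for org.sleuthkit.datamodel.Content and the keyword search QueryResults.
    record FakeContent(long id) {}

    record FakeQueryResults(String keyword) {}

    // Mirrors the QueryContent class this commit adds to KeywordSearchResultFactory:
    // it bundles the Solr object ID, the content, and the query results.
    record QueryContent(long solrObjectId, FakeContent content, FakeQueryResults results) {
        long getSolrObjectId() { return solrObjectId; }
        FakeContent getContent() { return content; }
        FakeQueryResults getResults() { return results; }
    }

    // Toy "lookup": the real KeywordSearchFilterNode stores the QueryContent via
    // new ProxyLookup(Lookups.singleton(queryContent), original.getLookup()).
    private static final Map<Class<?>, Object> LOOKUP = new HashMap<>();

    // Producer side (KeywordSearchResultFactory / KeywordSearchFilterNode):
    // wrap the selected row's data once and publish it.
    static void publishAdHocResult(long solrObjectId, FakeContent content, FakeQueryResults hits) {
        LOOKUP.put(QueryContent.class, new QueryContent(solrObjectId, content, hits));
    }

    // Consumer side (ExtractedContentViewer.setNode): when no artifact/content hit
    // applies, fall back to the ad-hoc QueryContent from the lookup. The real viewer
    // then builds new HighlightedText(queryContent.getSolrObjectId(), queryContent.getResults()).
    static String buildHighlightedTextDescription() {
        QueryContent queryContent = (QueryContent) LOOKUP.get(QueryContent.class);
        if (queryContent == null) {
            return "no ad-hoc search selection in the lookup";
        }
        return "highlight Solr doc " + queryContent.getSolrObjectId()
                + " (content id " + queryContent.getContent().id() + ")"
                + " for keyword '" + queryContent.getResults().keyword() + "'";
    }

    public static void main(String[] args) {
        publishAdHocResult(42L, new FakeContent(42L), new FakeQueryResults("invoice"));
        System.out.println(buildHighlightedTextDescription());
    }
}

Running the sketch prints a single line naming the Solr document and keyword, mirroring how the viewer builds its highlighted-text source from the QueryContent it finds in the node lookup.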
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011-2017 Basis Technology Corp.
+ * Copyright 2011-2018 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -33,6 +33,8 @@ import org.openide.util.lookup.ServiceProvider;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.corecomponentinterfaces.DataContentViewer;
 import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.keywordsearch.KeywordSearchResultFactory.QueryContent;
+import org.sleuthkit.datamodel.AbstractContent;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.Account;
 import org.sleuthkit.datamodel.BlackboardArtifact;
@@ -50,7 +52,7 @@ import org.sleuthkit.datamodel.TskCoreException;
 @ServiceProvider(service = DataContentViewer.class, position = 4)
 public class ExtractedContentViewer implements DataContentViewer {

-    private static final Logger logger = Logger.getLogger(ExtractedContentViewer.class.getName());
+    private static final Logger LOGGER = Logger.getLogger(ExtractedContentViewer.class.getName());

     private static final long INVALID_DOCUMENT_ID = 0L;
     private static final BlackboardAttribute.Type TSK_ASSOCIATED_ARTIFACT_TYPE = new BlackboardAttribute.Type(TSK_ASSOCIATED_ARTIFACT);
@@ -93,7 +95,7 @@ public class ExtractedContentViewer implements DataContentViewer {
         }

         Lookup nodeLookup = node.getLookup();
-        AbstractFile content = nodeLookup.lookup(AbstractFile.class);
+        AbstractContent content = nodeLookup.lookup(AbstractFile.class);

         /*
          * Assemble a collection of all of the indexed text "sources" for the
@@ -104,41 +106,56 @@ public class ExtractedContentViewer implements DataContentViewer {
         IndexedText rawContentText = null;

         if (null != content && solrHasContent(content.getId())) {
-            QueryResults hits = nodeLookup.lookup(QueryResults.class);
-            BlackboardArtifact artifact = nodeLookup.lookup(BlackboardArtifact.class);
-            if (hits != null) {
-                /*
-                 * if there is a QueryReslt object, in the lookup use that. This
-                 * happens when a user selects a row in an ad-hoc search result
-                 */
-                highlightedHitText = new HighlightedText(content.getId(), hits);
-            } else if (artifact != null
-                    && artifact.getArtifactTypeID() == TSK_ACCOUNT.getTypeID()) {
-                try {
-                    // if the artifact is an account artifact, get an account text .
-                    highlightedHitText = getAccountsText(content, nodeLookup);
-                } catch (TskCoreException ex) {
-                    logger.log(Level.SEVERE, "Failed to create AccountsText for " + content, ex); //NON-NLS
-
-                }
-            } else if (artifact != null
-                    && artifact.getArtifactTypeID() == TSK_KEYWORD_HIT.getTypeID()) {
-                try {
-                    //if there is kwh artifact use that to construct the HighlightedText
-                    highlightedHitText = new HighlightedText(artifact);
-                } catch (TskCoreException ex) {
-                    logger.log(Level.SEVERE, "Failed to create HighlightedText for " + artifact, ex); //NON-NLS
-                }
-            }
-
-            if (highlightedHitText != null) {
-                sources.add(highlightedHitText);
-            }
-
             /*
-             * Next, add the "raw" (not highlighted) text, if any, for any
-             * content associated with the node.
+             * Results for Keyword Hits.
              */
+            BlackboardArtifact artifact = nodeLookup.lookup(BlackboardArtifact.class);
+            if (artifact != null) {
+                if (artifact.getArtifactTypeID() == TSK_ACCOUNT.getTypeID()) {
+                    try {
+                        // if the artifact is an account artifact, get an account text.
+                        highlightedHitText = getAccountsText(content, nodeLookup);
+                    } catch (TskCoreException ex) {
+                        LOGGER.log(Level.SEVERE, "Failed to create AccountsText for " + content, ex); //NON-NLS
+                    }
+                } else if (artifact.getArtifactTypeID() == TSK_KEYWORD_HIT.getTypeID()) {
+                    try {
+                        //if there is kwh artifact use that to construct the HighlightedText
+                        highlightedHitText = new HighlightedText(artifact);
+                    } catch (TskCoreException ex) {
+                        LOGGER.log(Level.SEVERE, "Failed to create HighlightedText for " + artifact, ex); //NON-NLS
+                    }
+                }
+            }
+        } else {
+            /*
+             * Results for ad-hoc search.
+             */
+            QueryContent queryContent = nodeLookup.lookup(QueryContent.class);
+            content = (AbstractFile) queryContent.getContent();
+
+            if (null != content && solrHasContent(content.getId())) {
+                QueryResults queryResults = queryContent.getResults();
+                if (queryResults != null) {
+                    /*
+                     * If there's a QueryContent object in the lookup, use that.
+                     * This happens when a user selects a row in an ad-hoc
+                     * search result.
+                     */
+                    highlightedHitText = new HighlightedText(queryContent.getSolrObjectId(), queryResults);
+                }
+            }
+        }
+
+        if (highlightedHitText != null) {
+            sources.add(highlightedHitText);
+        }
+
+        /*
+         * Next, add the "raw" (not highlighted) text, if any, for any
+         * content associated with the node.
+         */
+        if (content != null) {
             rawContentText = new RawText(content, content.getId());
             sources.add(rawContentText);
         }
@@ -151,7 +168,7 @@ public class ExtractedContentViewer implements DataContentViewer {
             try {
                 rawArtifactText = getRawArtifactText(nodeLookup);
             } catch (TskCoreException ex) {
-                logger.log(Level.SEVERE, "Error creating RawText for " + content, ex); //NON-NLS
+                LOGGER.log(Level.SEVERE, "Error creating RawText for " + content, ex); //NON-NLS

             }
             if (rawArtifactText != null) {
@@ -288,7 +305,7 @@
                     return true;
                 }
             } catch (TskCoreException ex) {
-                logger.log(Level.SEVERE, "Error getting TSK_ACCOUNT_TYPE attribute from artifact " + art.getArtifactID(), ex);
+                LOGGER.log(Level.SEVERE, "Error getting TSK_ACCOUNT_TYPE attribute from artifact " + art.getArtifactID(), ex);
             }
         } else if (artifactTypeID == TSK_KEYWORD_HIT.getTypeID()) {
             return true;
@@ -352,7 +369,7 @@
         try {
             return solrServer.queryIsIndexed(objectId);
         } catch (NoOpenCoreException | KeywordSearchModuleException ex) {
-            logger.log(Level.SEVERE, "Error querying Solr server", ex); //NON-NLS
+            LOGGER.log(Level.SEVERE, "Error querying Solr server", ex); //NON-NLS
             return false;
         }
     }
@@ -386,7 +403,7 @@
                     return blackboardAttribute.getValueLong();
                 }
             } catch (TskCoreException ex) {
-                logger.log(Level.SEVERE, "Error getting associated artifact attributes", ex); //NON-NLS
+                LOGGER.log(Level.SEVERE, "Error getting associated artifact attributes", ex); //NON-NLS
             }
         }
     }
@@ -398,6 +415,13 @@
          * handled above.
          */
         Content content = node.getLookup().lookup(Content.class);
+        if (content == null) {
+            QueryContent queryContent = node.getLookup().lookup(QueryContent.class);
+            if (queryContent != null) {
+                content = queryContent.getContent();
+            }
+        }
+
         if (content != null) {
             return content.getId();
         }
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011-2017 Basis Technology Corp.
+ * Copyright 2011-2018 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -20,7 +20,6 @@ package org.sleuthkit.autopsy.keywordsearch;

 import com.google.common.collect.Iterators;
-import com.google.common.collect.Range;
 import com.google.common.collect.RangeSet;
 import com.google.common.collect.TreeRangeSet;
 import java.util.Arrays;
 import java.util.Collection;
@@ -55,7 +54,7 @@ import org.sleuthkit.datamodel.TskCoreException;
  */
 class HighlightedText implements IndexedText {

-    private static final Logger logger = Logger.getLogger(HighlightedText.class.getName());
+    private static final Logger LOGGER = Logger.getLogger(HighlightedText.class.getName());

     private static final boolean DEBUG = (Version.getBuildType() == Version.Type.DEVELOPMENT);

@@ -70,7 +69,7 @@

     final private Server solrServer = KeywordSearch.getServer();

-    private final long objectId;
+    private final long solrObjectId;
     /*
      * The keywords to highlight
      */
@@ -106,14 +105,14 @@
      * search results. In that case we have the entire QueryResults object and
      * need to arrange the paging.
      *
-     * @param objectId     The objectID of the content whose text will be
+     * @param solrObjectId The solrObjectId of the content whose text will be
      *                     highlighted.
      * @param QueryResults The QueryResults for the ad-hoc search from whose
      *                     results a selection was made leading to this
      *                     HighlightedText.
      */
-    HighlightedText(long objectId, QueryResults hits) {
-        this.objectId = objectId;
+    HighlightedText(long solrObjectId, QueryResults hits) {
+        this.solrObjectId = solrObjectId;
         this.hits = hits;
     }

@@ -129,9 +128,9 @@
         this.artifact = artifact;
         BlackboardAttribute attribute = artifact.getAttribute(TSK_ASSOCIATED_ARTIFACT);
         if (attribute != null) {
-            this.objectId = attribute.getValueLong();
+            this.solrObjectId = attribute.getValueLong();
         } else {
-            this.objectId = artifact.getObjectID();
+            this.solrObjectId = artifact.getObjectID();
         }

     }
@@ -146,7 +145,7 @@
             return;
         }

-        this.numberPages = solrServer.queryNumFileChunks(this.objectId);
+        this.numberPages = solrServer.queryNumFileChunks(this.solrObjectId);

         if (artifact != null) {
             loadPageInfoFromArtifact();
@@ -194,7 +193,7 @@
         // Run a query to figure out which chunks for the current object have
         // hits for this keyword.

-        chunksQuery.addFilter(new KeywordQueryFilter(FilterType.CHUNK, this.objectId));
+        chunksQuery.addFilter(new KeywordQueryFilter(FilterType.CHUNK, this.solrObjectId));

         hits = chunksQuery.performQuery();
         loadPageInfoFromHits();
@@ -216,7 +215,7 @@
            for (KeywordHit hit : hits.getResults(k)) {
                int chunkID = hit.getChunkId();
                if (artifact != null) {
-                    if (chunkID != 0 && this.objectId == hit.getSolrObjectId()) {
+                    if (chunkID != 0 && this.solrObjectId == hit.getSolrObjectId()) {
                        String hit1 = hit.getHit();
                        if (keywords.stream().anyMatch(hit1::contains)) {
                            numberOfHitsPerPage.put(chunkID, 0); //unknown number of matches in the page
@@ -225,7 +224,7 @@
                        }
                    }
                } else {
-                    if (chunkID != 0 && this.objectId == hit.getSolrObjectId()) {
+                    if (chunkID != 0 && this.solrObjectId == hit.getSolrObjectId()) {

                        numberOfHitsPerPage.put(chunkID, 0); //unknown number of matches in the page
                        currentHitPerPage.put(chunkID, 0); //set current hit to 0th
@@ -354,7 +353,7 @@
         SolrQuery q = new SolrQuery();
         q.setShowDebugInfo(DEBUG); //debug

-        String contentIdStr = Long.toString(this.objectId);
+        String contentIdStr = Long.toString(this.solrObjectId);
         if (numberPages != 0) {
             chunkID = Integer.toString(this.currentPage);
             contentIdStr += "0".equals(chunkID) ? "" : "_" + chunkID;
@@ -401,7 +400,7 @@
             // either be a single chunk containing hits or we narrow our
             // query down to the current page/chunk.
             if (response.getResults().size() > 1) {
-                logger.log(Level.WARNING, "Unexpected number of results for Solr highlighting query: {0}", q); //NON-NLS
+                LOGGER.log(Level.WARNING, "Unexpected number of results for Solr highlighting query: {0}", q); //NON-NLS
             }
             String highlightedContent;
             Map<String, Map<String, List<String>>> responseHighlight = response.getHighlighting();
@@ -427,7 +426,7 @@

             return "<html><pre>" + highlightedContent + "</pre></html>"; //NON-NLS
         } catch (TskCoreException | KeywordSearchModuleException | NoOpenCoreException ex) {
-            logger.log(Level.SEVERE, "Error getting highlighted text for Solr doc id " + objectId + ", chunkID " + chunkID + ", highlight query: " + highlightField, ex); //NON-NLS
+            LOGGER.log(Level.SEVERE, "Error getting highlighted text for Solr doc id " + solrObjectId + ", chunkID " + chunkID + ", highlight query: " + highlightField, ex); //NON-NLS
             return NbBundle.getMessage(this.getClass(), "HighlightedMatchesSource.getMarkup.queryFailedMsg");
         }
     }
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2013-2017 Basis Technology Corp.
+ * Copyright 2013-2018 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -39,6 +39,7 @@ import org.sleuthkit.autopsy.actions.AddContentTagAction;
 import org.sleuthkit.autopsy.actions.DeleteFileContentTagAction;
 import org.sleuthkit.autopsy.directorytree.HashSearchAction;
 import org.sleuthkit.autopsy.directorytree.NewWindowViewAction;
+import org.sleuthkit.autopsy.keywordsearch.KeywordSearchResultFactory.QueryContent;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.datamodel.ContentVisitor;
@@ -52,12 +53,18 @@ import org.sleuthkit.datamodel.TskData;
 import org.sleuthkit.datamodel.VirtualDirectory;

 /**
- *
+ * FilterNode containing data pertaining to keyword search.
  */
 class KeywordSearchFilterNode extends FilterNode {

-    KeywordSearchFilterNode(QueryResults highlights, Node original) {
-        super(original, null, new ProxyLookup(Lookups.singleton(highlights), original.getLookup()));
+    /**
+     * Instantiate a KeywordSearchFilterNode.
+     *
+     * @param queryContent The query content.
+     * @param original     The original source node.
+     */
+    KeywordSearchFilterNode(QueryContent queryContent, Node original) {
+        super(original, null, new ProxyLookup(Lookups.singleton(queryContent), original.getLookup()));
     }

     @Override
@@ -250,13 +250,12 @@ class KeywordSearchResultFactory extends ChildFactory<KeyValue> {
         Node resultNode;

         if (key instanceof KeyValueQueryContent) {
-            final Content content = ((KeyValueQueryContent) key).getContent();
-            QueryResults hits = ((KeyValueQueryContent) key).getHits();
+            QueryContent queryContent = new QueryContent((KeyValueQueryContent) key);

-            Node kvNode = new KeyValueNode(key, Children.LEAF, Lookups.singleton(content));
+            Node kvNode = new KeyValueNode(key, Children.LEAF, Lookups.singleton(queryContent));

             //wrap in KeywordSearchFilterNode for the markup content, might need to override FilterNode for more customization
-            resultNode = new KeywordSearchFilterNode(hits, kvNode);
+            resultNode = new KeywordSearchFilterNode(queryContent, kvNode);
         } else {
             resultNode = new EmptyNode("This Node Is Empty");
             resultNode.setDisplayName(NbBundle.getMessage(this.getClass(), "KeywordSearchResultFactory.createNodeForKey.noResultsFound.text"));
@@ -265,6 +264,56 @@
         return resultNode;

     }

+    /**
+     * This class encapsulates content, query results, and an associated Solr
+     * object ID for storing in the Lookup to be read later.
+     */
+    class QueryContent {
+        private final long solrObjectId;
+        private final Content content;
+        private final QueryResults results;
+
+        /**
+         * Instantiate a QueryContent object.
+         *
+         * @param solrObjectId The Solr object ID associated with the content.
+         * @param content      The content for the query result.
+         * @param results      The query results.
+         */
+        QueryContent(KeyValueQueryContent key) {
+            this.solrObjectId = key.getSolrObjectId();
+            this.content = key.getContent();
+            this.results = key.getHits();
+        }
+
+        /**
+         * Get the Solr object ID associated with the content.
+         *
+         * @return The Solr object ID.
+         */
+        long getSolrObjectId() {
+            return solrObjectId;
+        }
+
+        /**
+         * Get the content for the query result.
+         *
+         * @return The content.
+         */
+        Content getContent() {
+            return content;
+        }
+
+        /**
+         * Get the query results.
+         *
+         * @return The query results.
+         */
+        QueryResults getResults() {
+            return results;
+        }
+    }
+
    /**
     * Used to display keyword search results in table. Eventually turned into a