Mirror of https://github.com/overcuriousity/autopsy-flatpak.git (synced 2025-07-19 11:07:43 +00:00)

Commit 9bb780792b
Merge branch 'develop' of https://github.com/sleuthkit/autopsy into 2348-DspSelectionPanel
@@ -27,7 +27,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
 import java.util.logging.Level;
 import java.util.stream.Collectors;
 import javax.annotation.concurrent.GuardedBy;
@@ -244,7 +243,6 @@ class HighlightedText implements IndexedText {
     @Override
     public boolean hasNextPage() {
         return getIndexOfCurrentPage() < pages.size() - 1;
-
     }

     @Override
@@ -255,7 +253,7 @@ class HighlightedText implements IndexedText {
     @Override
     public int nextPage() {
         if (hasNextPage()) {
-            currentPage = Iterators.get(pages.iterator(),getIndexOfCurrentPage() + 1);
+            currentPage = Iterators.get(pages.iterator(), getIndexOfCurrentPage() + 1);
             return currentPage;
         } else {
             throw new IllegalStateException("No next page.");
@@ -265,7 +263,7 @@ class HighlightedText implements IndexedText {
     @Override
     public int previousPage() {
         if (hasPreviousPage()) {
-            currentPage = Iterators.get(pages.iterator(),getIndexOfCurrentPage()-1);
+            currentPage = Iterators.get(pages.iterator(), getIndexOfCurrentPage() - 1);
             return currentPage;
         } else {
             throw new IllegalStateException("No previous page.");
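The two hunks above are pure formatting fixes around Guava's Iterators.get(). As a reference for how that paging idiom works, here is a minimal, self-contained sketch; the class name, constructor, and the getIndexOfCurrentPage() implementation are illustrative stand-ins, not the actual Autopsy code.

// Illustrative sketch only: page numbers live in a sorted set and
// Iterators.get() pulls out the page at a given position.
import com.google.common.collect.Iterators;
import java.util.Arrays;
import java.util.SortedSet;
import java.util.TreeSet;

public class PageCursorSketch {
    private final SortedSet<Integer> pages = new TreeSet<>();
    private int currentPage;

    PageCursorSketch(Iterable<Integer> pageNumbers) {
        pageNumbers.forEach(pages::add);
        currentPage = pages.first(); // assumes at least one page
    }

    private int getIndexOfCurrentPage() {
        // The number of pages strictly before the current one is its index.
        return pages.headSet(currentPage).size();
    }

    boolean hasNextPage() {
        return getIndexOfCurrentPage() < pages.size() - 1;
    }

    int nextPage() {
        if (hasNextPage()) {
            currentPage = Iterators.get(pages.iterator(), getIndexOfCurrentPage() + 1);
            return currentPage;
        } else {
            throw new IllegalStateException("No next page.");
        }
    }

    public static void main(String[] args) {
        PageCursorSketch cursor = new PageCursorSketch(Arrays.asList(1, 2, 3));
        while (cursor.hasNextPage()) {
            System.out.println("moved to page " + cursor.nextPage());
        }
    }
}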
@@ -387,7 +385,7 @@ class HighlightedText implements IndexedText {
             highlightedContent = insertAnchors(highlightedContent);

             return "<html><pre>" + highlightedContent + "</pre></html>"; //NON-NLS
-        } catch (Exception ex) {
+        } catch (TskCoreException | KeywordSearchModuleException | NoOpenCoreException ex) {
             logger.log(Level.SEVERE, "Error getting highlighted text for " + objectId, ex); //NON-NLS
             return NbBundle.getMessage(this.getClass(), "HighlightedMatchesSource.getMarkup.queryFailedMsg");
         }
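This hunk narrows a blanket catch (Exception ex) to the three checked exceptions the try block can actually throw. A small, generic sketch of the same multi-catch pattern, using hypothetical stand-in exception types rather than the real Sleuth Kit/Autopsy ones:

import java.util.logging.Level;
import java.util.logging.Logger;

public class MultiCatchSketch {
    // Hypothetical stand-ins for TskCoreException, KeywordSearchModuleException, NoOpenCoreException.
    static class CoreException extends Exception {}
    static class ModuleException extends Exception {}

    private static final Logger logger = Logger.getLogger(MultiCatchSketch.class.getName());

    static String loadHighlightedText(long objectId) throws CoreException, ModuleException {
        // ... query the index / case database here ...
        return "highlighted text for object " + objectId;
    }

    static String render(long objectId) {
        try {
            return "<html><pre>" + loadHighlightedText(objectId) + "</pre></html>";
        } catch (CoreException | ModuleException ex) {
            // One handler for the specific checked exceptions the call declares,
            // instead of swallowing everything with catch (Exception ex).
            logger.log(Level.SEVERE, "Error getting highlighted text for " + objectId, ex);
            return "";
        }
    }
}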
@@ -453,8 +451,8 @@ class HighlightedText implements IndexedText {
             //we also need to escape the keyword so that it matches the escpared text
             final String escapedKeyword = StringEscapeUtils.escapeHtml(keyword);
             int textOffset = 0;
-            int hitOffset;
-            while ((hitOffset = StringUtils.indexOfIgnoreCase(text, escapedKeyword, textOffset)) != -1) {
+            int hitOffset = StringUtils.indexOfIgnoreCase(text, escapedKeyword, textOffset);
+            while (hitOffset != -1) {
                 // Append the portion of text up to (but not including) the hit.
                 highlightedText.append(text.substring(textOffset, hitOffset));
                 // Add in the highlighting around the keyword.
@@ -464,13 +462,15 @@ class HighlightedText implements IndexedText {

                 // Advance the text offset past the keyword.
                 textOffset = hitOffset + escapedKeyword.length();

+                hitOffset = StringUtils.indexOfIgnoreCase(text, escapedKeyword, textOffset);
             }
             // Append the remainder of text field
             highlightedText.append(text.substring(textOffset, text.length()));


             if (highlightedText.length() == 0) {
                 return NbBundle.getMessage(HighlightedText.class, "HighlightedMatchesSource.getMarkup.noMatchMsg");
             }
         }
         //reset for next pass
         text = highlightedText.toString();
         highlightedText = new StringBuilder("");
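The two hunks above restructure the keyword-highlighting loop so the first hit is found before the loop and each iteration advances the search explicitly, instead of assigning inside the while condition. A self-contained sketch of the resulting loop shape, assuming commons-lang3's StringUtils and illustrative HIGHLIGHT_PRE/HIGHLIGHT_POST values:

import org.apache.commons.lang3.StringUtils;

public class HighlightSketch {
    private static final String HIGHLIGHT_PRE = "<span style='background:yellow'>";
    private static final String HIGHLIGHT_POST = "</span>";

    static String highlight(String text, String escapedKeyword) {
        StringBuilder highlightedText = new StringBuilder();
        int textOffset = 0;
        int hitOffset = StringUtils.indexOfIgnoreCase(text, escapedKeyword, textOffset);
        while (hitOffset != -1) {
            // Append the portion of text up to (but not including) the hit.
            highlightedText.append(text, textOffset, hitOffset);
            // Add in the highlighting around the keyword, preserving its original case.
            highlightedText.append(HIGHLIGHT_PRE);
            highlightedText.append(text, hitOffset, hitOffset + escapedKeyword.length());
            highlightedText.append(HIGHLIGHT_POST);
            // Advance the text offset past the keyword and look for the next hit.
            textOffset = hitOffset + escapedKeyword.length();
            hitOffset = StringUtils.indexOfIgnoreCase(text, escapedKeyword, textOffset);
        }
        // Append the remainder of the text field.
        highlightedText.append(text.substring(textOffset));
        return highlightedText.toString();
    }

    public static void main(String[] args) {
        System.out.println(highlight("Foo bar foo baz", "foo"));
    }
}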
@@ -487,22 +487,21 @@ class HighlightedText implements IndexedText {
      * @return
      */
     private String insertAnchors(String searchableContent) {
-        int searchOffset = 0;
-        int index = -1;
-
         StringBuilder buf = new StringBuilder(searchableContent);

         final String searchToken = HIGHLIGHT_PRE;
         final int indexSearchTokLen = searchToken.length();
         final String insertPre = "<a name='" + ANCHOR_PREFIX; //NON-NLS
         final String insertPost = "'></a>"; //NON-NLS
         int count = 0;
-        while ((index = buf.indexOf(searchToken, searchOffset)) >= 0) {
+        int searchOffset = 0;
+        int index = buf.indexOf(searchToken, searchOffset);
+        while (index >= 0) {
             String insertString = insertPre + Integer.toString(count + 1) + insertPost;
             int insertStringLen = insertString.length();
             buf.insert(index, insertString);
             searchOffset = index + indexSearchTokLen + insertStringLen; //next offset past this anchor
             ++count;
+            index = buf.indexOf(searchToken, searchOffset);
         }
-
         //store total hits for this page, now that we know it
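The insertAnchors() rewrite follows the same pattern: locate the first highlight token before the loop, insert an anchor in front of it, and restart the search past both the token and the newly inserted anchor. A standalone sketch (constant values are illustrative):

public class AnchorSketch {
    private static final String HIGHLIGHT_PRE = "<span style='background:yellow'>";
    private static final String ANCHOR_PREFIX = "keyword_";

    static String insertAnchors(String searchableContent) {
        StringBuilder buf = new StringBuilder(searchableContent);

        final String searchToken = HIGHLIGHT_PRE;
        final int indexSearchTokLen = searchToken.length();
        final String insertPre = "<a name='" + ANCHOR_PREFIX;
        final String insertPost = "'></a>";
        int count = 0;
        int searchOffset = 0;
        int index = buf.indexOf(searchToken, searchOffset);
        while (index >= 0) {
            String insertString = insertPre + Integer.toString(count + 1) + insertPost;
            int insertStringLen = insertString.length();
            buf.insert(index, insertString);
            // Next search starts past the anchor we just inserted and the token it precedes.
            searchOffset = index + indexSearchTokLen + insertStringLen;
            ++count;
            index = buf.indexOf(searchToken, searchOffset);
        }
        return buf.toString();
    }
}

Advancing by index + indexSearchTokLen + insertStringLen skips both the freshly inserted anchor and the highlight token it precedes, so the loop cannot match the same highlight twice.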
@@ -120,7 +120,7 @@ class LuceneQuery implements KeywordSearchQuery {

     @Override
     public QueryResults performQuery() throws KeywordSearchModuleException, NoOpenCoreException {
-        QueryResults results = new QueryResults(this, keywordList);
+        QueryResults results = new QueryResults(this);
         //in case of single term literal query there is only 1 term
         boolean showSnippets = KeywordSearchSettings.getShowSnippets();
         results.addResult(new Keyword(keywordString, true), performLuceneQuery(showSnippets));
@@ -60,25 +60,17 @@ class QueryResults {
      */
     private final Map<Keyword, List<KeywordHit>> results = new HashMap<>();

-    /**
-     * The list of keywords
-     */
-    // TODO: This is redundant. The keyword list is in the query.
-    private final KeywordList keywordList;
-
-
-    QueryResults(KeywordSearchQuery query, KeywordList keywordList) {
+    QueryResults(KeywordSearchQuery query) {
         this.keywordSearchQuery = query;
-        this.keywordList = keywordList;
     }

     void addResult(Keyword keyword, List<KeywordHit> hits) {
         results.put(keyword, hits);
     }

-    // TODO: This is redundant. The keyword list is in the query.
-    KeywordList getKeywordList() {
-        return keywordList;
-    }
-

     KeywordSearchQuery getQuery() {
         return keywordSearchQuery;
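This hunk drops the redundant keyword list from QueryResults: the constructor now takes only the query, and callers reach the list through the query object. A hedged sketch of the resulting usage, with simplified stand-in types in place of the real Autopsy interfaces:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class QueryResultsSketch {
    interface KeywordList { String getName(); }
    interface KeywordSearchQuery { KeywordList getKeywordList(); }
    static class Keyword { final String term; Keyword(String term) { this.term = term; } }
    static class KeywordHit {}

    static class QueryResults {
        private final KeywordSearchQuery keywordSearchQuery;
        private final Map<Keyword, List<KeywordHit>> results = new HashMap<>();

        QueryResults(KeywordSearchQuery query) {
            this.keywordSearchQuery = query;
        }

        void addResult(Keyword keyword, List<KeywordHit> hits) {
            results.put(keyword, hits);
        }

        KeywordSearchQuery getQuery() {
            return keywordSearchQuery;
        }
    }

    static void example(KeywordSearchQuery query) {
        QueryResults results = new QueryResults(query);           // old form: new QueryResults(query, keywordList)
        results.addResult(new Keyword("foo"), new ArrayList<>());
        // The list name now comes from the query, not from QueryResults itself.
        System.out.println(results.getQuery().getKeywordList().getName());
    }
}

The same one-argument construction appears in the LuceneQuery hunk above and in the RegexQuery, SearchRunner, and TermsComponentQuery hunks below.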
@@ -129,7 +121,7 @@ class QueryResults {
                 if (hitDisplayStr.length() > 50) {
                     hitDisplayStr = hitDisplayStr.substring(0, 49) + "...";
                 }
-                subProgress.progress(keywordList.getName() + ": " + hitDisplayStr, unitProgress);
+                subProgress.progress(keywordSearchQuery.getKeywordList().getName() + ": " + hitDisplayStr, unitProgress);
             }

             for (KeywordHit hit : getOneHitPerObject(keyword)) {
@@ -138,7 +130,11 @@ class QueryResults {
                 if (StringUtils.isBlank(snippet)) {
                     final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(termString);
                     try {
-                        //this doesn't work for regex queries...
+                        /*
+                         * this doesn't work for regex queries... But that is
+                         * okay because regex queries always have snippets made
+                         * from the content_str field we pull back from Solr
+                         */
                         snippet = LuceneQuery.querySnippet(snippetQuery, hit.getSolrObjectId(), hit.getChunkId(), !keywordSearchQuery.isLiteral(), true);
                     } catch (NoOpenCoreException e) {
                         logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e); //NON-NLS
@@ -149,16 +145,14 @@ class QueryResults {
                             continue;
                         }
                     }
                     if (snippet != null) {
-                        KeywordCachedArtifact writeResult = keywordSearchQuery.writeSingleFileHitsToBlackBoard(keyword, hit, snippet, keywordList.getName());
-                        if (writeResult != null) {
-                            newArtifacts.add(writeResult.getArtifact());
-                            if (notifyInbox) {
-                                writeSingleFileInboxMessage(writeResult, hit.getContent());
-                            }
-                        } else {
-                            logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: {0}, hit: {1}", new Object[]{hit.getContent(), keyword.toString()}); //NON-NLS
+                        KeywordCachedArtifact writeResult = keywordSearchQuery.writeSingleFileHitsToBlackBoard(keyword, hit, snippet, keywordSearchQuery.getKeywordList().getName());
+                        if (writeResult != null) {
+                            newArtifacts.add(writeResult.getArtifact());
+                            if (notifyInbox) {
+                                writeSingleFileInboxMessage(writeResult, hit.getContent());
+                            }
+                        } else {
+                            logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: {0}, hit: {1}", new Object[]{hit.getContent(), keyword.toString()}); //NON-NLS
                         }
                     }
                     ++unitProgress;
@@ -154,7 +154,7 @@ final class RegexQuery implements KeywordSearchQuery {

     @Override
     public QueryResults performQuery() throws NoOpenCoreException {
-        QueryResults results = new QueryResults(this, keywordList);
+        QueryResults results = new QueryResults(this);

         final Server solrServer = KeywordSearch.getServer();
         SolrQuery solrQuery = new SolrQuery();
@@ -590,7 +590,7 @@ public final class SearchRunner {

                 // Create a new (empty) QueryResults object to hold the most recently
                 // found hits.
-                QueryResults newResults = new QueryResults(queryResult.getQuery(), queryResult.getKeywordList());
+                QueryResults newResults = new QueryResults(queryResult.getQuery());

                 // For each keyword represented in the results.
                 for (Keyword keyword : queryResult.getKeywords()) {
@@ -280,7 +280,7 @@ final class TermsComponentQuery implements KeywordSearchQuery {
         /*
          * Do a term query for each term that matched the regex.
          */
-        QueryResults results = new QueryResults(this, keywordList);
+        QueryResults results = new QueryResults(this);
         for (Term term : terms) {
             /*
              * If searching for credit card account numbers, do a Luhn check on