Mirror of https://github.com/overcuriousity/autopsy-flatpak.git (synced 2025-07-15 01:07:42 +00:00)

more cleanup

Commit: 3c46bf1ce8
Parent: e25f1473bd
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011-2016 Basis Technology Corp.
+ * Copyright 2011-2017 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,6 +18,8 @@
  */
 package org.sleuthkit.autopsy.keywordsearch;
 
+import com.google.common.base.Predicate;
+import com.google.common.collect.Iterators;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -26,10 +28,11 @@ import java.util.HashSet;
-import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Set;
+import java.util.TreeMap;
 import java.util.TreeSet;
 import java.util.logging.Level;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 import javax.annotation.concurrent.GuardedBy;
 import org.apache.commons.lang.StringUtils;
 import org.apache.solr.client.solrj.SolrQuery;
@@ -80,16 +83,24 @@ class AccountsText implements IndexedText
     @GuardedBy("this")
     private boolean isPageInfoLoaded = false;
     private int numberPagesForFile = 0;
-    private int currentPage = 0;
-    //list of pages, used for iterating back and forth. Only stores pages with hits
-    private List<Integer> pages = new ArrayList<>();
-    //map from page/chunk to number of hits. value is 0 if not yet known.
-    private final LinkedHashMap<Integer, Integer> numberOfHitsPerPage = new LinkedHashMap<>();
-    //map from page/chunk number to current hit on that page.
+    private Integer currentPage = 0;
+
+    /*
+     * map from page/chunk to number of hits. value is 0 if not yet known.
+     */
+    private final TreeMap<Integer, Integer> numberOfHitsPerPage = new TreeMap<>();
+    /*
+     * set of pages, used for iterating back and forth. Only stores pages with hits
+     */
+    private final Set<Integer> pages = numberOfHitsPerPage.keySet();
+    /*
+     * map from page/chunk number to current hit on that page.
+     */
     private final HashMap<Integer, Integer> currentHitPerPage = new HashMap<>();
 
     AccountsText(long objectID, BlackboardArtifact artifact) {
         this(objectID, Arrays.asList(artifact));
+
     }
 
     @NbBundle.Messages({
@@ -119,21 +130,20 @@ class AccountsText implements IndexedText
 
     @Override
     public boolean hasNextPage() {
-        return pages.indexOf(this.currentPage) < pages.size() - 1;
+        return getIndexOfCurrentPage() < pages.size() - 1;
 
     }
 
     @Override
     public boolean hasPreviousPage() {
-        return pages.indexOf(this.currentPage) > 0;
-
+        return getIndexOfCurrentPage() > 0;
     }
 
     @Override
     @NbBundle.Messages("AccountsText.nextPage.exception.msg=No next page.")
     public int nextPage() {
         if (hasNextPage()) {
-            currentPage = pages.get(pages.indexOf(this.currentPage) + 1);
+            currentPage = getIndexOfCurrentPage() + 1;
             return currentPage;
         } else {
             throw new IllegalStateException(Bundle.AccountsText_nextPage_exception_msg());
@@ -144,13 +154,17 @@ class AccountsText implements IndexedText
     @NbBundle.Messages("AccountsText.previousPage.exception.msg=No previous page.")
     public int previousPage() {
         if (hasPreviousPage()) {
-            currentPage = pages.get(pages.indexOf(this.currentPage) - 1);
+            currentPage = getIndexOfCurrentPage() - 1;
             return currentPage;
         } else {
             throw new IllegalStateException(Bundle.AccountsText_previousPage_exception_msg());
         }
     }
+
+    private int getIndexOfCurrentPage() {
+        return Iterators.indexOf(pages.iterator(), this.currentPage::equals);
+    }
 
     @Override
     public boolean hasNextItem() {
         if (this.currentHitPerPage.containsKey(currentPage)) {
@@ -198,11 +212,6 @@ class AccountsText implements IndexedText
         }
     }
 
-    @Override
-    public LinkedHashMap<Integer, Integer> getHitsPages() {
-        return this.numberOfHitsPerPage;
-    }
-
     /**
      * Initialize this object with information about which pages/chunks have
     * hits. Multiple calls will not change the initial results.
@@ -222,7 +231,7 @@ class AccountsText implements IndexedText
         for (BlackboardArtifact artifact : artifacts) {
             addToPagingInfo(artifact);
         }
-        pages = pages.stream().sorted().distinct().collect(Collectors.toList());
+
         this.currentPage = pages.stream().findFirst().orElse(1);
 
         isPageInfoLoaded = true;
@@ -254,11 +263,9 @@ class AccountsText implements IndexedText
                 .map(t -> StringUtils.substringAfterLast(t, Server.CHUNK_ID_SEPARATOR))
                 .map(Integer::valueOf)
                 .forEach(chunkID -> {
-                    pages.add(chunkID);
                     numberOfHitsPerPage.put(chunkID, 0);
                     currentHitPerPage.put(chunkID, 0);
                 });
-
     }
 
     @Override
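The AccountsText hunks above replace the LinkedHashMap-plus-List page bookkeeping with a single TreeMap whose key set doubles as the page list: keys stay sorted and duplicate-free on their own, which is why the explicit pages.stream().sorted().distinct() pass disappears. A minimal, self-contained sketch of that idea (plain Java with illustrative names, not Autopsy code):

    import java.util.Set;
    import java.util.TreeMap;

    public class PageTrackingSketch {
        public static void main(String[] args) {
            // chunk number -> hit count; 0 means "count not known yet"
            TreeMap<Integer, Integer> numberOfHitsPerPage = new TreeMap<>();
            // live view over the keys; always iterates in ascending chunk order
            Set<Integer> pages = numberOfHitsPerPage.keySet();

            // register chunks out of order, with a duplicate
            for (int chunkID : new int[]{7, 2, 7, 5}) {
                numberOfHitsPerPage.put(chunkID, 0);
            }

            System.out.println(pages);                      // [2, 5, 7]
            System.out.println(pages.stream().findFirst()); // Optional[2], the first page with hits
        }
    }

Because pages is only a view, it cannot be added to directly (a map's keySet() rejects add), which is consistent with the removal of pages.add(chunkID) in the last hunk: new chunks enter the set through numberOfHitsPerPage.put.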
@@ -18,15 +18,15 @@
  */
 package org.sleuthkit.autopsy.keywordsearch;
 
-import java.util.ArrayList;
+import com.google.common.collect.Iterators;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeMap;
 import java.util.TreeSet;
 import java.util.logging.Level;
 import java.util.stream.Collectors;
@@ -37,11 +37,9 @@ import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrRequest.METHOD;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocumentList;
-import org.openide.util.Exceptions;
 import org.openide.util.NbBundle;
 import org.openide.util.NbBundle.Messages;
 import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
 import org.sleuthkit.autopsy.coreutils.Version;
 import org.sleuthkit.autopsy.keywordsearch.KeywordQueryFilter.FilterType;
 import org.sleuthkit.autopsy.keywordsearch.KeywordSearch.QueryType;
@@ -76,24 +74,24 @@ class HighlightedText implements IndexedText
     private final Set<String> keywords = new HashSet<>();
 
     private int numberPages;
-    private int currentPage;
+    private Integer currentPage =0;
 
     @GuardedBy("this")
     private boolean isPageInfoLoaded = false;
 
-    /**
-     * stores the chunk number all pages/chunks that have hits as key, and
-     * number of hits as a value, or 0 if not yet known
+    /*
+     * map from page/chunk to number of hits. value is 0 if not yet known.
      */
-    private final LinkedHashMap<Integer, Integer> numberOfHitsPerPage = new LinkedHashMap<>();
+    private final TreeMap<Integer, Integer> numberOfHitsPerPage = new TreeMap<>();
     /*
-     * stored page num -> current hit number mapping
+     * set of pages, used for iterating back and forth. Only stores pages with hits
      */
+    private final Set<Integer> pages = numberOfHitsPerPage.keySet();
+    /*
+     * map from page/chunk number to current hit on that page.
+     */
     private final HashMap<Integer, Integer> currentHitPerPage = new HashMap<>();
-    /*
-     * List of unique page/chunk numbers with hits
-     */
-    private final List<Integer> pages = new ArrayList<>();
+
 
     private QueryResults hits = null; //original hits that may get passed in
     private BlackboardArtifact artifact;
@@ -247,6 +245,9 @@ class HighlightedText implements IndexedText
     static private String constructEscapedSolrQuery(String query) {
         return LuceneQuery.HIGHLIGHT_FIELD + ":" + "\"" + KeywordSearchUtil.escapeLuceneQuery(query) + "\"";
     }
+    private int getIndexOfCurrentPage() {
+        return Iterators.indexOf(pages.iterator(), this.currentPage::equals);
+    }
 
     @Override
     public int getNumberPages() {
@@ -259,39 +260,35 @@ class HighlightedText implements IndexedText
         return this.currentPage;
     }
 
-    @Override
+    @Override
     public boolean hasNextPage() {
-        final int numPages = pages.size();
-        int idx = pages.indexOf(this.currentPage);
-        return idx < numPages - 1;
+        return getIndexOfCurrentPage() < pages.size() - 1;
 
     }
 
     @Override
     public boolean hasPreviousPage() {
-        int idx = pages.indexOf(this.currentPage);
-        return idx > 0;
-
+        return getIndexOfCurrentPage() > 0;
     }
 
     @Override
     public int nextPage() {
-        if (false == hasNextPage()) {
+        if (hasNextPage()) {
+            currentPage = getIndexOfCurrentPage() + 1;
+            return currentPage;
+        } else {
             throw new IllegalStateException("No next page.");
         }
-        int idx = pages.indexOf(this.currentPage);
-        currentPage = pages.get(idx + 1);
-        return currentPage;
     }
 
     @Override
     public int previousPage() {
-        if (!hasPreviousPage()) {
+        if (hasPreviousPage()) {
+            currentPage = getIndexOfCurrentPage() - 1;
+            return currentPage;
+        } else {
             throw new IllegalStateException("No previous page.");
         }
-        int idx = pages.indexOf(this.currentPage);
-        currentPage = pages.get(idx - 1);
-        return currentPage;
     }
 
     @Override
@@ -338,11 +335,6 @@ class HighlightedText implements IndexedText
         return currentHitPerPage.get(currentPage);
     }
 
-    @Override
-    public LinkedHashMap<Integer, Integer> getHitsPages() {
-        return this.numberOfHitsPerPage;
-    }
-
     @Override
     public String getText() {
 
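Both classes now locate the current page by its position in the sorted key set rather than by List.indexOf: getIndexOfCurrentPage() walks the keys with Guava's Iterators.indexOf, and hasNextPage()/hasPreviousPage() compare that position against the set size. A rough sketch of the same cursor logic, assuming Guava is on the classpath (class and field names here are illustrative, not the actual Autopsy members):

    import com.google.common.collect.Iterators;
    import java.util.Set;
    import java.util.TreeMap;

    public class PageCursorSketch {
        // chunk number -> hit count, as in the sketch above
        private final TreeMap<Integer, Integer> numberOfHitsPerPage = new TreeMap<>();
        private final Set<Integer> pages = numberOfHitsPerPage.keySet();
        private Integer currentPage = 0;

        private int getIndexOfCurrentPage() {
            // position of currentPage in key order, or -1 if it is not a known page
            return Iterators.indexOf(pages.iterator(), this.currentPage::equals);
        }

        boolean hasNextPage() {
            return getIndexOfCurrentPage() < pages.size() - 1;
        }

        boolean hasPreviousPage() {
            return getIndexOfCurrentPage() > 0;
        }

        public static void main(String[] args) {
            PageCursorSketch cursor = new PageCursorSketch();
            cursor.numberOfHitsPerPage.put(3, 0);
            cursor.numberOfHitsPerPage.put(8, 0);
            cursor.currentPage = 3;
            System.out.println(cursor.hasNextPage());     // true, page 8 follows
            System.out.println(cursor.hasPreviousPage()); // false, 3 is the first page with hits
        }
    }

Iterators.indexOf expects a com.google.common.base.Predicate, and a bound method reference such as currentPage::equals satisfies that functional interface directly.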
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011 Basis Technology Corp.
+ * Copyright 2011-2017 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -50,8 +50,7 @@ interface IndexedText
     String getAnchorPrefix();
 
     /**
-     * if searchable text, returns number of hits found and encoded in the
-     * text
+     * if searchable text, returns number of hits found and encoded in the text
      *
      * @return
      */
@@ -139,13 +138,4 @@ interface IndexedText
      * @return the current item number
      */
     int currentItem();
-
-    /**
-     * get a map storing which pages have matches to their number, or 0 if
-     * unknown
-     *
-     * @return map storing pages with matches, or null if not supported
-     */
-    LinkedHashMap<Integer, Integer> getHitsPages();
-
 }
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011-2016 Basis Technology Corp.
+ * Copyright 2011-2017 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -186,10 +186,6 @@ class RawText implements IndexedText
         return 0;
     }
 
-    @Override
-    public LinkedHashMap<Integer, Integer> getHitsPages() {
-        return null;
-    }
 
     @Override
     public int getNumberPages() {