more cleanup

millmanorama 2017-02-21 14:27:21 +01:00
parent e25f1473bd
commit 3c46bf1ce8
6 changed files with 58 additions and 73 deletions

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011-2016 Basis Technology Corp.
+ * Copyright 2011-2017 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011-2016 Basis Technology Corp.
+ * Copyright 2011-2017 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");

View File

@@ -18,6 +18,8 @@
  */
 package org.sleuthkit.autopsy.keywordsearch;

+import com.google.common.base.Predicate;
+import com.google.common.collect.Iterators;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -26,10 +28,11 @@ import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
 import java.util.logging.Level;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-import java.util.stream.Collectors;
 import javax.annotation.concurrent.GuardedBy;
 import org.apache.commons.lang.StringUtils;
 import org.apache.solr.client.solrj.SolrQuery;
@@ -80,16 +83,24 @@ class AccountsText implements IndexedText {
     @GuardedBy("this")
     private boolean isPageInfoLoaded = false;
     private int numberPagesForFile = 0;
-    private int currentPage = 0;
-    //list of pages, used for iterating back and forth. Only stores pages with hits
-    private List<Integer> pages = new ArrayList<>();
-    //map from page/chunk to number of hits. value is 0 if not yet known.
-    private final LinkedHashMap<Integer, Integer> numberOfHitsPerPage = new LinkedHashMap<>();
-    //map from page/chunk number to current hit on that page.
+    private Integer currentPage = 0;
+
+    /*
+     * map from page/chunk to number of hits. value is 0 if not yet known.
+     */
+    private final TreeMap<Integer, Integer> numberOfHitsPerPage = new TreeMap<>();
+    /*
+     * set of pages, used for iterating back and forth. Only stores pages with
+     * hits
+     */
+    private final Set<Integer> pages = numberOfHitsPerPage.keySet();
+    /*
+     * map from page/chunk number to current hit on that page.
+     */
     private final HashMap<Integer, Integer> currentHitPerPage = new HashMap<>();

     AccountsText(long objectID, BlackboardArtifact artifact) {
         this(objectID, Arrays.asList(artifact));
     }

     @NbBundle.Messages({
@@ -119,21 +130,20 @@ class AccountsText implements IndexedText {
     @Override
     public boolean hasNextPage() {
-        return pages.indexOf(this.currentPage) < pages.size() - 1;
+        return getIndexOfCurrentPage() < pages.size() - 1;
     }

     @Override
     public boolean hasPreviousPage() {
-        return pages.indexOf(this.currentPage) > 0;
+        return getIndexOfCurrentPage() > 0;
     }

     @Override
     @NbBundle.Messages("AccountsText.nextPage.exception.msg=No next page.")
     public int nextPage() {
         if (hasNextPage()) {
-            currentPage = pages.get(pages.indexOf(this.currentPage) + 1);
+            currentPage = getIndexOfCurrentPage() + 1;
             return currentPage;
         } else {
             throw new IllegalStateException(Bundle.AccountsText_nextPage_exception_msg());
@@ -144,13 +154,17 @@ class AccountsText implements IndexedText {
     @NbBundle.Messages("AccountsText.previousPage.exception.msg=No previous page.")
     public int previousPage() {
         if (hasPreviousPage()) {
-            currentPage = pages.get(pages.indexOf(this.currentPage) - 1);
+            currentPage = getIndexOfCurrentPage() - 1;
             return currentPage;
         } else {
             throw new IllegalStateException(Bundle.AccountsText_previousPage_exception_msg());
         }
     }

+    private int getIndexOfCurrentPage() {
+        return Iterators.indexOf(pages.iterator(), this.currentPage::equals);
+    }
+
     @Override
     public boolean hasNextItem() {
         if (this.currentHitPerPage.containsKey(currentPage)) {
@@ -198,11 +212,6 @@ class AccountsText implements IndexedText {
         }
     }

-    @Override
-    public LinkedHashMap<Integer, Integer> getHitsPages() {
-        return this.numberOfHitsPerPage;
-    }
-
     /**
      * Initialize this object with information about which pages/chunks have
      * hits. Multiple calls will not change the initial results.
@@ -222,7 +231,7 @@ class AccountsText implements IndexedText {
         for (BlackboardArtifact artifact : artifacts) {
             addToPagingInfo(artifact);
         }
-        pages = pages.stream().sorted().distinct().collect(Collectors.toList());
+
         this.currentPage = pages.stream().findFirst().orElse(1);

         isPageInfoLoaded = true;
@@ -254,11 +263,9 @@ class AccountsText implements IndexedText {
                 .map(t -> StringUtils.substringAfterLast(t, Server.CHUNK_ID_SEPARATOR))
                 .map(Integer::valueOf)
                 .forEach(chunkID -> {
-                    pages.add(chunkID);
                     numberOfHitsPerPage.put(chunkID, 0);
                     currentHitPerPage.put(chunkID, 0);
                 });
     }

     @Override
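Note on the refactor above: the separate page list plus LinkedHashMap is replaced by a single TreeMap whose keySet() serves as the sorted, duplicate-free set of pages, and the position of the current page is found with Guava's Iterators.indexOf. Below is a minimal standalone sketch of that pattern, not part of this commit; the class and method names (PagingSketch, addHitPage) are illustrative only, and Guava is assumed to be on the classpath.

import com.google.common.collect.Iterators;
import java.util.Set;
import java.util.TreeMap;

class PagingSketch {

    // Sorted map from page/chunk number to hit count; the keys double as the page set.
    private final TreeMap<Integer, Integer> numberOfHitsPerPage = new TreeMap<>();
    // Live view of the keys: stays sorted and duplicate-free as entries are added.
    private final Set<Integer> pages = numberOfHitsPerPage.keySet();
    private Integer currentPage = 0;

    void addHitPage(int chunkID) {
        // A single put() registers the page; no separate pages.add(chunkID) or
        // sorted().distinct() pass is needed.
        numberOfHitsPerPage.put(chunkID, 0);
    }

    private int getIndexOfCurrentPage() {
        // Linear scan over the sorted key view; returns -1 if currentPage is absent.
        return Iterators.indexOf(pages.iterator(), this.currentPage::equals);
    }

    boolean hasNextPage() {
        return getIndexOfCurrentPage() < pages.size() - 1;
    }

    boolean hasPreviousPage() {
        return getIndexOfCurrentPage() > 0;
    }
}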

View File

@@ -18,15 +18,15 @@
  */
 package org.sleuthkit.autopsy.keywordsearch;

-import java.util.ArrayList;
+import com.google.common.collect.Iterators;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeMap;
 import java.util.TreeSet;
 import java.util.logging.Level;
 import java.util.stream.Collectors;
@@ -37,11 +37,9 @@ import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrRequest.METHOD;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocumentList;
-import org.openide.util.Exceptions;
 import org.openide.util.NbBundle;
 import org.openide.util.NbBundle.Messages;
 import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
 import org.sleuthkit.autopsy.coreutils.Version;
 import org.sleuthkit.autopsy.keywordsearch.KeywordQueryFilter.FilterType;
 import org.sleuthkit.autopsy.keywordsearch.KeywordSearch.QueryType;
@@ -76,24 +74,24 @@ class HighlightedText implements IndexedText {
     private final Set<String> keywords = new HashSet<>();

     private int numberPages;
-    private int currentPage;
+    private Integer currentPage = 0;
     @GuardedBy("this")
     private boolean isPageInfoLoaded = false;

-    /**
-     * stores the chunk number all pages/chunks that have hits as key, and
-     * number of hits as a value, or 0 if not yet known
-     */
-    private final LinkedHashMap<Integer, Integer> numberOfHitsPerPage = new LinkedHashMap<>();
     /*
-     * stored page num -> current hit number mapping
+     * map from page/chunk to number of hits. value is 0 if not yet known.
+     */
+    private final TreeMap<Integer, Integer> numberOfHitsPerPage = new TreeMap<>();
+    /*
+     * set of pages, used for iterating back and forth. Only stores pages with
+     * hits
+     */
+    private final Set<Integer> pages = numberOfHitsPerPage.keySet();
+    /*
+     * map from page/chunk number to current hit on that page.
      */
     private final HashMap<Integer, Integer> currentHitPerPage = new HashMap<>();
-    /*
-     * List of unique page/chunk numbers with hits
-     */
-    private final List<Integer> pages = new ArrayList<>();

     private QueryResults hits = null; //original hits that may get passed in
     private BlackboardArtifact artifact;
@@ -247,6 +245,9 @@ class HighlightedText implements IndexedText {
     static private String constructEscapedSolrQuery(String query) {
         return LuceneQuery.HIGHLIGHT_FIELD + ":" + "\"" + KeywordSearchUtil.escapeLuceneQuery(query) + "\"";
     }

+    private int getIndexOfCurrentPage() {
+        return Iterators.indexOf(pages.iterator(), this.currentPage::equals);
+    }
     @Override
     public int getNumberPages() {
@@ -261,37 +262,33 @@ class HighlightedText implements IndexedText {
     @Override
     public boolean hasNextPage() {
-        final int numPages = pages.size();
-        int idx = pages.indexOf(this.currentPage);
-        return idx < numPages - 1;
+        return getIndexOfCurrentPage() < pages.size() - 1;
     }

     @Override
     public boolean hasPreviousPage() {
-        int idx = pages.indexOf(this.currentPage);
-        return idx > 0;
+        return getIndexOfCurrentPage() > 0;
     }

     @Override
     public int nextPage() {
-        if (false == hasNextPage()) {
+        if (hasNextPage()) {
+            currentPage = getIndexOfCurrentPage() + 1;
+            return currentPage;
+        } else {
             throw new IllegalStateException("No next page.");
         }
-        int idx = pages.indexOf(this.currentPage);
-        currentPage = pages.get(idx + 1);
-        return currentPage;
     }

     @Override
     public int previousPage() {
-        if (!hasPreviousPage()) {
+        if (hasPreviousPage()) {
+            currentPage = getIndexOfCurrentPage() - 1;
+            return currentPage;
+        } else {
             throw new IllegalStateException("No previous page.");
         }
-        int idx = pages.indexOf(this.currentPage);
-        currentPage = pages.get(idx - 1);
-        return currentPage;
     }

     @Override
@@ -338,11 +335,6 @@ class HighlightedText implements IndexedText {
         return currentHitPerPage.get(currentPage);
     }

-    @Override
-    public LinkedHashMap<Integer, Integer> getHitsPages() {
-        return this.numberOfHitsPerPage;
-    }
-
     @Override
     public String getText() {
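For reference, a small hypothetical usage example (not from this commit; IndexOfDemo is an illustrative name) of the Guava call both classes now depend on. Iterators.indexOf returns the zero-based position of the first element matching the predicate, or -1 when the current page is not among the keys.

import com.google.common.collect.Iterators;
import java.util.TreeMap;

public class IndexOfDemo {

    public static void main(String[] args) {
        TreeMap<Integer, Integer> hitsPerPage = new TreeMap<>();
        hitsPerPage.put(3, 0);
        hitsPerPage.put(7, 0);
        hitsPerPage.put(12, 0);

        Integer currentPage = 7;
        // keySet() iterates in ascending key order (3, 7, 12), so 7 is at index 1.
        System.out.println(Iterators.indexOf(hitsPerPage.keySet().iterator(), currentPage::equals)); // 1

        Integer missingPage = 5;
        // No key matches, so the result is -1.
        System.out.println(Iterators.indexOf(hitsPerPage.keySet().iterator(), missingPage::equals)); // -1
    }
}

With a result of -1, hasPreviousPage() (index > 0) is false while hasNextPage() (index < size - 1) is true for any non-empty page set, so navigation effectively starts from before the first page.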

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011 Basis Technology Corp.
+ * Copyright 2011-2017 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -50,8 +50,7 @@ interface IndexedText {
     String getAnchorPrefix();

     /**
-     * if searchable text, returns number of hits found and encoded in the
-     * text
+     * if searchable text, returns number of hits found and encoded in the text
      *
      * @return
      */
@@ -139,13 +138,4 @@ interface IndexedText {
      * @return the current item number
      */
     int currentItem();
-
-    /**
-     * get a map storing which pages have matches to their number, or 0 if
-     * unknown
-     *
-     * @return map storing pages with matches, or null if not supported
-     */
-    LinkedHashMap<Integer, Integer> getHitsPages();
 }

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011-2016 Basis Technology Corp.
+ * Copyright 2011-2017 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -186,10 +186,6 @@ class RawText implements IndexedText {
         return 0;
     }

-    @Override
-    public LinkedHashMap<Integer, Integer> getHitsPages() {
-        return null;
-    }
-
     @Override
     public int getNumberPages() {