TSK-468 keyword search hang on certain content

TSK-429 	Case closing exception handling
- better design of Solr server, better encapsulation to improve concurrency and error handling
- added timeout (30 mins by default, to be tweaked per file size) for file index operation
- implemented better solr server stop() (graceful, then brutal)
- added Solr restart in case of timeout; fixes an issue where the server instance is dead and does not respond. After the restart, ingest continues with the following files
- better exception/error handling, and handling of case closing while keyword search threads are being shut down but still running
This commit is contained in:
adam-m 2012-04-27 17:53:41 -04:00
parent 27e9b7f8ee
commit 7e1b633be7
12 changed files with 471 additions and 227 deletions

View File

@ -196,25 +196,21 @@ public class ExtractedContentViewer implements DataContentViewer {
return false;
}
Server.Core solrCore = null;
try {
solrCore = KeywordSearch.getServer().getCore();
} catch (SolrServerException e) {
logger.log(Level.INFO, "Could not get Solr Core", e);
}
final Server solrServer = KeywordSearch.getServer();
if (solrCore == null) {
return false;
}
SolrQuery q = new SolrQuery();
q.setQuery("*:*");
q.addFilterQuery("id:" + content.getId());
q.setFields("id");
try {
return !solrCore.query(q).getResults().isEmpty();
} catch (SolrServerException ex) {
return !solrServer.query(q).getResults().isEmpty();
}
catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Couldn't determine whether content is supported.", ex);
return false;
}
catch (SolrServerException ex) {
logger.log(Level.WARNING, "Couldn't determine whether content is supported.", ex);
return false;
}
@ -228,13 +224,20 @@ public class ExtractedContentViewer implements DataContentViewer {
* @throws SolrServerException if something goes wrong
*/
private String getSolrContent(Node node) throws SolrServerException {
Server.Core solrCore = KeywordSearch.getServer().getCore();
Server solrServer = KeywordSearch.getServer();
SolrQuery q = new SolrQuery();
q.setQuery("*:*");
q.addFilterQuery("id:" + node.getLookup().lookup(Content.class).getId());
q.setFields("content");
String content = (String) solrCore.query(q).getResults().get(0).getFieldValue("content");
String content;
try {
content = (String) solrServer.query(q).getResults().get(0).getFieldValue("content");
}
catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Couldn't get Solr content.", ex);
return "";
}
return content;
}

View File

@ -43,7 +43,7 @@ class HighlightedMatchesSource implements MarkupSource, HighlightLookup {
private static final String NO_MATCHES = "<span style='background:red'>No matches in content.</span>";
private Content content;
private String solrQuery;
private Core solrCore;
private Server solrServer;
private int numberHits;
private boolean isRegex = false;
private boolean group = true;
@ -54,11 +54,8 @@ class HighlightedMatchesSource implements MarkupSource, HighlightLookup {
this.isRegex = isRegex;
this.group = true;
try {
this.solrCore = KeywordSearch.getServer().getCore();
} catch (SolrServerException ex) {
logger.log(Level.INFO, "Could not get Solr core", ex);
}
this.solrServer = KeywordSearch.getServer();
}
HighlightedMatchesSource(Content content, String solrQuery, boolean isRegex, boolean group) {
@ -72,10 +69,6 @@ class HighlightedMatchesSource implements MarkupSource, HighlightLookup {
@Override
public String getMarkup() {
if (solrCore == null) {
return NO_MATCHES;
}
String highLightField = null;
String highlightQuery = solrQuery;
@ -124,7 +117,7 @@ class HighlightedMatchesSource implements MarkupSource, HighlightLookup {
q.setHighlightFragsize(0); // don't fragment the highlight
try {
QueryResponse response = solrCore.query(q, METHOD.POST);
QueryResponse response = solrServer.query(q, METHOD.POST);
Map<String, Map<String, List<String>>> responseHighlight = response.getHighlighting();
long contentID = content.getId();
Map<String, List<String>> responseHighlightID = responseHighlight.get(Long.toString(contentID));
@ -140,7 +133,12 @@ class HighlightedMatchesSource implements MarkupSource, HighlightLookup {
highlightedContent = insertAnchors(highlightedContent);
return "<pre>" + highlightedContent + "</pre>";
}
} catch (SolrServerException ex) {
}
catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Couldn't query markup.", ex);
return "";
}
catch (SolrServerException ex) {
logger.log(Level.INFO, "Could not query markup. ", ex);
return "";
}

View File

@ -24,8 +24,13 @@ import java.io.Reader;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
@ -41,11 +46,12 @@ import org.sleuthkit.datamodel.ReadContentInputStream;
class Ingester {
private static final Logger logger = Logger.getLogger(Ingester.class.getName());
private SolrServer solrCore;
private boolean uncommitedIngests = false;
private final ExecutorService upRequestExecutor = Executors.newSingleThreadExecutor();
static final int UP_REQUEST_TIMEOUT_SECS = 30 * 60; //30 min TODO use variable time depending on file size
private final Server solrServer = KeywordSearch.getServer();
Ingester(SolrServer solrCore) {
this.solrCore = solrCore;
Ingester() {
}
@Override
@ -108,42 +114,79 @@ class Ingester {
* content, but the Solr server is probably fine.
*/
private void ingest(ContentStream cs, Map<String, String> fields) throws IngesterException {
ContentStreamUpdateRequest up = new ContentStreamUpdateRequest("/update/extract");
final ContentStreamUpdateRequest up = new ContentStreamUpdateRequest("/update/extract");
up.addContentStream(cs);
setFields(up, fields);
up.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
//logger.log(Level.INFO, "Ingesting " + fields.get("file_name"));
up.setParam("commit", "false");
try {
solrCore.request(up);
// should't get any checked exceptions,
} catch (IOException ex) {
// It's possible that we will have IO errors
throw new IngesterException("Problem reading file.", ex);
} catch (IllegalStateException ex) {
// problems with content
throw new IngesterException("Problem reading file.", ex);
} catch (SolrServerException ex) {
// If there's a problem talking to Solr, something is fundamentally
// wrong with ingest
throw new IngesterException("Problem with Solr", ex);
} catch (SolrException ex) {
// Tika problems result in an unchecked SolrException
ErrorCode ec = ErrorCode.getErrorCode(ex.code());
// When Tika has problems with a document, it throws a server error
// but it's okay to continue with other documents
if (ec.equals(ErrorCode.SERVER_ERROR)) {
throw new IngesterException("Problem posting file contents to Solr. SolrException error code: " + ec, ex);
} else {
// shouldn't get any other error codes
throw ex;
}
final Future f = upRequestExecutor.submit(new UpRequestTask(up));
try {
//TODO use timeout proportional to content size
f.get(UP_REQUEST_TIMEOUT_SECS, TimeUnit.SECONDS);
} catch (TimeoutException te) {
logger.log(Level.WARNING, "Solr timeout encountered, trying to restart Solr");
//TODO restart solr might be needed to recover from some error conditions
hardSolrRestart();
throw new IngesterException("Solr index request time out for id: " + fields.get("id") + ", name: " + fields.get("file_name"));
} catch (Exception e) {
throw new IngesterException("Problem posting content to Solr, id: " + fields.get("id") + ", name: " + fields.get("file_name"), e);
}
uncommitedIngests = true;
}
/**
 * Attempt to restart Solr and recover from an internal server error by
 * closing the core, stopping the server, starting it again, and re-opening
 * the core. Invoked after an index request times out (see ingest()),
 * when the server instance may no longer be responding.
 */
private void hardSolrRestart() {
solrServer.closeCore();
solrServer.stop();
solrServer.start();
solrServer.openCore();
}
/**
 * Runnable that posts a single content-stream update request to the Solr
 * server. Submitted to upRequestExecutor so the caller can bound the
 * request with a timeout (UP_REQUEST_TIMEOUT_SECS); checked failures are
 * rethrown as RuntimeException so they surface through Future.get().
 */
private class UpRequestTask implements Runnable {
// the update request (content stream plus literal fields) to post to Solr
ContentStreamUpdateRequest up;
UpRequestTask(ContentStreamUpdateRequest up) {
this.up = up;
}
// NOTE(review): stray statement — a bare assignment cannot appear directly
// in a class body; this looks like diff residue. Confirm against the
// original file (the caller sets uncommitedIngests after a successful ingest).
uncommitedIngests = true;
@Override
public void run() {
try {
solrServer.request(up);
} catch (NoOpenCoreException ex) {
// no core is open (e.g. case being closed) — cannot index this content
throw new RuntimeException("No Solr core available, cannot index the content", ex);
} catch (IllegalStateException ex) {
// problems with content
throw new RuntimeException("Problem reading file.", ex);
} catch (SolrServerException ex) {
// If there's a problem talking to Solr, something is fundamentally
// wrong with ingest
throw new RuntimeException("Problem with Solr", ex);
} catch (SolrException ex) {
// Tika problems result in an unchecked SolrException
ErrorCode ec = ErrorCode.getErrorCode(ex.code());
// When Tika has problems with a document, it throws a server error
// but it's okay to continue with other documents
if (ec.equals(ErrorCode.SERVER_ERROR)) {
throw new RuntimeException("Problem posting file contents to Solr. SolrException error code: " + ec, ex);
} else {
// shouldn't get any other error codes
throw ex;
}
}
}
}
/**
@ -152,13 +195,12 @@ class Ingester {
*/
void commit() {
try {
solrCore.commit();
solrServer.commit();
uncommitedIngests = false;
// if commit doesn't work, something's broken
} catch (IOException ex) {
throw new RuntimeException(ex);
} catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Error commiting index", ex);
} catch (SolrServerException ex) {
throw new RuntimeException(ex);
logger.log(Level.WARNING, "Error commiting index", ex);
}
}
@ -170,6 +212,8 @@ class Ingester {
private static void setFields(ContentStreamUpdateRequest up, Map<String, String> fields) {
for (Entry<String, String> field : fields.entrySet()) {
up.setParam("literal." + field.getKey(), field.getValue());
}
}
@ -224,5 +268,9 @@ class Ingester {
IngesterException(String message, Throwable ex) {
super(message, ex);
}
IngesterException(String message) {
super(message);
}
}
}

View File

@ -27,6 +27,7 @@ import org.openide.nodes.Node;
import org.openide.nodes.Node.Property;
import org.openide.nodes.PropertySupport;
import org.openide.nodes.Sheet;
import org.openide.util.Exceptions;
import org.openide.util.lookup.Lookups;
import org.openide.util.lookup.ProxyLookup;
import org.sleuthkit.autopsy.directorytree.ExternalViewerAction;
@ -52,7 +53,14 @@ class KeywordSearchFilterNode extends FilterNode {
String getSnippet() {
final Content content = this.getOriginal().getLookup().lookup(Content.class);
final String snippet = LuceneQuery.querySnippet(solrQuery, content.getId(), false, true);
String snippet;
try {
snippet = LuceneQuery.querySnippet(solrQuery, content.getId(), false, true);
} catch (NoOpenCoreException ex) {
//logger.log(Level.WARNING, "Could not perform the snippet query. ", ex);
return "";
}
return snippet;
}
@ -101,7 +109,7 @@ class KeywordSearchFilterNode extends FilterNode {
return propertySets;
}
/**
* Right click action for the nodes that we want to pass to the directory
* table and the output view.
@ -113,18 +121,17 @@ class KeywordSearchFilterNode extends FilterNode {
public Action[] getActions(boolean popup) {
List<Action> actions = new ArrayList<Action>();
Content content = this.getOriginal().getLookup().lookup(Content.class);
actions.addAll(content.accept(new GetPopupActionsContentVisitor()));
//actions.add(new IndexContentFilesAction(nodeContent, "Index"));
return actions.toArray(new Action[actions.size()]);
}
private class GetPopupActionsContentVisitor extends ContentVisitor.Default<List<Action>> {
@Override
public List<Action> visit(File f) {
List<Action> actions = new ArrayList<Action>();
@ -133,10 +140,10 @@ class KeywordSearchFilterNode extends FilterNode {
actions.add(new ExtractAction("Extract File", getOriginal()));
return actions;
}
@Override
protected List<Action> defaultVisit(Content c) {
return new ArrayList<Action>();
}
}
}

View File

@ -88,8 +88,7 @@ public final class KeywordSearchIngestService implements IngestServiceFsContent
public enum IngestStatus {
INGESTED, EXTRACTED_INGESTED, SKIPPED,
};
INGESTED, EXTRACTED_INGESTED, SKIPPED,};
private Map<Long, IngestStatus> ingestStatus;
private Map<String, List<FsContent>> reportedHits; //already reported hits
@ -219,16 +218,10 @@ public final class KeywordSearchIngestService implements IngestServiceFsContent
this.managerProxy = managerProxy;
Server.Core solrCore = null;
try {
solrCore = KeywordSearch.getServer().getCore();
} catch (SolrServerException ex) {
logger.log(Level.WARNING, "Could not get Solr core", ex);
managerProxy.postMessage(IngestMessage.createErrorMessage(++messageID, instance, "Error initializing.", "Keyword indexing and search cannot proceed. Try restarting the application."));
return;
}
Server solrServer = KeywordSearch.getServer();
ingester = solrCore.getIngester();
ingester = solrServer.getIngester();
ingestStatus = new HashMap<Long, IngestStatus>();
@ -348,7 +341,7 @@ public final class KeywordSearchIngestService implements IngestServiceFsContent
private void indexChangeNotify() {
//signal a potential change in number of indexed files
try {
final int numIndexedFiles = KeywordSearch.getServer().getCore().queryNumIndexedFiles();
final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles();
SwingUtilities.invokeLater(new Runnable() {
@Override
@ -356,6 +349,8 @@ public final class KeywordSearchIngestService implements IngestServiceFsContent
KeywordSearch.changeSupport.firePropertyChange(KeywordSearch.NUM_FILES_CHANGE_EVT, null, new Integer(numIndexedFiles));
}
});
} catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", ex);
} catch (SolrServerException se) {
logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", se);
}
@ -591,6 +586,12 @@ public final class KeywordSearchIngestService implements IngestServiceFsContent
try {
queryResult = del.performQuery();
} catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), ex);
//no reason to continue with next query if recovery failed
//or wait for recovery to kick in and run again later
//likely case has closed and threads are being interrupted
break;
} catch (Exception e) {
logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), e);
continue;

View File

@ -46,7 +46,7 @@ import org.sleuthkit.autopsy.casemodule.Case;
*
* @author dfickling
*/
public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
public class KeywordSearchPanel extends AbstractKeywordSearchPerformer {
private static final Logger logger = Logger.getLogger(KeywordSearchPanel.class.getName());
private KeywordPropertyChangeListener listener;
@ -60,14 +60,15 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
}
private void customizeComponents() {
listener = new KeywordPropertyChangeListener();
KeywordSearch.getServer().addServerActionListener(listener);
Case.addPropertyChangeListener(listener);
searchBox.addFocusListener(new FocusListener() {
@Override
public void focusGained(FocusEvent e) {
if (searchBox.getText().equals("Search...")) {
@ -76,6 +77,7 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
entered = true;
}
}
@Override
public void focusLost(FocusEvent e) {
if (searchBox.getText().equals("")) {
@ -90,10 +92,9 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
public void actionPerformed(ActionEvent e) {
listsMenu.setVisible(false);
}
});
// Adding border of six to account for menu border
listsMenu.setSize(listsPanel.getPreferredSize().width+6, listsPanel.getPreferredSize().height+6);
listsMenu.setSize(listsPanel.getPreferredSize().width + 6, listsPanel.getPreferredSize().height + 6);
listsMenu.add(listsPanel);
listsMenu.addPopupMenuListener(new PopupMenuListener() {
@ -111,9 +112,8 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
public void popupMenuCanceled(PopupMenuEvent e) {
listsButton.setSelected(false);
}
});
searchBox.setComponentPopupMenu(rightClickMenu);
ActionListener actList = new ActionListener() {
@ -140,9 +140,9 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
copyMenuItem.addActionListener(actList);
pasteMenuItem.addActionListener(actList);
selectAllMenuItem.addActionListener(actList);
}
private void resetSearchBox() {
searchBox.setEditable(true);
searchBox.setText("Search...");
@ -293,8 +293,9 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
}// </editor-fold>//GEN-END:initComponents
private void searchBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_searchBoxActionPerformed
if(!entered)
if (!entered) {
return;
}
getRootPane().setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
try {
search();
@ -326,7 +327,6 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
private void settingsLabelMouseExited(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_settingsLabelMouseExited
settingsLabel.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/keywordsearch/dropdown-icon.png")));
}//GEN-LAST:event_settingsLabelMouseExited
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JMenuItem copyMenuItem;
private javax.swing.JMenuItem cutMenuItem;
@ -362,7 +362,7 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
public List<Keyword> getQueryList() {
throw new UnsupportedOperationException("No list for single-keyword search");
}
private class KeywordPropertyChangeListener implements PropertyChangeListener {
@Override
@ -370,7 +370,7 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
String changed = evt.getPropertyName();
Object oldValue = evt.getOldValue();
Object newValue = evt.getNewValue();
if (changed.equals(Case.CASE_CURRENT_CASE)) {
if (newValue == null) {
setFields(false);
@ -382,10 +382,14 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
switch (state) {
case STARTED:
try {
final int numIndexedFiles = KeywordSearch.getServer().getCore().queryNumIndexedFiles();
final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles();
KeywordSearch.changeSupport.firePropertyChange(KeywordSearch.NUM_FILES_CHANGE_EVT, null, new Integer(numIndexedFiles));
//setFilesIndexed(numIndexedFiles);
} catch (SolrServerException se) {
}
catch (NoOpenCoreException ex) {
logger.log(Level.SEVERE, "Error executing Solr query, " + ex);
}
catch (SolrServerException se) {
logger.log(Level.SEVERE, "Error executing Solr query, " + se.getMessage());
}
break;
@ -395,8 +399,8 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
}
}
}
private void setFields(boolean enabled){
private void setFields(boolean enabled) {
searchBox.setEnabled(enabled);
regExCheckboxMenuItem.setEnabled(enabled);
settingsLabel.setEnabled(enabled);
@ -405,25 +409,25 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
active = enabled;
}
}
private void maybeShowSettingsPopup (MouseEvent evt) {
if(!active) {
private void maybeShowSettingsPopup(MouseEvent evt) {
if (!active) {
return;
}
if (evt != null && !SwingUtilities.isLeftMouseButton(evt)) {
return;
}
settingsMenu.show(searchBoxPanel, 0, searchBoxPanel.getHeight());
}
private void maybeShowListsPopup (MouseEvent evt) {
if(!active) {
private void maybeShowListsPopup(MouseEvent evt) {
if (!active) {
return;
}
if (evt != null && !SwingUtilities.isLeftMouseButton(evt)) {
return;
}
listsMenu.show(listsButton, listsButton.getWidth()-listsMenu.getWidth(), listsButton.getHeight()-1);
listsMenu.show(listsButton, listsButton.getWidth() - listsMenu.getWidth(), listsButton.getHeight() - 1);
}
}

View File

@ -36,10 +36,10 @@ public interface KeywordSearchQuery {
/**
* execute query and return results without publishing them
* return results for all matching terms
*
* @throws NoOpenCoreException if query failed due to server error, this could be a notification to stop processing
* @return
*/
public Map<String,List<FsContent>> performQuery();
public Map<String,List<FsContent>> performQuery() throws NoOpenCoreException;
@ -84,8 +84,9 @@ public interface KeywordSearchQuery {
* @param newFsHit fscontent for which to write results for this hit
* @param listName listname
* @return collection of results (with cached bb artifacts/attributes) created and written
* @throws NoOpenCoreException if could not write to bb because required query failed due to server error, this could be a notification to stop processing
*/
public KeywordWriteResult writeToBlackBoard(String termHit, FsContent newFsHit, String listName);
public KeywordWriteResult writeToBlackBoard(String termHit, FsContent newFsHit, String listName) throws NoOpenCoreException;
}

View File

@ -225,7 +225,13 @@ public class KeywordSearchResultFactory extends ChildFactory<KeyValueQuery> {
}
//execute the query and get fscontents matching
Map<String, List<FsContent>> tcqRes = tcq.performQuery();
Map<String, List<FsContent>> tcqRes;
try {
tcqRes = tcq.performQuery();
} catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Could not perform the query. ", ex);
return false;
}
final Set<FsContent> fsContents = new HashSet<FsContent>();
for (String key : tcqRes.keySet()) {
fsContents.addAll(tcqRes.get(key));
@ -247,8 +253,14 @@ public class KeywordSearchResultFactory extends ChildFactory<KeyValueQuery> {
AbstractFsContentNode.fillPropertyMap(resMap, f);
setCommonProperty(resMap, CommonPropertyTypes.MATCH, f.getName());
if (literal_query) {
final String snippet = LuceneQuery.querySnippet(tcq.getEscapedQueryString(), f.getId(), false, true);
setCommonProperty(resMap, CommonPropertyTypes.CONTEXT, snippet);
try {
String snippet;
snippet = LuceneQuery.querySnippet(tcq.getEscapedQueryString(), f.getId(), false, true);
setCommonProperty(resMap, CommonPropertyTypes.CONTEXT, snippet);
} catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Could not perform the query. ", ex);
return false;
}
}
final String highlightQueryEscaped = getHighlightQuery(tcq, literal_query, tcqRes, f);
toPopulate.add(new KeyValueQueryContent(f.getName(), resMap, ++resID, f, highlightQueryEscaped, tcq));
@ -367,7 +379,14 @@ public class KeywordSearchResultFactory extends ChildFactory<KeyValueQuery> {
LuceneQuery filesQuery = new LuceneQuery(keywordQuery);
filesQuery.escape();
Map<String, List<FsContent>> matchesRes = filesQuery.performQuery();
Map<String, List<FsContent>> matchesRes;
try {
matchesRes = filesQuery.performQuery();
} catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Could not perform the query. ", ex);
return false;
}
Set<FsContent> matches = new HashSet<FsContent>();
for (String key : matchesRes.keySet()) {
matches.addAll(matchesRes.get(key));
@ -435,7 +454,7 @@ public class KeywordSearchResultFactory extends ChildFactory<KeyValueQuery> {
* worker for writing results to bb, with progress bar, cancellation,
* and central registry of workers to be stopped when case is closed
*/
static class ResultWriter extends SwingWorker<Object,Void> {
static class ResultWriter extends SwingWorker<Object, Void> {
private static List<ResultWriter> writers = new ArrayList<ResultWriter>();
private ProgressHandle progress;

View File

@ -39,12 +39,11 @@ import org.apache.solr.client.solrj.response.TermsResponse.Term;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.openide.nodes.Node;
import org.openide.util.Exceptions;
import org.openide.windows.TopComponent;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
import org.sleuthkit.autopsy.corecomponents.TableFilterNode;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.ServiceDataEvent;
import org.sleuthkit.autopsy.keywordsearch.KeywordSearchResultFactory.ResultWriter;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
@ -104,10 +103,8 @@ public class LuceneQuery implements KeywordSearchQuery {
return null;
}
@Override
public Map<String, List<FsContent>> performQuery() {
public Map<String, List<FsContent>> performQuery() throws NoOpenCoreException {
Map<String, List<FsContent>> results = new HashMap<String, List<FsContent>>();
//in case of single term literal query there is only 1 term
results.put(query, performLuceneQuery());
@ -118,12 +115,19 @@ public class LuceneQuery implements KeywordSearchQuery {
@Override
public void execute() {
escape();
Set<FsContent>fsMatches = new HashSet<FsContent>();
final Map<String, List<FsContent>> matches = performQuery();
Set<FsContent> fsMatches = new HashSet<FsContent>();
final Map<String, List<FsContent>> matches;
try {
matches = performQuery();
} catch (NoOpenCoreException ex) {
return;
}
for (String key : matches.keySet()) {
fsMatches.addAll(matches.get(key));
}
String pathText = "Keyword query: " + query;
if (matches.isEmpty()) {
@ -133,7 +137,7 @@ public class LuceneQuery implements KeywordSearchQuery {
//get listname
String listName = "";
Node rootNode = new KeywordSearchNode(new ArrayList<FsContent>(fsMatches), queryEscaped);
Node filteredRootNode = new TableFilterNode(rootNode, true);
@ -142,7 +146,7 @@ public class LuceneQuery implements KeywordSearchQuery {
//write to bb
new ResultWriter(matches, this, listName).execute();
}
@Override
@ -150,17 +154,9 @@ public class LuceneQuery implements KeywordSearchQuery {
return query != null && !query.equals("");
}
private Collection<KeywordWriteResult> writeToBlackBoard(FsContent newFsHit, String listName) {
List<KeywordWriteResult> ret = new ArrayList<KeywordWriteResult>();
KeywordWriteResult written = writeToBlackBoard(query, newFsHit, listName);
if (written != null) {
ret.add(written);
}
return ret;
}
@Override
public KeywordWriteResult writeToBlackBoard(String termHit, FsContent newFsHit, String listName) {
public KeywordWriteResult writeToBlackBoard(String termHit, FsContent newFsHit, String listName) throws NoOpenCoreException {
final String MODULE_NAME = KeywordSearchIngestService.MODULE_NAME;
KeywordWriteResult writeResult = null;
@ -170,15 +166,20 @@ public class LuceneQuery implements KeywordSearchQuery {
bba = newFsHit.newArtifact(ARTIFACT_TYPE.TSK_KEYWORD_HIT);
writeResult = new KeywordWriteResult(bba);
} catch (Exception e) {
logger.log(Level.INFO, "Error adding bb artifact for keyword hit", e);
logger.log(Level.WARNING, "Error adding bb artifact for keyword hit", e);
return null;
}
String snippet = null;
try {
snippet = LuceneQuery.querySnippet(queryEscaped, newFsHit.getId(), false, true);
} catch (Exception e) {
logger.log(Level.INFO, "Error querying snippet: " + query, e);
}
catch (NoOpenCoreException e) {
logger.log(Level.WARNING, "Error querying snippet: " + query, e);
throw e;
}
catch (Exception e) {
logger.log(Level.WARNING, "Error querying snippet: " + query, e);
return null;
}
if (snippet != null) {
@ -207,36 +208,25 @@ public class LuceneQuery implements KeywordSearchQuery {
writeResult.add(attributes);
return writeResult;
} catch (TskException e) {
logger.log(Level.INFO, "Error adding bb attributes to artifact", e);
logger.log(Level.WARNING, "Error adding bb attributes to artifact", e);
}
return null;
}
/**
* Just perform the query and return result without updating the GUI
* This utility is used in this class, can be potentially reused by other classes
* @param query
* @return matches List
*/
private List<FsContent> performLuceneQuery() throws RuntimeException {
private List<FsContent> performLuceneQuery() throws NoOpenCoreException {
List<FsContent> matches = new ArrayList<FsContent>();
boolean allMatchesFetched = false;
final int ROWS_PER_FETCH = 10000;
Server.Core solrCore = null;
try {
solrCore = KeywordSearch.getServer().getCore();
} catch (SolrServerException e) {
logger.log(Level.INFO, "Could not get Solr core", e);
}
if (solrCore == null) {
return matches;
}
final Server solrServer = KeywordSearch.getServer();
SolrQuery q = new SolrQuery();
@ -249,17 +239,24 @@ public class LuceneQuery implements KeywordSearchQuery {
q.setStart(start);
try {
QueryResponse response = solrCore.query(q, METHOD.POST);
QueryResponse response = solrServer.query(q, METHOD.POST);
SolrDocumentList resultList = response.getResults();
long results = resultList.getNumFound();
allMatchesFetched = start + ROWS_PER_FETCH >= results;
SleuthkitCase sc;
try {
sc = Case.getCurrentCase().getSleuthkitCase();
} catch (IllegalStateException ex) {
//no case open, must be just closed
return matches;
}
for (SolrDocument resultDoc : resultList) {
long id = Long.parseLong((String) resultDoc.getFieldValue("id"));
SleuthkitCase sc = Case.getCurrentCase().getSleuthkitCase();
// TODO: has to be a better way to get files. Also, need to
// check that we actually get 1 hit for each id
ResultSet rs = sc.runQuery("select * from tsk_files where obj_id=" + id);
@ -271,18 +268,22 @@ public class LuceneQuery implements KeywordSearchQuery {
}
}
} catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Error executing Lucene Solr Query: " + query, ex);
throw ex;
} catch (SolrServerException ex) {
logger.log(Level.WARNING, "Error executing Lucene Solr Query: " + query.substring(0, Math.min(query.length() - 1, 200)), ex);
throw new RuntimeException(ex);
logger.log(Level.WARNING, "Error executing Lucene Solr Query: " + query, ex);
// TODO: handle bad query strings, among other issues
} catch (SQLException ex) {
logger.log(Level.WARNING, "Error interpreting results from Lucene Solr Query: " + query, ex);
return matches;
}
}
return matches;
}
/**
* return snippet preview context
* @param query the keyword query for text to highlight. Lucene special chars should already be escaped.
@ -291,18 +292,10 @@ public class LuceneQuery implements KeywordSearchQuery {
* @param group whether the query should look for all terms grouped together in the query order, or not
* @return
*/
public static String querySnippet(String query, long contentID, boolean isRegex, boolean group) {
public static String querySnippet(String query, long contentID, boolean isRegex, boolean group) throws NoOpenCoreException {
final int SNIPPET_LENGTH = 45;
Server.Core solrCore = null;
try {
solrCore = KeywordSearch.getServer().getCore();
} catch (SolrServerException ex) {
logger.log(Level.INFO, "Could not get Solr core", ex);
}
if (solrCore == null)
return "";
Server solrServer = KeywordSearch.getServer();
String highlightField = null;
if (isRegex) {
@ -316,17 +309,19 @@ public class LuceneQuery implements KeywordSearchQuery {
if (isRegex) {
StringBuilder sb = new StringBuilder();
sb.append(highlightField).append(":");
if (group)
if (group) {
sb.append("\"");
}
sb.append(query);
if (group)
if (group) {
sb.append("\"");
}
q.setQuery(sb.toString());
} else {
//simplify query/escaping and use default field
//quote only if user supplies quotes
q.setQuery(query);
q.setQuery(query);
}
q.addFilterQuery("id:" + contentID);
q.addHighlightField(highlightField);
@ -336,7 +331,7 @@ public class LuceneQuery implements KeywordSearchQuery {
q.setHighlightFragsize(SNIPPET_LENGTH);
try {
QueryResponse response = solrCore.query(q);
QueryResponse response = solrServer.query(q);
Map<String, Map<String, List<String>>> responseHighlight = response.getHighlighting();
Map<String, List<String>> responseHighlightID = responseHighlight.get(Long.toString(contentID));
if (responseHighlightID == null) {
@ -349,8 +344,12 @@ public class LuceneQuery implements KeywordSearchQuery {
// extracted content is HTML-escaped, but snippet goes in a plain text field
return StringEscapeUtils.unescapeHtml(contentHighlights.get(0)).trim();
}
} catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Error executing Lucene Solr Query: " + query, ex);
throw ex;
} catch (SolrServerException ex) {
throw new RuntimeException(ex);
logger.log(Level.WARNING, "Error executing Lucene Solr Query: " + query, ex);
return "";
}
}
}

View File

@ -0,0 +1,32 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2012 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.keywordsearch;
/**
 * Exception thrown when a keyword-search operation requires an open Solr
 * core but no core is currently open.
 */
public class NoOpenCoreException extends Exception {

    // Exception implements Serializable; declare an explicit version id
    // so the serialized form stays stable across class changes.
    private static final long serialVersionUID = 1L;

    /**
     * Creates the exception with the default message.
     */
    NoOpenCoreException() {
        super("No currently open Solr core.");
    }

    /**
     * Creates the exception with the default message and the underlying
     * cause, preserving the exception chain for diagnostics.
     *
     * @param cause the lower-level failure that triggered this condition
     */
    NoOpenCoreException(Throwable cause) {
        super("No currently open Solr core.", cause);
    }
}

View File

@ -44,7 +44,7 @@ import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.TermsResponse;
import org.apache.commons.httpclient.NoHttpResponseException;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrRequest.METHOD;
import org.apache.solr.common.util.NamedList;
import org.openide.modules.InstalledFileLocator;
import org.openide.util.Exceptions;
import org.sleuthkit.autopsy.casemodule.Case;
@ -60,8 +60,8 @@ class Server {
private static final String DEFAULT_CORE_NAME = "coreCase";
// TODO: DEFAULT_CORE_NAME needs to be replaced with unique names to support multiple open cases
public static final String CORE_EVT = "CORE_EVT";
private String javaPath = "java";
private Process curSolrProcess = null;
public enum CORE_EVT_STATES {
@ -86,7 +86,7 @@ class Server {
serverAction = new ServerAction();
solrFolder = InstalledFileLocator.getDefault().locate("solr", Server.class.getPackage().getName(), false);
instanceDir = solrFolder.getAbsolutePath() + File.separator + "solr";
javaPath = PlatformUtil.getJavaPath();
}
@ -110,22 +110,25 @@ class Server {
InputStreamPrinterThread(InputStream stream, String type) {
this.stream = stream;
try{
try {
String log = System.getProperty("netbeans.user") + "/var/log/solr.log." + type;
File outputFile = new File(log.concat(".0"));
File first = new File(log.concat(".1"));
File second = new File(log.concat(".2"));
if(second.exists())
if (second.exists()) {
second.delete();
if(first.exists())
}
if (first.exists()) {
first.renameTo(second);
if(outputFile.exists())
}
if (outputFile.exists()) {
outputFile.renameTo(first);
else
} else {
outputFile.createNewFile();
}
out = new FileOutputStream(outputFile);
} catch(Exception ex){
} catch (Exception ex) {
logger.log(Level.WARNING, "Failed to create solr log file", ex);
}
}
@ -134,7 +137,7 @@ class Server {
public void run() {
InputStreamReader isr = new InputStreamReader(stream);
BufferedReader br = new BufferedReader(isr);
try{
try {
OutputStreamWriter osw = new OutputStreamWriter(out);
BufferedWriter bw = new BufferedWriter(osw);
String line = null;
@ -153,14 +156,20 @@ class Server {
* (probably before the server is ready) and doesn't check whether it was
* successful.
*/
synchronized void start() {
void start() {
logger.log(Level.INFO, "Starting Solr server from: " + solrFolder.getAbsolutePath());
try {
Process start = Runtime.getRuntime().exec(javaPath + " -DSTOP.PORT=8079 -DSTOP.KEY=mysecret -jar start.jar", null, solrFolder);
curSolrProcess = Runtime.getRuntime().exec(javaPath + " -DSTOP.PORT=8079 -DSTOP.KEY=mysecret -jar start.jar", null, solrFolder);
try {
//block, give time to fully start the process
//so if it's restarted solr operations can be resumed seamlessly
Thread.sleep(3000);
} catch (InterruptedException ex) {
Exceptions.printStackTrace(ex);
}
// Handle output to prevent process from blocking
(new InputStreamPrinterThread(start.getInputStream(), "input")).start();
(new InputStreamPrinterThread(start.getErrorStream(), "error")).start();
(new InputStreamPrinterThread(curSolrProcess.getInputStream(), "input")).start();
(new InputStreamPrinterThread(curSolrProcess.getErrorStream(), "error")).start();
} catch (IOException ex) {
throw new RuntimeException(ex);
@ -172,13 +181,18 @@ class Server {
*
* Waits for the stop command to finish
* before returning.
* @return true if the stop command finished successfully, else false
*/
synchronized boolean stop() {
synchronized void stop() {
try {
logger.log(Level.INFO, "Stopping Solr server from: " + solrFolder.getAbsolutePath());
//try graceful shutdown
Process stop = Runtime.getRuntime().exec(javaPath + " -DSTOP.PORT=8079 -DSTOP.KEY=mysecret -jar start.jar --stop", null, solrFolder);
return stop.waitFor() == 0;
stop.waitFor();
//if still running, forcefully stop it
if (curSolrProcess != null) {
curSolrProcess.destroy();
curSolrProcess = null;
}
} catch (InterruptedException ex) {
throw new RuntimeException(ex);
@ -217,7 +231,7 @@ class Server {
return true;
}
/**** Convenience methods for use while we only open one case at a time ****/
private Core currentCore = null;
private volatile Core currentCore = null;
synchronized void openCore() {
if (currentCore != null) {
@ -236,13 +250,6 @@ class Server {
serverAction.putValue(CORE_EVT, CORE_EVT_STATES.STOPPED);
}
synchronized Core getCore() throws SolrServerException {
if (currentCore == null) {
throw new SolrServerException("No currently open Core!");
}
return currentCore;
}
/**** end single-case specific methods ****/
/**
* Open a core for the given case
@ -255,6 +262,106 @@ class Server {
return this.openCore(DEFAULT_CORE_NAME, new File(dataDir));
}
/**
 * Commit any pending changes on the currently open core.
 *
 * @throws NoOpenCoreException if no core is currently open
 * @throws SolrServerException if the commit fails on the Solr server
 */
synchronized void commit() throws SolrServerException, NoOpenCoreException {
    if (null == currentCore) {
        throw new NoOpenCoreException();
    }
    currentCore.commit();
}
/**
 * Execute a raw Solr request against the currently open core.
 *
 * Synchronized like the other core accessors (commit, query, queryTerms)
 * so the null check and the use of currentCore cannot race with
 * openCore()/closeCore(); the volatile field alone only guarantees
 * visibility, not check-then-act atomicity.
 *
 * @param request the Solr request to execute
 * @return the Solr response as a name/value list
 * @throws NoOpenCoreException if no core is currently open
 * @throws SolrServerException if the request fails on the Solr server
 */
synchronized NamedList<Object> request(SolrRequest request) throws SolrServerException, NoOpenCoreException {
    if (currentCore == null) {
        throw new NoOpenCoreException();
    }
    return currentCore.request(request);
}
/**
 * Query for the total number of documents in the index without
 * retrieving any of the documents themselves.
 *
 * @return number of indexed files
 * @throws NoOpenCoreException if no core is currently open
 * @throws SolrServerException if the query fails on the Solr server
 */
public synchronized int queryNumIndexedFiles() throws SolrServerException, NoOpenCoreException {
    if (null == currentCore) {
        throw new NoOpenCoreException();
    }
    final SolrQuery countQuery = new SolrQuery("*:*");
    // request zero rows: only the total hit count is needed
    countQuery.setRows(0);
    final long numFound = query(countQuery).getResults().getNumFound();
    return (int) numFound;
}
/**
 * Run a Solr query against the currently open core.
 *
 * @param sq the query to execute
 * @return the query response
 * @throws NoOpenCoreException if no core is currently open
 * @throws SolrServerException if the query fails on the Solr server
 */
public synchronized QueryResponse query(SolrQuery sq) throws SolrServerException, NoOpenCoreException {
    if (null == currentCore) {
        throw new NoOpenCoreException();
    }
    final QueryResponse response = currentCore.query(sq);
    return response;
}
/**
 * Run a Solr query against the currently open core using a specific
 * HTTP method.
 *
 * @param sq the query to execute
 * @param method the HTTP method (GET/POST) to use for the request
 * @return the query response
 * @throws NoOpenCoreException if no core is currently open
 * @throws SolrServerException if the query fails on the Solr server
 */
public synchronized QueryResponse query(SolrQuery sq, SolrRequest.METHOD method) throws SolrServerException, NoOpenCoreException {
    if (null == currentCore) {
        throw new NoOpenCoreException();
    }
    final QueryResponse response = currentCore.query(sq, method);
    return response;
}
/**
 * Run a Solr terms query against the currently open core.
 *
 * @param sq the terms query to execute
 * @return the terms component of the query response
 * @throws NoOpenCoreException if no core is currently open
 * @throws SolrServerException if the query fails on the Solr server
 */
public synchronized TermsResponse queryTerms(SolrQuery sq) throws SolrServerException, NoOpenCoreException {
    if (null == currentCore) {
        throw new NoOpenCoreException();
    }
    return currentCore.query(sq).getTermsResponse();
}
/**
 * Fetch the indexed text for the given content object from the
 * currently open core.
 *
 * @param content the content whose extracted text is requested
 * @return the indexed text of the content
 * @throws NoOpenCoreException if no core is currently open
 * @throws SolrServerException if the lookup fails on the Solr server
 */
public synchronized String getSolrContent(final Content content) throws SolrServerException, NoOpenCoreException {
    if (null == currentCore) {
        throw new NoOpenCoreException();
    }
    return currentCore.getSolrContent(content);
}
/**
 * Factory method that creates a fresh ingester for adding content
 * to the index.
 *
 * @return a new Ingester instance
 */
public Ingester getIngester() {
    final Ingester ingester = new Ingester();
    return ingester;
}
/**
* Open a new core
* @param coreName name to refer to the core by in Solr
@ -300,25 +407,39 @@ class Server {
}
}
public Ingester getIngester() {
return new Ingester(this.solrCore);
}
public synchronized QueryResponse query(SolrQuery sq) throws SolrServerException {
private QueryResponse query(SolrQuery sq) throws SolrServerException {
return solrCore.query(sq);
}
public synchronized QueryResponse query(SolrQuery sq, SolrRequest.METHOD method) throws SolrServerException {
private NamedList<Object> request(SolrRequest request) throws SolrServerException {
try {
return solrCore.request(request);
} catch (IOException e) {
logger.log(Level.WARNING, "Could not issue Solr request. ", e);
throw new SolrServerException("Could not issue Solr request", e);
}
}
private QueryResponse query(SolrQuery sq, SolrRequest.METHOD method) throws SolrServerException {
return solrCore.query(sq, method);
}
public synchronized TermsResponse queryTerms(SolrQuery sq) throws SolrServerException {
private TermsResponse queryTerms(SolrQuery sq) throws SolrServerException {
QueryResponse qres = solrCore.query(sq);
return qres.getTermsResponse();
}
public synchronized String getSolrContent(final Content content) {
private void commit() throws SolrServerException {
try {
solrCore.commit();
} catch (IOException e) {
logger.log(Level.WARNING, "Could not commit index. ", e);
throw new SolrServerException("Could not commit index", e);
}
}
private String getSolrContent(final Content content) {
final SolrQuery q = new SolrQuery();
q.setQuery("*:*");
q.addFilterQuery("id:" + content.getId());
@ -347,7 +468,7 @@ class Server {
* @return int representing number of indexed files
* @throws SolrServerException
*/
public synchronized int queryNumIndexedFiles() throws SolrServerException {
private int queryNumIndexedFiles() throws SolrServerException {
SolrQuery q = new SolrQuery("*:*");
q.setRows(0);
return (int) query(q).getResults().getNumFound();

View File

@ -128,10 +128,13 @@ public class TermComponentQuery implements KeywordSearchQuery {
protected List<Term> executeQuery(SolrQuery q) {
List<Term> termsCol = null;
try {
Server.Core solrCore = KeywordSearch.getServer().getCore();
TermsResponse tr = solrCore.queryTerms(q);
Server solrServer = KeywordSearch.getServer();
TermsResponse tr = solrServer.queryTerms(q);
termsCol = tr.getTerms(TERMS_SEARCH_FIELD);
return termsCol;
} catch (NoOpenCoreException ex) {
logger.log(Level.SEVERE, "Error executing the regex terms query: " + termsQuery, ex);
return null; //no need to create result view, just display error dialog
} catch (SolrServerException ex) {
logger.log(Level.SEVERE, "Error executing the regex terms query: " + termsQuery, ex);
return null; //no need to create result view, just display error dialog
@ -154,15 +157,20 @@ public class TermComponentQuery implements KeywordSearchQuery {
}
@Override
public KeywordWriteResult writeToBlackBoard(String termHit, FsContent newFsHit, String listName) {
public KeywordWriteResult writeToBlackBoard(String termHit, FsContent newFsHit, String listName) throws NoOpenCoreException {
final String MODULE_NAME = KeywordSearchIngestService.MODULE_NAME;
//snippet
String snippet = null;
try {
snippet = LuceneQuery.querySnippet(KeywordSearchUtil.escapeLuceneQuery(termHit, true, false), newFsHit.getId(), true, true);
} catch (Exception e) {
logger.log(Level.INFO, "Error querying snippet: " + termHit, e);
}
catch (NoOpenCoreException e) {
logger.log(Level.WARNING, "Error querying snippet: " + termHit, e);
throw e;
}
catch (Exception e) {
logger.log(Level.WARNING, "Error querying snippet: " + termHit, e);
return null;
}
@ -178,7 +186,7 @@ public class TermComponentQuery implements KeywordSearchQuery {
bba = newFsHit.newArtifact(ARTIFACT_TYPE.TSK_KEYWORD_HIT);
writeResult = new KeywordWriteResult(bba);
} catch (Exception e) {
logger.log(Level.INFO, "Error adding bb artifact for keyword hit", e);
logger.log(Level.WARNING, "Error adding bb artifact for keyword hit", e);
return null;
}
@ -212,16 +220,15 @@ public class TermComponentQuery implements KeywordSearchQuery {
writeResult.add(attributes);
return writeResult;
} catch (TskException e) {
logger.log(Level.INFO, "Error adding bb attributes for terms search artifact", e);
logger.log(Level.WARNING, "Error adding bb attributes for terms search artifact", e);
}
return null;
}
@Override
public Map<String, List<FsContent>> performQuery() {
public Map<String, List<FsContent>> performQuery() throws NoOpenCoreException{
Map<String, List<FsContent>> results = new HashMap<String, List<FsContent>>();
final SolrQuery q = createQuery();
@ -230,20 +237,25 @@ public class TermComponentQuery implements KeywordSearchQuery {
for (Term term : terms) {
final String termS = KeywordSearchUtil.escapeLuceneQuery(term.getTerm(), true, false);
StringBuilder filesQueryB = new StringBuilder();
filesQueryB.append(TERMS_SEARCH_FIELD).append(":").append(termS);
final String queryStr = filesQueryB.toString();
LuceneQuery filesQuery = new LuceneQuery(queryStr);
try {
Map<String,List<FsContent>> subResults = filesQuery.performQuery();
Set<FsContent>filesResults = new HashSet<FsContent>();
Map<String, List<FsContent>> subResults = filesQuery.performQuery();
Set<FsContent> filesResults = new HashSet<FsContent>();
for (String key : subResults.keySet()) {
filesResults.addAll(subResults.get(key));
}
results.put(term.getTerm(), new ArrayList<FsContent>(filesResults));
} catch (RuntimeException e) {
}
catch (NoOpenCoreException e) {
logger.log(Level.SEVERE, "Error executing Solr query,", e);
throw e;
}
catch (RuntimeException e) {
logger.log(Level.SEVERE, "Error executing Solr query,", e);
}
@ -253,7 +265,6 @@ public class TermComponentQuery implements KeywordSearchQuery {
return results;
}
@Override
public void execute() {
SolrQuery q = createQuery();