TSK-468 keyword search hang on certain content

TSK-429 	Case closing exception handling
- better design of Solr server, better encapsulation to improve concurrency and error handling
- added timeout (30 mins by default, to be tweaked per file size) for file index operation
- implemented better solr server stop() (graceful, then brutal)
- added Solr restart in case of timeout; fixes the issue where the server instance is dead and does not respond. After the restart, ingest continues with the following files
- better exception/error handling, including handling of case closing while keyword search threads are being shut down but are still running
This commit is contained in:
adam-m 2012-04-27 17:53:41 -04:00
parent 27e9b7f8ee
commit 7e1b633be7
12 changed files with 471 additions and 227 deletions

View File

@ -196,16 +196,7 @@ public class ExtractedContentViewer implements DataContentViewer {
return false; return false;
} }
Server.Core solrCore = null; final Server solrServer = KeywordSearch.getServer();
try {
solrCore = KeywordSearch.getServer().getCore();
} catch (SolrServerException e) {
logger.log(Level.INFO, "Could not get Solr Core", e);
}
if (solrCore == null) {
return false;
}
SolrQuery q = new SolrQuery(); SolrQuery q = new SolrQuery();
q.setQuery("*:*"); q.setQuery("*:*");
@ -213,8 +204,13 @@ public class ExtractedContentViewer implements DataContentViewer {
q.setFields("id"); q.setFields("id");
try { try {
return !solrCore.query(q).getResults().isEmpty(); return !solrServer.query(q).getResults().isEmpty();
} catch (SolrServerException ex) { }
catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Couldn't determine whether content is supported.", ex);
return false;
}
catch (SolrServerException ex) {
logger.log(Level.WARNING, "Couldn't determine whether content is supported.", ex); logger.log(Level.WARNING, "Couldn't determine whether content is supported.", ex);
return false; return false;
} }
@ -228,13 +224,20 @@ public class ExtractedContentViewer implements DataContentViewer {
* @throws SolrServerException if something goes wrong * @throws SolrServerException if something goes wrong
*/ */
private String getSolrContent(Node node) throws SolrServerException { private String getSolrContent(Node node) throws SolrServerException {
Server.Core solrCore = KeywordSearch.getServer().getCore(); Server solrServer = KeywordSearch.getServer();
SolrQuery q = new SolrQuery(); SolrQuery q = new SolrQuery();
q.setQuery("*:*"); q.setQuery("*:*");
q.addFilterQuery("id:" + node.getLookup().lookup(Content.class).getId()); q.addFilterQuery("id:" + node.getLookup().lookup(Content.class).getId());
q.setFields("content"); q.setFields("content");
String content = (String) solrCore.query(q).getResults().get(0).getFieldValue("content"); String content;
try {
content = (String) solrServer.query(q).getResults().get(0).getFieldValue("content");
}
catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Couldn't get Solr content.", ex);
return "";
}
return content; return content;
} }

View File

@ -43,7 +43,7 @@ class HighlightedMatchesSource implements MarkupSource, HighlightLookup {
private static final String NO_MATCHES = "<span style='background:red'>No matches in content.</span>"; private static final String NO_MATCHES = "<span style='background:red'>No matches in content.</span>";
private Content content; private Content content;
private String solrQuery; private String solrQuery;
private Core solrCore; private Server solrServer;
private int numberHits; private int numberHits;
private boolean isRegex = false; private boolean isRegex = false;
private boolean group = true; private boolean group = true;
@ -54,11 +54,8 @@ class HighlightedMatchesSource implements MarkupSource, HighlightLookup {
this.isRegex = isRegex; this.isRegex = isRegex;
this.group = true; this.group = true;
try { this.solrServer = KeywordSearch.getServer();
this.solrCore = KeywordSearch.getServer().getCore();
} catch (SolrServerException ex) {
logger.log(Level.INFO, "Could not get Solr core", ex);
}
} }
HighlightedMatchesSource(Content content, String solrQuery, boolean isRegex, boolean group) { HighlightedMatchesSource(Content content, String solrQuery, boolean isRegex, boolean group) {
@ -72,10 +69,6 @@ class HighlightedMatchesSource implements MarkupSource, HighlightLookup {
@Override @Override
public String getMarkup() { public String getMarkup() {
if (solrCore == null) {
return NO_MATCHES;
}
String highLightField = null; String highLightField = null;
String highlightQuery = solrQuery; String highlightQuery = solrQuery;
@ -124,7 +117,7 @@ class HighlightedMatchesSource implements MarkupSource, HighlightLookup {
q.setHighlightFragsize(0); // don't fragment the highlight q.setHighlightFragsize(0); // don't fragment the highlight
try { try {
QueryResponse response = solrCore.query(q, METHOD.POST); QueryResponse response = solrServer.query(q, METHOD.POST);
Map<String, Map<String, List<String>>> responseHighlight = response.getHighlighting(); Map<String, Map<String, List<String>>> responseHighlight = response.getHighlighting();
long contentID = content.getId(); long contentID = content.getId();
Map<String, List<String>> responseHighlightID = responseHighlight.get(Long.toString(contentID)); Map<String, List<String>> responseHighlightID = responseHighlight.get(Long.toString(contentID));
@ -140,7 +133,12 @@ class HighlightedMatchesSource implements MarkupSource, HighlightLookup {
highlightedContent = insertAnchors(highlightedContent); highlightedContent = insertAnchors(highlightedContent);
return "<pre>" + highlightedContent + "</pre>"; return "<pre>" + highlightedContent + "</pre>";
} }
} catch (SolrServerException ex) { }
catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Couldn't query markup.", ex);
return "";
}
catch (SolrServerException ex) {
logger.log(Level.INFO, "Could not query markup. ", ex); logger.log(Level.INFO, "Could not query markup. ", ex);
return ""; return "";
} }

View File

@ -24,8 +24,13 @@ import java.io.Reader;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.logging.Level;
import java.util.logging.Logger; import java.util.logging.Logger;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest; import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest; import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
@ -41,11 +46,12 @@ import org.sleuthkit.datamodel.ReadContentInputStream;
class Ingester { class Ingester {
private static final Logger logger = Logger.getLogger(Ingester.class.getName()); private static final Logger logger = Logger.getLogger(Ingester.class.getName());
private SolrServer solrCore;
private boolean uncommitedIngests = false; private boolean uncommitedIngests = false;
private final ExecutorService upRequestExecutor = Executors.newSingleThreadExecutor();
static final int UP_REQUEST_TIMEOUT_SECS = 30 * 60; //30 min TODO use variable time depending on file size
private final Server solrServer = KeywordSearch.getServer();
Ingester(SolrServer solrCore) { Ingester() {
this.solrCore = solrCore;
} }
@Override @Override
@ -108,27 +114,64 @@ class Ingester {
* content, but the Solr server is probably fine. * content, but the Solr server is probably fine.
*/ */
private void ingest(ContentStream cs, Map<String, String> fields) throws IngesterException { private void ingest(ContentStream cs, Map<String, String> fields) throws IngesterException {
ContentStreamUpdateRequest up = new ContentStreamUpdateRequest("/update/extract"); final ContentStreamUpdateRequest up = new ContentStreamUpdateRequest("/update/extract");
up.addContentStream(cs); up.addContentStream(cs);
setFields(up, fields); setFields(up, fields);
up.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); up.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
//logger.log(Level.INFO, "Ingesting " + fields.get("file_name")); //logger.log(Level.INFO, "Ingesting " + fields.get("file_name"));
up.setParam("commit", "false"); up.setParam("commit", "false");
final Future f = upRequestExecutor.submit(new UpRequestTask(up));
try { try {
solrCore.request(up); //TODO use timeout proportional to content size
// should't get any checked exceptions, f.get(UP_REQUEST_TIMEOUT_SECS, TimeUnit.SECONDS);
} catch (IOException ex) { } catch (TimeoutException te) {
// It's possible that we will have IO errors logger.log(Level.WARNING, "Solr timeout encountered, trying to restart Solr");
throw new IngesterException("Problem reading file.", ex); //TODO restart solr might be needed to recover from some error conditions
hardSolrRestart();
throw new IngesterException("Solr index request time out for id: " + fields.get("id") + ", name: " + fields.get("file_name"));
} catch (Exception e) {
throw new IngesterException("Problem posting content to Solr, id: " + fields.get("id") + ", name: " + fields.get("file_name"), e);
}
uncommitedIngests = true;
}
//attempt to restart Solr and recover from its internal error
private void hardSolrRestart() {
solrServer.closeCore();
solrServer.stop();
solrServer.start();
solrServer.openCore();
}
private class UpRequestTask implements Runnable {
ContentStreamUpdateRequest up;
UpRequestTask(ContentStreamUpdateRequest up) {
this.up = up;
}
@Override
public void run() {
try {
solrServer.request(up);
} catch (NoOpenCoreException ex) {
throw new RuntimeException("No Solr core available, cannot index the content", ex);
} catch (IllegalStateException ex) { } catch (IllegalStateException ex) {
// problems with content // problems with content
throw new IngesterException("Problem reading file.", ex); throw new RuntimeException("Problem reading file.", ex);
} catch (SolrServerException ex) { } catch (SolrServerException ex) {
// If there's a problem talking to Solr, something is fundamentally // If there's a problem talking to Solr, something is fundamentally
// wrong with ingest // wrong with ingest
throw new IngesterException("Problem with Solr", ex); throw new RuntimeException("Problem with Solr", ex);
} catch (SolrException ex) { } catch (SolrException ex) {
// Tika problems result in an unchecked SolrException // Tika problems result in an unchecked SolrException
ErrorCode ec = ErrorCode.getErrorCode(ex.code()); ErrorCode ec = ErrorCode.getErrorCode(ex.code());
@ -136,14 +179,14 @@ class Ingester {
// When Tika has problems with a document, it throws a server error // When Tika has problems with a document, it throws a server error
// but it's okay to continue with other documents // but it's okay to continue with other documents
if (ec.equals(ErrorCode.SERVER_ERROR)) { if (ec.equals(ErrorCode.SERVER_ERROR)) {
throw new IngesterException("Problem posting file contents to Solr. SolrException error code: " + ec, ex); throw new RuntimeException("Problem posting file contents to Solr. SolrException error code: " + ec, ex);
} else { } else {
// shouldn't get any other error codes // shouldn't get any other error codes
throw ex; throw ex;
} }
} }
uncommitedIngests = true; }
} }
/** /**
@ -152,13 +195,12 @@ class Ingester {
*/ */
void commit() { void commit() {
try { try {
solrCore.commit(); solrServer.commit();
uncommitedIngests = false; uncommitedIngests = false;
// if commit doesn't work, something's broken } catch (NoOpenCoreException ex) {
} catch (IOException ex) { logger.log(Level.WARNING, "Error commiting index", ex);
throw new RuntimeException(ex);
} catch (SolrServerException ex) { } catch (SolrServerException ex) {
throw new RuntimeException(ex); logger.log(Level.WARNING, "Error commiting index", ex);
} }
} }
@ -170,6 +212,8 @@ class Ingester {
private static void setFields(ContentStreamUpdateRequest up, Map<String, String> fields) { private static void setFields(ContentStreamUpdateRequest up, Map<String, String> fields) {
for (Entry<String, String> field : fields.entrySet()) { for (Entry<String, String> field : fields.entrySet()) {
up.setParam("literal." + field.getKey(), field.getValue()); up.setParam("literal." + field.getKey(), field.getValue());
} }
} }
@ -224,5 +268,9 @@ class Ingester {
IngesterException(String message, Throwable ex) { IngesterException(String message, Throwable ex) {
super(message, ex); super(message, ex);
} }
IngesterException(String message) {
super(message);
}
} }
} }

View File

@ -27,6 +27,7 @@ import org.openide.nodes.Node;
import org.openide.nodes.Node.Property; import org.openide.nodes.Node.Property;
import org.openide.nodes.PropertySupport; import org.openide.nodes.PropertySupport;
import org.openide.nodes.Sheet; import org.openide.nodes.Sheet;
import org.openide.util.Exceptions;
import org.openide.util.lookup.Lookups; import org.openide.util.lookup.Lookups;
import org.openide.util.lookup.ProxyLookup; import org.openide.util.lookup.ProxyLookup;
import org.sleuthkit.autopsy.directorytree.ExternalViewerAction; import org.sleuthkit.autopsy.directorytree.ExternalViewerAction;
@ -52,7 +53,14 @@ class KeywordSearchFilterNode extends FilterNode {
String getSnippet() { String getSnippet() {
final Content content = this.getOriginal().getLookup().lookup(Content.class); final Content content = this.getOriginal().getLookup().lookup(Content.class);
final String snippet = LuceneQuery.querySnippet(solrQuery, content.getId(), false, true); String snippet;
try {
snippet = LuceneQuery.querySnippet(solrQuery, content.getId(), false, true);
} catch (NoOpenCoreException ex) {
//logger.log(Level.WARNING, "Could not perform the snippet query. ", ex);
return "";
}
return snippet; return snippet;
} }
@ -122,7 +130,6 @@ class KeywordSearchFilterNode extends FilterNode {
return actions.toArray(new Action[actions.size()]); return actions.toArray(new Action[actions.size()]);
} }
private class GetPopupActionsContentVisitor extends ContentVisitor.Default<List<Action>> { private class GetPopupActionsContentVisitor extends ContentVisitor.Default<List<Action>> {
@Override @Override
@ -133,10 +140,10 @@ class KeywordSearchFilterNode extends FilterNode {
actions.add(new ExtractAction("Extract File", getOriginal())); actions.add(new ExtractAction("Extract File", getOriginal()));
return actions; return actions;
} }
@Override @Override
protected List<Action> defaultVisit(Content c) { protected List<Action> defaultVisit(Content c) {
return new ArrayList<Action>(); return new ArrayList<Action>();
} }
} }
} }

View File

@ -88,8 +88,7 @@ public final class KeywordSearchIngestService implements IngestServiceFsContent
public enum IngestStatus { public enum IngestStatus {
INGESTED, EXTRACTED_INGESTED, SKIPPED, INGESTED, EXTRACTED_INGESTED, SKIPPED,};
};
private Map<Long, IngestStatus> ingestStatus; private Map<Long, IngestStatus> ingestStatus;
private Map<String, List<FsContent>> reportedHits; //already reported hits private Map<String, List<FsContent>> reportedHits; //already reported hits
@ -219,16 +218,10 @@ public final class KeywordSearchIngestService implements IngestServiceFsContent
this.managerProxy = managerProxy; this.managerProxy = managerProxy;
Server.Core solrCore = null; Server solrServer = KeywordSearch.getServer();
try {
solrCore = KeywordSearch.getServer().getCore();
} catch (SolrServerException ex) {
logger.log(Level.WARNING, "Could not get Solr core", ex);
managerProxy.postMessage(IngestMessage.createErrorMessage(++messageID, instance, "Error initializing.", "Keyword indexing and search cannot proceed. Try restarting the application."));
return;
}
ingester = solrCore.getIngester();
ingester = solrServer.getIngester();
ingestStatus = new HashMap<Long, IngestStatus>(); ingestStatus = new HashMap<Long, IngestStatus>();
@ -348,7 +341,7 @@ public final class KeywordSearchIngestService implements IngestServiceFsContent
private void indexChangeNotify() { private void indexChangeNotify() {
//signal a potential change in number of indexed files //signal a potential change in number of indexed files
try { try {
final int numIndexedFiles = KeywordSearch.getServer().getCore().queryNumIndexedFiles(); final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles();
SwingUtilities.invokeLater(new Runnable() { SwingUtilities.invokeLater(new Runnable() {
@Override @Override
@ -356,6 +349,8 @@ public final class KeywordSearchIngestService implements IngestServiceFsContent
KeywordSearch.changeSupport.firePropertyChange(KeywordSearch.NUM_FILES_CHANGE_EVT, null, new Integer(numIndexedFiles)); KeywordSearch.changeSupport.firePropertyChange(KeywordSearch.NUM_FILES_CHANGE_EVT, null, new Integer(numIndexedFiles));
} }
}); });
} catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", ex);
} catch (SolrServerException se) { } catch (SolrServerException se) {
logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", se); logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", se);
} }
@ -591,6 +586,12 @@ public final class KeywordSearchIngestService implements IngestServiceFsContent
try { try {
queryResult = del.performQuery(); queryResult = del.performQuery();
} catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), ex);
//no reason to continue with next query if recovery failed
//or wait for recovery to kick in and run again later
//likely case has closed and threads are being interrupted
break;
} catch (Exception e) { } catch (Exception e) {
logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), e); logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), e);
continue; continue;

View File

@ -68,6 +68,7 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
Case.addPropertyChangeListener(listener); Case.addPropertyChangeListener(listener);
searchBox.addFocusListener(new FocusListener() { searchBox.addFocusListener(new FocusListener() {
@Override @Override
public void focusGained(FocusEvent e) { public void focusGained(FocusEvent e) {
if (searchBox.getText().equals("Search...")) { if (searchBox.getText().equals("Search...")) {
@ -76,6 +77,7 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
entered = true; entered = true;
} }
} }
@Override @Override
public void focusLost(FocusEvent e) { public void focusLost(FocusEvent e) {
if (searchBox.getText().equals("")) { if (searchBox.getText().equals("")) {
@ -90,7 +92,6 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
public void actionPerformed(ActionEvent e) { public void actionPerformed(ActionEvent e) {
listsMenu.setVisible(false); listsMenu.setVisible(false);
} }
}); });
// Adding border of six to account for menu border // Adding border of six to account for menu border
listsMenu.setSize(listsPanel.getPreferredSize().width + 6, listsPanel.getPreferredSize().height + 6); listsMenu.setSize(listsPanel.getPreferredSize().width + 6, listsPanel.getPreferredSize().height + 6);
@ -111,7 +112,6 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
public void popupMenuCanceled(PopupMenuEvent e) { public void popupMenuCanceled(PopupMenuEvent e) {
listsButton.setSelected(false); listsButton.setSelected(false);
} }
}); });
searchBox.setComponentPopupMenu(rightClickMenu); searchBox.setComponentPopupMenu(rightClickMenu);
@ -293,8 +293,9 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
}// </editor-fold>//GEN-END:initComponents }// </editor-fold>//GEN-END:initComponents
private void searchBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_searchBoxActionPerformed private void searchBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_searchBoxActionPerformed
if(!entered) if (!entered) {
return; return;
}
getRootPane().setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); getRootPane().setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
try { try {
search(); search();
@ -326,7 +327,6 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
private void settingsLabelMouseExited(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_settingsLabelMouseExited private void settingsLabelMouseExited(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_settingsLabelMouseExited
settingsLabel.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/keywordsearch/dropdown-icon.png"))); settingsLabel.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/keywordsearch/dropdown-icon.png")));
}//GEN-LAST:event_settingsLabelMouseExited }//GEN-LAST:event_settingsLabelMouseExited
// Variables declaration - do not modify//GEN-BEGIN:variables // Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JMenuItem copyMenuItem; private javax.swing.JMenuItem copyMenuItem;
private javax.swing.JMenuItem cutMenuItem; private javax.swing.JMenuItem cutMenuItem;
@ -382,10 +382,14 @@ public class KeywordSearchPanel extends AbstractKeywordSearchPerformer{
switch (state) { switch (state) {
case STARTED: case STARTED:
try { try {
final int numIndexedFiles = KeywordSearch.getServer().getCore().queryNumIndexedFiles(); final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles();
KeywordSearch.changeSupport.firePropertyChange(KeywordSearch.NUM_FILES_CHANGE_EVT, null, new Integer(numIndexedFiles)); KeywordSearch.changeSupport.firePropertyChange(KeywordSearch.NUM_FILES_CHANGE_EVT, null, new Integer(numIndexedFiles));
//setFilesIndexed(numIndexedFiles); //setFilesIndexed(numIndexedFiles);
} catch (SolrServerException se) { }
catch (NoOpenCoreException ex) {
logger.log(Level.SEVERE, "Error executing Solr query, " + ex);
}
catch (SolrServerException se) {
logger.log(Level.SEVERE, "Error executing Solr query, " + se.getMessage()); logger.log(Level.SEVERE, "Error executing Solr query, " + se.getMessage());
} }
break; break;

View File

@ -36,10 +36,10 @@ public interface KeywordSearchQuery {
/** /**
* execute query and return results without publishing them * execute query and return results without publishing them
* return results for all matching terms * return results for all matching terms
* * @throws NoOpenCoreException if query failed due to server error, this could be a notification to stop processing
* @return * @return
*/ */
public Map<String,List<FsContent>> performQuery(); public Map<String,List<FsContent>> performQuery() throws NoOpenCoreException;
@ -84,8 +84,9 @@ public interface KeywordSearchQuery {
* @param newFsHit fscontent for which to write results for this hit * @param newFsHit fscontent for which to write results for this hit
* @param listName listname * @param listName listname
* @return collection of results (with cached bb artifacts/attributes) created and written * @return collection of results (with cached bb artifacts/attributes) created and written
* @throws NoOpenCoreException if could not write to bb because required query failed due to server error, this could be a notification to stop processing
*/ */
public KeywordWriteResult writeToBlackBoard(String termHit, FsContent newFsHit, String listName); public KeywordWriteResult writeToBlackBoard(String termHit, FsContent newFsHit, String listName) throws NoOpenCoreException;
} }

View File

@ -225,7 +225,13 @@ public class KeywordSearchResultFactory extends ChildFactory<KeyValueQuery> {
} }
//execute the query and get fscontents matching //execute the query and get fscontents matching
Map<String, List<FsContent>> tcqRes = tcq.performQuery(); Map<String, List<FsContent>> tcqRes;
try {
tcqRes = tcq.performQuery();
} catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Could not perform the query. ", ex);
return false;
}
final Set<FsContent> fsContents = new HashSet<FsContent>(); final Set<FsContent> fsContents = new HashSet<FsContent>();
for (String key : tcqRes.keySet()) { for (String key : tcqRes.keySet()) {
fsContents.addAll(tcqRes.get(key)); fsContents.addAll(tcqRes.get(key));
@ -247,8 +253,14 @@ public class KeywordSearchResultFactory extends ChildFactory<KeyValueQuery> {
AbstractFsContentNode.fillPropertyMap(resMap, f); AbstractFsContentNode.fillPropertyMap(resMap, f);
setCommonProperty(resMap, CommonPropertyTypes.MATCH, f.getName()); setCommonProperty(resMap, CommonPropertyTypes.MATCH, f.getName());
if (literal_query) { if (literal_query) {
final String snippet = LuceneQuery.querySnippet(tcq.getEscapedQueryString(), f.getId(), false, true); try {
String snippet;
snippet = LuceneQuery.querySnippet(tcq.getEscapedQueryString(), f.getId(), false, true);
setCommonProperty(resMap, CommonPropertyTypes.CONTEXT, snippet); setCommonProperty(resMap, CommonPropertyTypes.CONTEXT, snippet);
} catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Could not perform the query. ", ex);
return false;
}
} }
final String highlightQueryEscaped = getHighlightQuery(tcq, literal_query, tcqRes, f); final String highlightQueryEscaped = getHighlightQuery(tcq, literal_query, tcqRes, f);
toPopulate.add(new KeyValueQueryContent(f.getName(), resMap, ++resID, f, highlightQueryEscaped, tcq)); toPopulate.add(new KeyValueQueryContent(f.getName(), resMap, ++resID, f, highlightQueryEscaped, tcq));
@ -367,7 +379,14 @@ public class KeywordSearchResultFactory extends ChildFactory<KeyValueQuery> {
LuceneQuery filesQuery = new LuceneQuery(keywordQuery); LuceneQuery filesQuery = new LuceneQuery(keywordQuery);
filesQuery.escape(); filesQuery.escape();
Map<String, List<FsContent>> matchesRes = filesQuery.performQuery(); Map<String, List<FsContent>> matchesRes;
try {
matchesRes = filesQuery.performQuery();
} catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Could not perform the query. ", ex);
return false;
}
Set<FsContent> matches = new HashSet<FsContent>(); Set<FsContent> matches = new HashSet<FsContent>();
for (String key : matchesRes.keySet()) { for (String key : matchesRes.keySet()) {
matches.addAll(matchesRes.get(key)); matches.addAll(matchesRes.get(key));

View File

@ -39,12 +39,11 @@ import org.apache.solr.client.solrj.response.TermsResponse.Term;
import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrDocumentList;
import org.openide.nodes.Node; import org.openide.nodes.Node;
import org.openide.util.Exceptions;
import org.openide.windows.TopComponent; import org.openide.windows.TopComponent;
import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
import org.sleuthkit.autopsy.corecomponents.TableFilterNode; import org.sleuthkit.autopsy.corecomponents.TableFilterNode;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.ServiceDataEvent;
import org.sleuthkit.autopsy.keywordsearch.KeywordSearchResultFactory.ResultWriter; import org.sleuthkit.autopsy.keywordsearch.KeywordSearchResultFactory.ResultWriter;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
@ -104,10 +103,8 @@ public class LuceneQuery implements KeywordSearchQuery {
return null; return null;
} }
@Override @Override
public Map<String, List<FsContent>> performQuery() { public Map<String, List<FsContent>> performQuery() throws NoOpenCoreException {
Map<String, List<FsContent>> results = new HashMap<String, List<FsContent>>(); Map<String, List<FsContent>> results = new HashMap<String, List<FsContent>>();
//in case of single term literal query there is only 1 term //in case of single term literal query there is only 1 term
results.put(query, performLuceneQuery()); results.put(query, performLuceneQuery());
@ -119,7 +116,14 @@ public class LuceneQuery implements KeywordSearchQuery {
public void execute() { public void execute() {
escape(); escape();
Set<FsContent> fsMatches = new HashSet<FsContent>(); Set<FsContent> fsMatches = new HashSet<FsContent>();
final Map<String, List<FsContent>> matches = performQuery(); final Map<String, List<FsContent>> matches;
try {
matches = performQuery();
} catch (NoOpenCoreException ex) {
return;
}
for (String key : matches.keySet()) { for (String key : matches.keySet()) {
fsMatches.addAll(matches.get(key)); fsMatches.addAll(matches.get(key));
} }
@ -150,17 +154,9 @@ public class LuceneQuery implements KeywordSearchQuery {
return query != null && !query.equals(""); return query != null && !query.equals("");
} }
private Collection<KeywordWriteResult> writeToBlackBoard(FsContent newFsHit, String listName) {
List<KeywordWriteResult> ret = new ArrayList<KeywordWriteResult>();
KeywordWriteResult written = writeToBlackBoard(query, newFsHit, listName);
if (written != null) {
ret.add(written);
}
return ret;
}
@Override @Override
public KeywordWriteResult writeToBlackBoard(String termHit, FsContent newFsHit, String listName) { public KeywordWriteResult writeToBlackBoard(String termHit, FsContent newFsHit, String listName) throws NoOpenCoreException {
final String MODULE_NAME = KeywordSearchIngestService.MODULE_NAME; final String MODULE_NAME = KeywordSearchIngestService.MODULE_NAME;
KeywordWriteResult writeResult = null; KeywordWriteResult writeResult = null;
@ -170,15 +166,20 @@ public class LuceneQuery implements KeywordSearchQuery {
bba = newFsHit.newArtifact(ARTIFACT_TYPE.TSK_KEYWORD_HIT); bba = newFsHit.newArtifact(ARTIFACT_TYPE.TSK_KEYWORD_HIT);
writeResult = new KeywordWriteResult(bba); writeResult = new KeywordWriteResult(bba);
} catch (Exception e) { } catch (Exception e) {
logger.log(Level.INFO, "Error adding bb artifact for keyword hit", e); logger.log(Level.WARNING, "Error adding bb artifact for keyword hit", e);
return null; return null;
} }
String snippet = null; String snippet = null;
try { try {
snippet = LuceneQuery.querySnippet(queryEscaped, newFsHit.getId(), false, true); snippet = LuceneQuery.querySnippet(queryEscaped, newFsHit.getId(), false, true);
} catch (Exception e) { }
logger.log(Level.INFO, "Error querying snippet: " + query, e); catch (NoOpenCoreException e) {
logger.log(Level.WARNING, "Error querying snippet: " + query, e);
throw e;
}
catch (Exception e) {
logger.log(Level.WARNING, "Error querying snippet: " + query, e);
return null; return null;
} }
if (snippet != null) { if (snippet != null) {
@ -207,36 +208,25 @@ public class LuceneQuery implements KeywordSearchQuery {
writeResult.add(attributes); writeResult.add(attributes);
return writeResult; return writeResult;
} catch (TskException e) { } catch (TskException e) {
logger.log(Level.INFO, "Error adding bb attributes to artifact", e); logger.log(Level.WARNING, "Error adding bb attributes to artifact", e);
} }
return null; return null;
} }
/** /**
* Just perform the query and return result without updating the GUI * Just perform the query and return result without updating the GUI
* This utility is used in this class, can be potentially reused by other classes * This utility is used in this class, can be potentially reused by other classes
* @param query * @param query
* @return matches List * @return matches List
*/ */
private List<FsContent> performLuceneQuery() throws RuntimeException { private List<FsContent> performLuceneQuery() throws NoOpenCoreException {
List<FsContent> matches = new ArrayList<FsContent>(); List<FsContent> matches = new ArrayList<FsContent>();
boolean allMatchesFetched = false; boolean allMatchesFetched = false;
final int ROWS_PER_FETCH = 10000; final int ROWS_PER_FETCH = 10000;
Server.Core solrCore = null; final Server solrServer = KeywordSearch.getServer();
try {
solrCore = KeywordSearch.getServer().getCore();
} catch (SolrServerException e) {
logger.log(Level.INFO, "Could not get Solr core", e);
}
if (solrCore == null) {
return matches;
}
SolrQuery q = new SolrQuery(); SolrQuery q = new SolrQuery();
@ -249,17 +239,24 @@ public class LuceneQuery implements KeywordSearchQuery {
q.setStart(start); q.setStart(start);
try { try {
QueryResponse response = solrCore.query(q, METHOD.POST); QueryResponse response = solrServer.query(q, METHOD.POST);
SolrDocumentList resultList = response.getResults(); SolrDocumentList resultList = response.getResults();
long results = resultList.getNumFound(); long results = resultList.getNumFound();
allMatchesFetched = start + ROWS_PER_FETCH >= results; allMatchesFetched = start + ROWS_PER_FETCH >= results;
SleuthkitCase sc;
try {
sc = Case.getCurrentCase().getSleuthkitCase();
} catch (IllegalStateException ex) {
//no case open, must be just closed
return matches;
}
for (SolrDocument resultDoc : resultList) { for (SolrDocument resultDoc : resultList) {
long id = Long.parseLong((String) resultDoc.getFieldValue("id")); long id = Long.parseLong((String) resultDoc.getFieldValue("id"));
SleuthkitCase sc = Case.getCurrentCase().getSleuthkitCase();
// TODO: has to be a better way to get files. Also, need to // TODO: has to be a better way to get files. Also, need to
// check that we actually get 1 hit for each id // check that we actually get 1 hit for each id
ResultSet rs = sc.runQuery("select * from tsk_files where obj_id=" + id); ResultSet rs = sc.runQuery("select * from tsk_files where obj_id=" + id);
@ -271,12 +268,16 @@ public class LuceneQuery implements KeywordSearchQuery {
} }
} }
} catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Error executing Lucene Solr Query: " + query, ex);
throw ex;
} catch (SolrServerException ex) { } catch (SolrServerException ex) {
logger.log(Level.WARNING, "Error executing Lucene Solr Query: " + query.substring(0, Math.min(query.length() - 1, 200)), ex); logger.log(Level.WARNING, "Error executing Lucene Solr Query: " + query, ex);
throw new RuntimeException(ex);
// TODO: handle bad query strings, among other issues // TODO: handle bad query strings, among other issues
} catch (SQLException ex) { } catch (SQLException ex) {
logger.log(Level.WARNING, "Error interpreting results from Lucene Solr Query: " + query, ex); logger.log(Level.WARNING, "Error interpreting results from Lucene Solr Query: " + query, ex);
return matches;
} }
} }
@ -291,18 +292,10 @@ public class LuceneQuery implements KeywordSearchQuery {
* @param group whether the query should look for all terms grouped together in the query order, or not * @param group whether the query should look for all terms grouped together in the query order, or not
* @return * @return
*/ */
public static String querySnippet(String query, long contentID, boolean isRegex, boolean group) { public static String querySnippet(String query, long contentID, boolean isRegex, boolean group) throws NoOpenCoreException {
final int SNIPPET_LENGTH = 45; final int SNIPPET_LENGTH = 45;
Server.Core solrCore = null; Server solrServer = KeywordSearch.getServer();
try {
solrCore = KeywordSearch.getServer().getCore();
} catch (SolrServerException ex) {
logger.log(Level.INFO, "Could not get Solr core", ex);
}
if (solrCore == null)
return "";
String highlightField = null; String highlightField = null;
if (isRegex) { if (isRegex) {
@ -316,11 +309,13 @@ public class LuceneQuery implements KeywordSearchQuery {
if (isRegex) { if (isRegex) {
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();
sb.append(highlightField).append(":"); sb.append(highlightField).append(":");
if (group) if (group) {
sb.append("\""); sb.append("\"");
}
sb.append(query); sb.append(query);
if (group) if (group) {
sb.append("\""); sb.append("\"");
}
q.setQuery(sb.toString()); q.setQuery(sb.toString());
} else { } else {
@ -336,7 +331,7 @@ public class LuceneQuery implements KeywordSearchQuery {
q.setHighlightFragsize(SNIPPET_LENGTH); q.setHighlightFragsize(SNIPPET_LENGTH);
try { try {
QueryResponse response = solrCore.query(q); QueryResponse response = solrServer.query(q);
Map<String, Map<String, List<String>>> responseHighlight = response.getHighlighting(); Map<String, Map<String, List<String>>> responseHighlight = response.getHighlighting();
Map<String, List<String>> responseHighlightID = responseHighlight.get(Long.toString(contentID)); Map<String, List<String>> responseHighlightID = responseHighlight.get(Long.toString(contentID));
if (responseHighlightID == null) { if (responseHighlightID == null) {
@ -349,8 +344,12 @@ public class LuceneQuery implements KeywordSearchQuery {
// extracted content is HTML-escaped, but snippet goes in a plain text field // extracted content is HTML-escaped, but snippet goes in a plain text field
return StringEscapeUtils.unescapeHtml(contentHighlights.get(0)).trim(); return StringEscapeUtils.unescapeHtml(contentHighlights.get(0)).trim();
} }
} catch (NoOpenCoreException ex) {
logger.log(Level.WARNING, "Error executing Lucene Solr Query: " + query, ex);
throw ex;
} catch (SolrServerException ex) { } catch (SolrServerException ex) {
throw new RuntimeException(ex); logger.log(Level.WARNING, "Error executing Lucene Solr Query: " + query, ex);
return "";
} }
} }
} }

View File

@ -0,0 +1,32 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2012 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.keywordsearch;
/**
 * Checked exception thrown by keyword-search server operations that require
 * an open Solr core when none is currently open (e.g. no case is open, or
 * the core was closed while a search task was still running).
 */
public class NoOpenCoreException extends Exception {

    // Exception is Serializable; pin the serial form explicitly.
    private static final long serialVersionUID = 1L;

    NoOpenCoreException() {
        super("No currently open Solr core.");
    }

    /**
     * @param cause the lower-level failure that revealed the missing core;
     *              preserved so callers do not lose the original stack trace
     */
    NoOpenCoreException(Throwable cause) {
        super("No currently open Solr core.", cause);
    }
}

View File

@ -44,7 +44,7 @@ import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.TermsResponse; import org.apache.solr.client.solrj.response.TermsResponse;
import org.apache.commons.httpclient.NoHttpResponseException; import org.apache.commons.httpclient.NoHttpResponseException;
import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrRequest.METHOD; import org.apache.solr.common.util.NamedList;
import org.openide.modules.InstalledFileLocator; import org.openide.modules.InstalledFileLocator;
import org.openide.util.Exceptions; import org.openide.util.Exceptions;
import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.Case;
@ -60,8 +60,8 @@ class Server {
private static final String DEFAULT_CORE_NAME = "coreCase"; private static final String DEFAULT_CORE_NAME = "coreCase";
// TODO: DEFAULT_CORE_NAME needs to be replaced with unique names to support multiple open cases // TODO: DEFAULT_CORE_NAME needs to be replaced with unique names to support multiple open cases
public static final String CORE_EVT = "CORE_EVT"; public static final String CORE_EVT = "CORE_EVT";
private String javaPath = "java"; private String javaPath = "java";
private Process curSolrProcess = null;
public enum CORE_EVT_STATES { public enum CORE_EVT_STATES {
@ -115,14 +115,17 @@ class Server {
File outputFile = new File(log.concat(".0")); File outputFile = new File(log.concat(".0"));
File first = new File(log.concat(".1")); File first = new File(log.concat(".1"));
File second = new File(log.concat(".2")); File second = new File(log.concat(".2"));
if(second.exists()) if (second.exists()) {
second.delete(); second.delete();
if(first.exists()) }
if (first.exists()) {
first.renameTo(second); first.renameTo(second);
if(outputFile.exists()) }
if (outputFile.exists()) {
outputFile.renameTo(first); outputFile.renameTo(first);
else } else {
outputFile.createNewFile(); outputFile.createNewFile();
}
out = new FileOutputStream(outputFile); out = new FileOutputStream(outputFile);
} catch (Exception ex) { } catch (Exception ex) {
@ -153,14 +156,20 @@ class Server {
* (probably before the server is ready) and doesn't check whether it was * (probably before the server is ready) and doesn't check whether it was
* successful. * successful.
*/ */
synchronized void start() { void start() {
logger.log(Level.INFO, "Starting Solr server from: " + solrFolder.getAbsolutePath()); logger.log(Level.INFO, "Starting Solr server from: " + solrFolder.getAbsolutePath());
try { try {
Process start = Runtime.getRuntime().exec(javaPath + " -DSTOP.PORT=8079 -DSTOP.KEY=mysecret -jar start.jar", null, solrFolder); curSolrProcess = Runtime.getRuntime().exec(javaPath + " -DSTOP.PORT=8079 -DSTOP.KEY=mysecret -jar start.jar", null, solrFolder);
try {
//block, give time to fully stary the process
//so if it's restarted solr operations can be resumed seamlessly
Thread.sleep(3000);
} catch (InterruptedException ex) {
Exceptions.printStackTrace(ex);
}
// Handle output to prevent process from blocking // Handle output to prevent process from blocking
(new InputStreamPrinterThread(start.getInputStream(), "input")).start(); (new InputStreamPrinterThread(curSolrProcess.getInputStream(), "input")).start();
(new InputStreamPrinterThread(start.getErrorStream(), "error")).start(); (new InputStreamPrinterThread(curSolrProcess.getErrorStream(), "error")).start();
} catch (IOException ex) { } catch (IOException ex) {
throw new RuntimeException(ex); throw new RuntimeException(ex);
@ -172,13 +181,18 @@ class Server {
* *
* Waits for the stop command to finish * Waits for the stop command to finish
* before returning. * before returning.
* @return true if the stop command finished successfully, else false
*/ */
synchronized boolean stop() { synchronized void stop() {
try { try {
logger.log(Level.INFO, "Stopping Solr server from: " + solrFolder.getAbsolutePath()); logger.log(Level.INFO, "Stopping Solr server from: " + solrFolder.getAbsolutePath());
//try graceful shutdown
Process stop = Runtime.getRuntime().exec(javaPath + " -DSTOP.PORT=8079 -DSTOP.KEY=mysecret -jar start.jar --stop", null, solrFolder); Process stop = Runtime.getRuntime().exec(javaPath + " -DSTOP.PORT=8079 -DSTOP.KEY=mysecret -jar start.jar --stop", null, solrFolder);
return stop.waitFor() == 0; stop.waitFor();
//if still running, forcefully stop it
if (curSolrProcess != null) {
curSolrProcess.destroy();
curSolrProcess = null;
}
} catch (InterruptedException ex) { } catch (InterruptedException ex) {
throw new RuntimeException(ex); throw new RuntimeException(ex);
@ -217,7 +231,7 @@ class Server {
return true; return true;
} }
/**** Convenience methods for use while we only open one case at a time ****/ /**** Convenience methods for use while we only open one case at a time ****/
private Core currentCore = null; private volatile Core currentCore = null;
synchronized void openCore() { synchronized void openCore() {
if (currentCore != null) { if (currentCore != null) {
@ -236,13 +250,6 @@ class Server {
serverAction.putValue(CORE_EVT, CORE_EVT_STATES.STOPPED); serverAction.putValue(CORE_EVT, CORE_EVT_STATES.STOPPED);
} }
synchronized Core getCore() throws SolrServerException {
if (currentCore == null) {
throw new SolrServerException("No currently open Core!");
}
return currentCore;
}
/**** end single-case specific methods ****/ /**** end single-case specific methods ****/
/** /**
* Open a core for the given case * Open a core for the given case
@ -255,6 +262,106 @@ class Server {
return this.openCore(DEFAULT_CORE_NAME, new File(dataDir)); return this.openCore(DEFAULT_CORE_NAME, new File(dataDir));
} }
/**
 * Commits any pending documents to the index of the currently open core.
 *
 * @throws SolrServerException if the commit fails on the Solr side
 * @throws NoOpenCoreException if no core is currently open
 */
synchronized void commit() throws SolrServerException, NoOpenCoreException {
    if (currentCore == null) {
        throw new NoOpenCoreException();
    }
    currentCore.commit();
}
/**
 * Sends an arbitrary Solr request through the currently open core.
 * Intentionally not synchronized so long-running requests (e.g. file
 * indexing) do not block other server operations.
 *
 * @param request the raw Solr request to execute
 * @return the raw Solr response
 * @throws SolrServerException if the request fails on the Solr side
 * @throws NoOpenCoreException if no core is currently open
 */
NamedList<Object> request(SolrRequest request) throws SolrServerException, NoOpenCoreException {
    // Snapshot the volatile field: without synchronization the core could be
    // closed between a null check and its use, turning a recoverable
    // NoOpenCoreException into an NPE.
    final Core core = currentCore;
    if (core == null) {
        throw new NoOpenCoreException();
    }
    return core.request(request);
}
/**
 * Counts all documents indexed in the currently open core without
 * actually retrieving any of them.
 *
 * @return the number of indexed files
 * @throws SolrServerException if the query fails on the Solr side
 * @throws NoOpenCoreException if no core is currently open
 */
public synchronized int queryNumIndexedFiles() throws SolrServerException, NoOpenCoreException {
    if (currentCore == null) {
        throw new NoOpenCoreException();
    }
    final SolrQuery countOnly = new SolrQuery("*:*");
    countOnly.setRows(0);
    return (int) query(countOnly).getResults().getNumFound();
}
/**
 * Executes a Solr query against the currently open core.
 *
 * @param sq the query to run
 * @return the query response
 * @throws SolrServerException if the query fails on the Solr side
 * @throws NoOpenCoreException if no core is currently open
 */
public synchronized QueryResponse query(SolrQuery sq) throws SolrServerException, NoOpenCoreException {
    if (currentCore != null) {
        return currentCore.query(sq);
    }
    throw new NoOpenCoreException();
}
/**
 * Executes a Solr query against the currently open core using the given
 * HTTP method.
 *
 * @param sq     the query to run
 * @param method the HTTP method to use (GET or POST)
 * @return the query response
 * @throws SolrServerException if the query fails on the Solr side
 * @throws NoOpenCoreException if no core is currently open
 */
public synchronized QueryResponse query(SolrQuery sq, SolrRequest.METHOD method) throws SolrServerException, NoOpenCoreException {
    if (currentCore != null) {
        return currentCore.query(sq, method);
    }
    throw new NoOpenCoreException();
}
/**
 * Executes a Solr terms query against the currently open core.
 *
 * @param sq the terms query to run
 * @return the terms portion of the query response
 * @throws SolrServerException if the query fails on the Solr side
 * @throws NoOpenCoreException if no core is currently open
 */
public synchronized TermsResponse queryTerms(SolrQuery sq) throws SolrServerException, NoOpenCoreException {
    if (currentCore == null) {
        throw new NoOpenCoreException();
    }
    return currentCore.query(sq).getTermsResponse();
}
/**
 * Fetches the indexed text for the given content item from the currently
 * open core.
 *
 * @param content the content object whose extracted text is wanted
 * @return the indexed text of the content
 * @throws SolrServerException if the query fails on the Solr side
 * @throws NoOpenCoreException if no core is currently open
 */
public synchronized String getSolrContent(final Content content) throws SolrServerException, NoOpenCoreException {
    if (currentCore != null) {
        return currentCore.getSolrContent(content);
    }
    throw new NoOpenCoreException();
}
/**
 * Factory method for an Ingester used to add documents to the index.
 *
 * @return a new Ingester instance
 */
public Ingester getIngester() {
    final Ingester ingester = new Ingester();
    return ingester;
}
/** /**
* Open a new core * Open a new core
* @param coreName name to refer to the core by in Solr * @param coreName name to refer to the core by in Solr
@ -300,25 +407,39 @@ class Server {
} }
} }
public Ingester getIngester() { private QueryResponse query(SolrQuery sq) throws SolrServerException {
return new Ingester(this.solrCore);
}
public synchronized QueryResponse query(SolrQuery sq) throws SolrServerException {
return solrCore.query(sq); return solrCore.query(sq);
} }
public synchronized QueryResponse query(SolrQuery sq, SolrRequest.METHOD method) throws SolrServerException { private NamedList<Object> request(SolrRequest request) throws SolrServerException {
try {
return solrCore.request(request);
} catch (IOException e) {
logger.log(Level.WARNING, "Could not issue Solr request. ", e);
throw new SolrServerException("Could not issue Solr request", e);
}
}
private QueryResponse query(SolrQuery sq, SolrRequest.METHOD method) throws SolrServerException {
return solrCore.query(sq, method); return solrCore.query(sq, method);
} }
private TermsResponse queryTerms(SolrQuery sq) throws SolrServerException {
public synchronized TermsResponse queryTerms(SolrQuery sq) throws SolrServerException {
QueryResponse qres = solrCore.query(sq); QueryResponse qres = solrCore.query(sq);
return qres.getTermsResponse(); return qres.getTermsResponse();
} }
public synchronized String getSolrContent(final Content content) { private void commit() throws SolrServerException {
try {
solrCore.commit();
} catch (IOException e) {
logger.log(Level.WARNING, "Could not commit index. ", e);
throw new SolrServerException("Could not commit index", e);
}
}
private String getSolrContent(final Content content) {
final SolrQuery q = new SolrQuery(); final SolrQuery q = new SolrQuery();
q.setQuery("*:*"); q.setQuery("*:*");
q.addFilterQuery("id:" + content.getId()); q.addFilterQuery("id:" + content.getId());
@ -347,7 +468,7 @@ class Server {
* @return int representing number of indexed files * @return int representing number of indexed files
* @throws SolrServerException * @throws SolrServerException
*/ */
public synchronized int queryNumIndexedFiles() throws SolrServerException { private int queryNumIndexedFiles() throws SolrServerException {
SolrQuery q = new SolrQuery("*:*"); SolrQuery q = new SolrQuery("*:*");
q.setRows(0); q.setRows(0);
return (int) query(q).getResults().getNumFound(); return (int) query(q).getResults().getNumFound();

View File

@ -128,10 +128,13 @@ public class TermComponentQuery implements KeywordSearchQuery {
protected List<Term> executeQuery(SolrQuery q) { protected List<Term> executeQuery(SolrQuery q) {
List<Term> termsCol = null; List<Term> termsCol = null;
try { try {
Server.Core solrCore = KeywordSearch.getServer().getCore(); Server solrServer = KeywordSearch.getServer();
TermsResponse tr = solrCore.queryTerms(q); TermsResponse tr = solrServer.queryTerms(q);
termsCol = tr.getTerms(TERMS_SEARCH_FIELD); termsCol = tr.getTerms(TERMS_SEARCH_FIELD);
return termsCol; return termsCol;
} catch (NoOpenCoreException ex) {
logger.log(Level.SEVERE, "Error executing the regex terms query: " + termsQuery, ex);
return null; //no need to create result view, just display error dialog
} catch (SolrServerException ex) { } catch (SolrServerException ex) {
logger.log(Level.SEVERE, "Error executing the regex terms query: " + termsQuery, ex); logger.log(Level.SEVERE, "Error executing the regex terms query: " + termsQuery, ex);
return null; //no need to create result view, just display error dialog return null; //no need to create result view, just display error dialog
@ -154,15 +157,20 @@ public class TermComponentQuery implements KeywordSearchQuery {
} }
@Override @Override
public KeywordWriteResult writeToBlackBoard(String termHit, FsContent newFsHit, String listName) { public KeywordWriteResult writeToBlackBoard(String termHit, FsContent newFsHit, String listName) throws NoOpenCoreException {
final String MODULE_NAME = KeywordSearchIngestService.MODULE_NAME; final String MODULE_NAME = KeywordSearchIngestService.MODULE_NAME;
//snippet //snippet
String snippet = null; String snippet = null;
try { try {
snippet = LuceneQuery.querySnippet(KeywordSearchUtil.escapeLuceneQuery(termHit, true, false), newFsHit.getId(), true, true); snippet = LuceneQuery.querySnippet(KeywordSearchUtil.escapeLuceneQuery(termHit, true, false), newFsHit.getId(), true, true);
} catch (Exception e) { }
logger.log(Level.INFO, "Error querying snippet: " + termHit, e); catch (NoOpenCoreException e) {
logger.log(Level.WARNING, "Error querying snippet: " + termHit, e);
throw e;
}
catch (Exception e) {
logger.log(Level.WARNING, "Error querying snippet: " + termHit, e);
return null; return null;
} }
@ -178,7 +186,7 @@ public class TermComponentQuery implements KeywordSearchQuery {
bba = newFsHit.newArtifact(ARTIFACT_TYPE.TSK_KEYWORD_HIT); bba = newFsHit.newArtifact(ARTIFACT_TYPE.TSK_KEYWORD_HIT);
writeResult = new KeywordWriteResult(bba); writeResult = new KeywordWriteResult(bba);
} catch (Exception e) { } catch (Exception e) {
logger.log(Level.INFO, "Error adding bb artifact for keyword hit", e); logger.log(Level.WARNING, "Error adding bb artifact for keyword hit", e);
return null; return null;
} }
@ -212,16 +220,15 @@ public class TermComponentQuery implements KeywordSearchQuery {
writeResult.add(attributes); writeResult.add(attributes);
return writeResult; return writeResult;
} catch (TskException e) { } catch (TskException e) {
logger.log(Level.INFO, "Error adding bb attributes for terms search artifact", e); logger.log(Level.WARNING, "Error adding bb attributes for terms search artifact", e);
} }
return null; return null;
} }
@Override @Override
public Map<String, List<FsContent>> performQuery() { public Map<String, List<FsContent>> performQuery() throws NoOpenCoreException{
Map<String, List<FsContent>> results = new HashMap<String, List<FsContent>>(); Map<String, List<FsContent>> results = new HashMap<String, List<FsContent>>();
final SolrQuery q = createQuery(); final SolrQuery q = createQuery();
@ -243,7 +250,12 @@ public class TermComponentQuery implements KeywordSearchQuery {
filesResults.addAll(subResults.get(key)); filesResults.addAll(subResults.get(key));
} }
results.put(term.getTerm(), new ArrayList<FsContent>(filesResults)); results.put(term.getTerm(), new ArrayList<FsContent>(filesResults));
} catch (RuntimeException e) { }
catch (NoOpenCoreException e) {
logger.log(Level.SEVERE, "Error executing Solr query,", e);
throw e;
}
catch (RuntimeException e) {
logger.log(Level.SEVERE, "Error executing Solr query,", e); logger.log(Level.SEVERE, "Error executing Solr query,", e);
} }
@ -253,7 +265,6 @@ public class TermComponentQuery implements KeywordSearchQuery {
return results; return results;
} }
@Override @Override
public void execute() { public void execute() {
SolrQuery q = createQuery(); SolrQuery q = createQuery();