No longer attempting to index new data after 5 failed consecutive attempts

Eugene Livis 2021-04-13 15:59:23 -04:00
parent 3822ef10f0
commit acb5d34aea


@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2020 Basis Technology Corp.
* Copyright 2011-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -95,7 +95,6 @@ import org.sleuthkit.autopsy.coreutils.ThreadUtils;
import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException;
import org.sleuthkit.autopsy.report.GeneralReportSettings;
import org.sleuthkit.autopsy.report.ReportProgressPanel;
import org.sleuthkit.datamodel.Content;
@@ -2030,6 +2029,13 @@ public class Server {
private final List<SolrInputDocument> buffer;
private final Object bufferLock;
/* (JIRA-7521) Sometimes we get into a situation where the Solr server is no longer able to index new data.
* Typically the main reason for this is Solr running out of memory. In this case we will stop trying to send new
* data to Solr (for this collection) after a certain number of consecutive batches have failed. */
private static final int MAX_NUM_CONSECUTIVE_FAILURES = 5;
private int numConsecutiveFailures = 0;
private boolean skipIndexing = false;
private final ScheduledThreadPoolExecutor periodicTasksExecutor;
private static final long PERIODIC_BATCH_SEND_INTERVAL_MINUTES = 10;
private static final int NUM_BATCH_UPDATE_RETRIES = 10;
@@ -2076,6 +2082,11 @@ public class Server {
@Override
public void run() {
if (skipIndexing) {
return;
}
List<SolrInputDocument> clone;
synchronized (bufferLock) {
@@ -2242,6 +2253,10 @@ public class Server {
* @throws KeywordSearchModuleException
*/
void addDocument(SolrInputDocument doc) throws KeywordSearchModuleException {
if (skipIndexing) {
return;
}
List<SolrInputDocument> clone;
synchronized (bufferLock) {
@@ -2268,6 +2283,10 @@ public class Server {
*
* @throws KeywordSearchModuleException
*/
@NbBundle.Messages({
"Collection.unableToIndexData.error=Unable to add data to text index. All future text indexing for the current case will be skipped.",
})
private void sendBufferedDocs(List<SolrInputDocument> docBuffer) throws KeywordSearchModuleException {
if (docBuffer.isEmpty()) {
@@ -2293,6 +2312,7 @@ public class Server {
}
}
if (success) {
numConsecutiveFailures = 0;
if (reTryAttempt > 0) {
logger.log(Level.INFO, "Batch update succeeded after {0} re-try", reTryAttempt); //NON-NLS
}
@@ -2304,10 +2324,26 @@ public class Server {
throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg")); //NON-NLS
} catch (Exception ex) {
// Solr throws a lot of unexpected exception types
numConsecutiveFailures++;
logger.log(Level.SEVERE, "Could not add batched documents to index", ex); //NON-NLS
// display message to user that a document batch is missing from the index
MessageNotifyUtil.Notify.error(
NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"),
NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"));
throw new KeywordSearchModuleException(
NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"), ex); //NON-NLS
} finally {
if (numConsecutiveFailures >= MAX_NUM_CONSECUTIVE_FAILURES) {
// skip all future indexing
skipIndexing = true;
// display message to user that no more data will be added to the index
MessageNotifyUtil.Notify.error(
NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"),
Bundle.Collection_unableToIndexData_error());
MessageNotifyUtil.Message.error(Bundle.Collection_unableToIndexData_error());
}
docBuffer.clear();
}
}
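Taken together, the new fields and guards implement a simple circuit breaker around batch indexing: every successful batch resets the failure counter, and once enough batches fail back-to-back, all further indexing for the collection is skipped. Below is a minimal standalone sketch of the same pattern, assuming a hypothetical IndexClient in place of Autopsy's Solr client; BatchIndexer and IndexClient are illustrative names, not Autopsy APIs.

import java.util.List;

/* Hypothetical stand-ins for illustration; not Autopsy or Solr APIs. */
interface IndexClient {
    void sendBatch(List<String> docs) throws Exception; // may fail, e.g. when Solr runs out of memory
}

class BatchIndexer {
    private static final int MAX_NUM_CONSECUTIVE_FAILURES = 5;
    private static final int NUM_BATCH_UPDATE_RETRIES = 10;

    private final IndexClient client;
    private int numConsecutiveFailures = 0;
    private boolean skipIndexing = false;

    BatchIndexer(IndexClient client) {
        this.client = client;
    }

    void sendBufferedDocs(List<String> docBuffer) throws Exception {
        if (skipIndexing || docBuffer.isEmpty()) {
            return; // breaker is open (or nothing to send): drop the batch
        }
        try {
            Exception lastFailure = null;
            for (int attempt = 0; attempt <= NUM_BATCH_UPDATE_RETRIES; attempt++) {
                try {
                    client.sendBatch(docBuffer);
                    numConsecutiveFailures = 0; // any success resets the counter
                    return;
                } catch (Exception ex) {
                    lastFailure = ex; // keep retrying until the attempts are used up
                }
            }
            numConsecutiveFailures++; // all retries exhausted: one consecutive failure
            throw lastFailure;
        } finally {
            if (numConsecutiveFailures >= MAX_NUM_CONSECUTIVE_FAILURES) {
                skipIndexing = true; // trip the breaker: skip all future batches
            }
            docBuffer.clear(); // the batch is discarded either way, as in the commit
        }
    }
}

The reset on success is what makes the failures "consecutive": a flaky connection that recovers never trips the breaker, while a Solr server that has run out of memory fails every batch and is cut off after the fifth.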