mirror of
https://github.com/overcuriousity/autopsy-flatpak.git
synced 2025-07-15 01:07:42 +00:00
No longer attempting to index new data after 5 failed consecutive attempts
This commit is contained in:
parent
3822ef10f0
commit
acb5d34aea
@ -1,7 +1,7 @@
|
|||||||
/*
|
/*
|
||||||
* Autopsy Forensic Browser
|
* Autopsy Forensic Browser
|
||||||
*
|
*
|
||||||
* Copyright 2011-2020 Basis Technology Corp.
|
* Copyright 2011-2021 Basis Technology Corp.
|
||||||
* Contact: carrier <at> sleuthkit <dot> org
|
* Contact: carrier <at> sleuthkit <dot> org
|
||||||
*
|
*
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
@ -95,7 +95,6 @@ import org.sleuthkit.autopsy.coreutils.ThreadUtils;
|
|||||||
import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
|
import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
|
||||||
import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
|
import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
|
||||||
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException;
|
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException;
|
||||||
import org.sleuthkit.autopsy.report.GeneralReportSettings;
|
|
||||||
import org.sleuthkit.autopsy.report.ReportProgressPanel;
|
import org.sleuthkit.autopsy.report.ReportProgressPanel;
|
||||||
import org.sleuthkit.datamodel.Content;
|
import org.sleuthkit.datamodel.Content;
|
||||||
|
|
||||||
@ -2030,6 +2029,13 @@ public class Server {
|
|||||||
private final List<SolrInputDocument> buffer;
|
private final List<SolrInputDocument> buffer;
|
||||||
private final Object bufferLock;
|
private final Object bufferLock;
|
||||||
|
|
||||||
|
/* (JIRA-7521) Sometimes we get into a situation where Solr server is no longer able to index new data.
|
||||||
|
* Typically the main reason for this is Solr running out of memory. In this case we will stop trying to send new
|
||||||
|
* data to Solr (for this collection) after certain number of consecutive batches have failed. */
|
||||||
|
private static final int MAX_NUM_CONSECUTIVE_FAILURES = 5;
|
||||||
|
private int numConsecutiveFailures = 0;
|
||||||
|
private boolean skipIndexing = false;
|
||||||
|
|
||||||
private final ScheduledThreadPoolExecutor periodicTasksExecutor;
|
private final ScheduledThreadPoolExecutor periodicTasksExecutor;
|
||||||
private static final long PERIODIC_BATCH_SEND_INTERVAL_MINUTES = 10;
|
private static final long PERIODIC_BATCH_SEND_INTERVAL_MINUTES = 10;
|
||||||
private static final int NUM_BATCH_UPDATE_RETRIES = 10;
|
private static final int NUM_BATCH_UPDATE_RETRIES = 10;
|
||||||
@ -2076,6 +2082,11 @@ public class Server {
|
|||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void run() {
|
public void run() {
|
||||||
|
|
||||||
|
if (skipIndexing) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
List<SolrInputDocument> clone;
|
List<SolrInputDocument> clone;
|
||||||
synchronized (bufferLock) {
|
synchronized (bufferLock) {
|
||||||
|
|
||||||
@ -2243,6 +2254,10 @@ public class Server {
|
|||||||
*/
|
*/
|
||||||
void addDocument(SolrInputDocument doc) throws KeywordSearchModuleException {
|
void addDocument(SolrInputDocument doc) throws KeywordSearchModuleException {
|
||||||
|
|
||||||
|
if (skipIndexing) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
List<SolrInputDocument> clone;
|
List<SolrInputDocument> clone;
|
||||||
synchronized (bufferLock) {
|
synchronized (bufferLock) {
|
||||||
buffer.add(doc);
|
buffer.add(doc);
|
||||||
@ -2268,6 +2283,10 @@ public class Server {
|
|||||||
*
|
*
|
||||||
* @throws KeywordSearchModuleException
|
* @throws KeywordSearchModuleException
|
||||||
*/
|
*/
|
||||||
|
@NbBundle.Messages({
|
||||||
|
"Collection.unableToIndexData.error=Unable to add data to text index. All future text indexing for the current case will be skipped.",
|
||||||
|
|
||||||
|
})
|
||||||
private void sendBufferedDocs(List<SolrInputDocument> docBuffer) throws KeywordSearchModuleException {
|
private void sendBufferedDocs(List<SolrInputDocument> docBuffer) throws KeywordSearchModuleException {
|
||||||
|
|
||||||
if (docBuffer.isEmpty()) {
|
if (docBuffer.isEmpty()) {
|
||||||
@ -2293,6 +2312,7 @@ public class Server {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (success) {
|
if (success) {
|
||||||
|
numConsecutiveFailures = 0;
|
||||||
if (reTryAttempt > 0) {
|
if (reTryAttempt > 0) {
|
||||||
logger.log(Level.INFO, "Batch update suceeded after {0} re-try", reTryAttempt); //NON-NLS
|
logger.log(Level.INFO, "Batch update suceeded after {0} re-try", reTryAttempt); //NON-NLS
|
||||||
}
|
}
|
||||||
@ -2304,10 +2324,26 @@ public class Server {
|
|||||||
throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg")); //NON-NLS
|
throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg")); //NON-NLS
|
||||||
} catch (Exception ex) {
|
} catch (Exception ex) {
|
||||||
// Solr throws a lot of unexpected exception types
|
// Solr throws a lot of unexpected exception types
|
||||||
|
numConsecutiveFailures++;
|
||||||
logger.log(Level.SEVERE, "Could not add batched documents to index", ex); //NON-NLS
|
logger.log(Level.SEVERE, "Could not add batched documents to index", ex); //NON-NLS
|
||||||
|
|
||||||
|
// display message to user that that a document batch is missing from the index
|
||||||
|
MessageNotifyUtil.Notify.error(
|
||||||
|
NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"),
|
||||||
|
NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"));
|
||||||
throw new KeywordSearchModuleException(
|
throw new KeywordSearchModuleException(
|
||||||
NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"), ex); //NON-NLS
|
NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"), ex); //NON-NLS
|
||||||
} finally {
|
} finally {
|
||||||
|
if (numConsecutiveFailures > MAX_NUM_CONSECUTIVE_FAILURES) {
|
||||||
|
// skip all future indexing
|
||||||
|
skipIndexing = true;
|
||||||
|
|
||||||
|
// display message to user that no more data will be added to the index
|
||||||
|
MessageNotifyUtil.Notify.error(
|
||||||
|
NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"),
|
||||||
|
Bundle.Collection_unableToIndexData_error());
|
||||||
|
MessageNotifyUtil.Message.error(Bundle.Collection_unableToIndexData_error());
|
||||||
|
}
|
||||||
docBuffer.clear();
|
docBuffer.clear();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Loading…
x
Reference in New Issue
Block a user