Merge pull request #3138 from eugene7646/file_visitor_fix

File visitor exception handling fix
Authored by Brian Carrier on 2017-10-06 16:37:23 -04:00, committed by GitHub
commit 8a445ca52c
3 changed files with 165 additions and 116 deletions
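
The underlying issue: Files.walkFileTree aborts the entire walk as soon as an exception escapes a visitor method, so a single malformed or unreadable manifest could previously stop the scan of the whole input directory. The diff below wraps the body of visitFile in a catch-all that logs and returns CONTINUE, and turns failures while building an AutoIngestJob into a checked AutoIngestJobException that is handled per manifest. A minimal, self-contained sketch of that visitor pattern (class, method, and logger names here are illustrative stand-ins, not the Autopsy code):

    import java.io.IOException;
    import java.nio.file.FileVisitResult;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.nio.file.SimpleFileVisitor;
    import java.nio.file.attribute.BasicFileAttributes;
    import java.util.logging.Level;
    import java.util.logging.Logger;

    public class InputScanSketch {

        private static final Logger LOGGER = Logger.getLogger(InputScanSketch.class.getName());

        public static void main(String[] args) throws IOException {
            Path inputRoot = Paths.get(args.length > 0 ? args[0] : ".");
            Files.walkFileTree(inputRoot, new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
                    if (Thread.currentThread().isInterrupted()) {
                        return FileVisitResult.TERMINATE;
                    }
                    try {
                        processManifestCandidate(file); // may throw on a malformed file
                    } catch (Exception ex) {
                        // Without this catch-all, the exception would propagate out of
                        // walkFileTree and abort the scan of every remaining file.
                        LOGGER.log(Level.SEVERE, "Unexpected exception in file visitor", ex);
                    }
                    return FileVisitResult.CONTINUE;
                }
            });
        }

        // Placeholder for the manifest parsing and job creation done by the real scanner.
        private static void processManifestCandidate(Path file) throws Exception {
            if (!Files.isReadable(file)) {
                throw new IOException("Cannot read " + file);
            }
        }
    }

In the diff itself, the same idea appears as the outer try/catch added to AutoIngestManager's visitFile plus the new per-manifest handling of AutoIngestJobException.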


@@ -89,7 +89,8 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
      *
      * @param manifest The manifest for an automated ingest job.
      */
-    AutoIngestJob(Manifest manifest) {
+    AutoIngestJob(Manifest manifest) throws AutoIngestJobException {
+        try {
         /*
          * Version 0 fields.
          */
@@ -113,6 +114,9 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
         this.processingStatus = ProcessingStatus.PENDING;
         this.numberOfCrashes = 0;
         this.stageDetails = this.getProcessingStageDetails();
+        } catch (Exception ex) {
+            throw new AutoIngestJobException(String.format("Error creating automated ingest job"), ex);
+        }
     }
 
     /**
@@ -122,7 +126,8 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
      * @param nodeData The coordination service node data for an automated
      *                 ingest job.
      */
-    AutoIngestJob(AutoIngestJobNodeData nodeData) {
+    AutoIngestJob(AutoIngestJobNodeData nodeData) throws AutoIngestJobException {
+        try {
         /*
          * Version 0 fields.
          */
@@ -146,6 +151,9 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
         this.processingStatus = nodeData.getProcessingStatus();
         this.numberOfCrashes = nodeData.getNumberOfCrashes();
         this.stageDetails = this.getProcessingStageDetails();
+        } catch (Exception ex) {
+            throw new AutoIngestJobException(String.format("Error creating automated ingest job"), ex);
+        }
     }
 
     /**
@@ -623,4 +631,32 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
     }
 
+    /**
+     * Exception thrown when there is a problem creating auto ingest job.
+     */
+    final static class AutoIngestJobException extends Exception {
+
+        private static final long serialVersionUID = 1L;
+
+        /**
+         * Constructs an exception to throw when there is a problem creating
+         * auto ingest job.
+         *
+         * @param message The exception message.
+         */
+        private AutoIngestJobException(String message) {
+            super(message);
+        }
+
+        /**
+         * Constructs an exception to throw when there is a problem creating
+         * auto ingest job.
+         *
+         * @param message The exception message.
+         * @param cause   The cause of the exception, if it was an exception.
+         */
+        private AutoIngestJobException(String message, Throwable cause) {
+            super(message, cause);
+        }
+    }
 }
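
Both constructors above now funnel any failure into the new checked AutoIngestJobException with the original cause preserved, so callers must deal with a bad manifest or node-data record explicitly instead of letting an unchecked exception escape. A small, self-contained sketch of that wrap-and-rethrow pattern; JobSketch and JobCreationException are assumed stand-in names, not the Autopsy classes:

    import java.util.Collections;
    import java.util.Map;

    public class JobSketch {

        // Domain-specific checked exception, mirroring the role of AutoIngestJobException.
        static final class JobCreationException extends Exception {
            private static final long serialVersionUID = 1L;

            JobCreationException(String message, Throwable cause) {
                super(message, cause);
            }
        }

        private final String caseName;

        JobSketch(Map<String, String> manifestFields) throws JobCreationException {
            try {
                // Any NullPointerException, parse error, etc. raised here is wrapped below.
                this.caseName = manifestFields.get("caseName").trim();
            } catch (Exception ex) {
                throw new JobCreationException("Error creating job", ex);
            }
        }

        public static void main(String[] args) {
            try {
                new JobSketch(Collections.emptyMap()); // missing field triggers the wrap
            } catch (JobCreationException ex) {
                System.err.println(ex.getMessage() + " caused by " + ex.getCause());
            }
        }
    }

Running main prints the wrapped message together with the underlying NullPointerException, which is the behavior the new constructors rely on for logging at the call sites in the next file.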


@@ -61,6 +61,7 @@ import java.util.stream.Collectors;
 import javax.annotation.concurrent.GuardedBy;
 import javax.annotation.concurrent.Immutable;
 import javax.annotation.concurrent.ThreadSafe;
+import org.openide.util.Exceptions;
 import org.openide.util.Lookup;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.casemodule.Case.CaseType;
@@ -93,6 +94,7 @@ import org.sleuthkit.autopsy.experimental.configuration.SharedConfiguration;
 import org.sleuthkit.autopsy.experimental.configuration.SharedConfiguration.SharedConfigurationException;
 import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor;
 import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException;
+import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJob.AutoIngestJobException;
 import org.sleuthkit.autopsy.ingest.IngestJob;
 import org.sleuthkit.autopsy.ingest.IngestJob.CancellationReason;
 import org.sleuthkit.autopsy.ingest.IngestJobSettings;
@@ -759,7 +761,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
                 AutoIngestJob deletedJob = new AutoIngestJob(nodeData);
                 deletedJob.setProcessingStatus(AutoIngestJob.ProcessingStatus.DELETED);
                 this.updateCoordinationServiceNode(deletedJob);
-            } catch (AutoIngestJobNodeData.InvalidDataException ex) {
+            } catch (AutoIngestJobNodeData.InvalidDataException | AutoIngestJobException ex) {
                 SYS_LOGGER.log(Level.WARNING, String.format("Invalid auto ingest job node data for %s", manifestPath), ex);
                 return CaseDeletionResult.PARTIALLY_DELETED;
             } catch (InterruptedException | CoordinationServiceException ex) {
@@ -1015,15 +1017,14 @@ public final class AutoIngestManager extends Observable implements PropertyChang
         * @return TERMINATE if auto ingest is shutting down, CONTINUE if it has
         *         not.
         *
-        * @throws IOException if an I/O error occurs, but this implementation
-        *                     does not throw.
         */
        @Override
-       public FileVisitResult visitFile(Path filePath, BasicFileAttributes attrs) throws IOException {
+       public FileVisitResult visitFile(Path filePath, BasicFileAttributes attrs) {
            if (Thread.currentThread().isInterrupted()) {
                return TERMINATE;
            }
 
+           try {
                Manifest manifest = null;
                for (ManifestFileParser parser : Lookup.getDefault().lookupAll(ManifestFileParser.class)) {
                    if (parser.fileIsManifest(filePath)) {
@@ -1088,11 +1089,15 @@ public final class AutoIngestManager extends Observable implements PropertyChang
                                SYS_LOGGER.log(Level.SEVERE, "Unknown ManifestNodeData.ProcessingStatus");
                                break;
                        }
-                   } catch (AutoIngestJobNodeData.InvalidDataException ex) {
-                       SYS_LOGGER.log(Level.WARNING, String.format("Invalid auto ingest job node data for %s", manifestPath), ex);
+                   } catch (AutoIngestJobNodeData.InvalidDataException | AutoIngestJobException ex) {
+                       SYS_LOGGER.log(Level.SEVERE, String.format("Invalid auto ingest job node data for %s", manifestPath), ex);
                    }
                } else {
+                   try {
                    addNewPendingJob(manifest);
+                   } catch (AutoIngestJobException ex) {
+                       SYS_LOGGER.log(Level.SEVERE, String.format("Invalid manifest data for %s", manifestPath), ex);
+                   }
                }
            } catch (CoordinationServiceException ex) {
                SYS_LOGGER.log(Level.SEVERE, String.format("Error transmitting node data for %s", manifestPath), ex);
@@ -1103,6 +1108,14 @@ public final class AutoIngestManager extends Observable implements PropertyChang
                    }
                }
+           } catch (Exception ex) {
+               // Catch all unhandled and unexpected exceptions. Otherwise one bad file
+               // can stop the entire input folder scanning. Given that the exception is unexpected,
+               // I'm hesitant to add logging which requires accessing or de-referencing data.
+               SYS_LOGGER.log(Level.SEVERE, "Unexpected exception in file visitor", ex);
+               return CONTINUE;
+           }
 
            if (!Thread.currentThread().isInterrupted()) {
                return CONTINUE;
            } else {
@@ -1122,7 +1135,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
         *                              blocked, i.e., if auto ingest is
         *                              shutting down.
         */
-       private void addPendingJob(Manifest manifest, AutoIngestJobNodeData nodeData) throws InterruptedException {
+       private void addPendingJob(Manifest manifest, AutoIngestJobNodeData nodeData) throws InterruptedException, AutoIngestJobException {
           AutoIngestJob job;
           if (nodeData.getVersion() == AutoIngestJobNodeData.getCurrentVersion()) {
               job = new AutoIngestJob(nodeData);
@@ -1176,7 +1189,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
         *                              blocked, i.e., if auto ingest is
         *                              shutting down.
         */
-       private void addNewPendingJob(Manifest manifest) throws InterruptedException {
+       private void addNewPendingJob(Manifest manifest) throws InterruptedException, AutoIngestJobException {
           /*
            * Create the coordination service node data for the job. Note that
            * getting the lock will create the node for the job (with no data)
@@ -1218,7 +1231,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
         *                              blocked, i.e., if auto ingest is
         *                              shutting down.
         */
-       private void doRecoveryIfCrashed(Manifest manifest, AutoIngestJobNodeData nodeData) throws InterruptedException {
+       private void doRecoveryIfCrashed(Manifest manifest, AutoIngestJobNodeData nodeData) throws InterruptedException, AutoIngestJobException {
           /*
            * Try to get an exclusive lock on the coordination service node for
            * the job. If the lock cannot be obtained, another host in the auto
@@ -1314,7 +1327,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
         * @throws CoordinationServiceException
         * @throws InterruptedException
         */
-       private void addCompletedJob(Manifest manifest, AutoIngestJobNodeData nodeData) throws CoordinationServiceException, InterruptedException {
+       private void addCompletedJob(Manifest manifest, AutoIngestJobNodeData nodeData) throws CoordinationServiceException, InterruptedException, AutoIngestJobException {
           Path caseDirectoryPath = PathUtils.findCaseDirectory(rootOutputDirectory, manifest.getCaseName());
           if (null != caseDirectoryPath) {
               AutoIngestJob job;


@@ -265,7 +265,7 @@ public final class AutoIngestMonitor extends Observable implements PropertyChang
                }
            } catch (InterruptedException ex) {
                LOGGER.log(Level.SEVERE, String.format("Unexpected interrupt while retrieving coordination service node data for '%s'", node), ex);
-           } catch (AutoIngestJobNodeData.InvalidDataException ex) {
+           } catch (AutoIngestJobNodeData.InvalidDataException | AutoIngestJob.AutoIngestJobException ex) {
                LOGGER.log(Level.SEVERE, String.format("Unable to use node data for '%s'", node), ex);
            }
        }
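
The AutoIngestManager and AutoIngestMonitor call sites above all use the same multi-catch idiom: two unrelated checked exceptions share one log-and-skip branch, so one bad coordination service node never takes down the surrounding refresh loop. A compact, runnable illustration with stand-in exception types (InvalidDataException and JobCreationException here are placeholders, not the Autopsy classes):

    import java.util.logging.Level;
    import java.util.logging.Logger;

    public class MultiCatchSketch {

        static class InvalidDataException extends Exception {
            InvalidDataException(String msg) { super(msg); }
        }

        static class JobCreationException extends Exception {
            JobCreationException(String msg) { super(msg); }
        }

        private static final Logger LOGGER = Logger.getLogger(MultiCatchSketch.class.getName());

        static void refreshNode(String node) {
            try {
                if (node.isEmpty()) {
                    throw new InvalidDataException("empty node data");
                }
                // Simulate a failure while rebuilding the job from node data.
                throw new JobCreationException("could not rebuild job for " + node);
            } catch (InvalidDataException | JobCreationException ex) {
                // Either failure is logged the same way and the node is skipped.
                LOGGER.log(Level.SEVERE, String.format("Unable to use node data for '%s'", node), ex);
            }
        }

        public static void main(String[] args) {
            refreshNode("job-node-1");
            refreshNode("");
        }
    }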