Mirror of https://github.com/overcuriousity/autopsy-flatpak.git, synced 2025-07-17 10:17:41 +00:00
Merge pull request #3138 from eugene7646/file_visitor_fix

File visitor exception handling fix

commit 8a445ca52c
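
Note: the diff below applies one idea in two places. The AutoIngestJob constructors now wrap their bodies in try/catch and rethrow any failure as a new checked AutoIngestJobException, and the manifest-scanning file visitor wraps its body in a catch-all so that a single bad file can no longer abort the whole input-folder scan. A rough, self-contained sketch of that second idea, with hypothetical names rather than the Autopsy code itself:

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;

// Hypothetical illustration of the pattern used in the fix: a visitor that
// never lets an unexpected per-file exception terminate the directory walk.
public class ResilientScan {

    public static void main(String[] args) throws IOException {
        Path root = Paths.get(args.length > 0 ? args[0] : ".");
        Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
                try {
                    process(file); // may throw for one malformed file
                } catch (Exception ex) {
                    // Log and keep scanning; without this catch-all the
                    // exception would propagate out of walkFileTree and
                    // stop the scan at the first bad file.
                    System.err.println("Skipping " + file + ": " + ex);
                }
                return FileVisitResult.CONTINUE;
            }
        });
    }

    private static void process(Path file) throws Exception {
        // Placeholder for per-file work (e.g., parsing a manifest).
        if (file.getFileName().toString().endsWith(".bad")) {
            throw new IllegalStateException("unparseable file");
        }
    }
}

Returning CONTINUE from the catch block is what lets Files.walkFileTree() move on to the next file instead of terminating the scan.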
@@ -89,30 +89,34 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
      *
      * @param manifest The manifest for an automated ingest job.
      */
-    AutoIngestJob(Manifest manifest) {
-        /*
-         * Version 0 fields.
-         */
-        this.manifest = manifest;
-        this.nodeName = "";
-        this.caseDirectoryPath = "";
-        this.priority = DEFAULT_PRIORITY;
-        this.stage = Stage.PENDING;
-        this.stageStartDate = manifest.getDateFileCreated();
-        this.dataSourceProcessor = null;
-        this.ingestJob = null;
-        this.cancelled = false;
-        this.completed = false;
-        this.completedDate = new Date(0);
-        this.errorsOccurred = false;
+    AutoIngestJob(Manifest manifest) throws AutoIngestJobException {
+        try {
+            /*
+             * Version 0 fields.
+             */
+            this.manifest = manifest;
+            this.nodeName = "";
+            this.caseDirectoryPath = "";
+            this.priority = DEFAULT_PRIORITY;
+            this.stage = Stage.PENDING;
+            this.stageStartDate = manifest.getDateFileCreated();
+            this.dataSourceProcessor = null;
+            this.ingestJob = null;
+            this.cancelled = false;
+            this.completed = false;
+            this.completedDate = new Date(0);
+            this.errorsOccurred = false;
 
-        /*
-         * Version 1 fields.
-         */
-        this.version = CURRENT_VERSION;
-        this.processingStatus = ProcessingStatus.PENDING;
-        this.numberOfCrashes = 0;
-        this.stageDetails = this.getProcessingStageDetails();
+            /*
+             * Version 1 fields.
+             */
+            this.version = CURRENT_VERSION;
+            this.processingStatus = ProcessingStatus.PENDING;
+            this.numberOfCrashes = 0;
+            this.stageDetails = this.getProcessingStageDetails();
+        } catch (Exception ex) {
+            throw new AutoIngestJobException(String.format("Error creating automated ingest job"), ex);
+        }
     }
 
     /**
@@ -122,30 +126,34 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
      * @param nodeData The coordination service node data for an automated
      *                 ingest job.
      */
-    AutoIngestJob(AutoIngestJobNodeData nodeData) {
-        /*
-         * Version 0 fields.
-         */
-        this.manifest = new Manifest(nodeData.getManifestFilePath(), nodeData.getManifestFileDate(), nodeData.getCaseName(), nodeData.getDeviceId(), nodeData.getDataSourcePath(), Collections.emptyMap());
-        this.nodeName = nodeData.getProcessingHostName();
-        this.caseDirectoryPath = nodeData.getCaseDirectoryPath().toString();
-        this.priority = nodeData.getPriority();
-        this.stage = nodeData.getProcessingStage();
-        this.stageStartDate = nodeData.getProcessingStageStartDate();
-        this.dataSourceProcessor = null; // Transient data not in node data.
-        this.ingestJob = null; // Transient data not in node data.
-        this.cancelled = false; // Transient data not in node data.
-        this.completed = false; // Transient data not in node data.
-        this.completedDate = nodeData.getCompletedDate();
-        this.errorsOccurred = nodeData.getErrorsOccurred();
+    AutoIngestJob(AutoIngestJobNodeData nodeData) throws AutoIngestJobException {
+        try {
+            /*
+             * Version 0 fields.
+             */
+            this.manifest = new Manifest(nodeData.getManifestFilePath(), nodeData.getManifestFileDate(), nodeData.getCaseName(), nodeData.getDeviceId(), nodeData.getDataSourcePath(), Collections.emptyMap());
+            this.nodeName = nodeData.getProcessingHostName();
+            this.caseDirectoryPath = nodeData.getCaseDirectoryPath().toString();
+            this.priority = nodeData.getPriority();
+            this.stage = nodeData.getProcessingStage();
+            this.stageStartDate = nodeData.getProcessingStageStartDate();
+            this.dataSourceProcessor = null; // Transient data not in node data.
+            this.ingestJob = null; // Transient data not in node data.
+            this.cancelled = false; // Transient data not in node data.
+            this.completed = false; // Transient data not in node data.
+            this.completedDate = nodeData.getCompletedDate();
+            this.errorsOccurred = nodeData.getErrorsOccurred();
 
-        /*
-         * Version 1 fields.
-         */
-        this.version = CURRENT_VERSION;
-        this.processingStatus = nodeData.getProcessingStatus();
-        this.numberOfCrashes = nodeData.getNumberOfCrashes();
-        this.stageDetails = this.getProcessingStageDetails();
+            /*
+             * Version 1 fields.
+             */
+            this.version = CURRENT_VERSION;
+            this.processingStatus = nodeData.getProcessingStatus();
+            this.numberOfCrashes = nodeData.getNumberOfCrashes();
+            this.stageDetails = this.getProcessingStageDetails();
+        } catch (Exception ex) {
+            throw new AutoIngestJobException(String.format("Error creating automated ingest job"), ex);
+        }
     }
 
     /**
@@ -622,5 +630,33 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
         }
 
     }
 
+    /**
+     * Exception thrown when there is a problem creating auto ingest job.
+     */
+    final static class AutoIngestJobException extends Exception {
+
+        private static final long serialVersionUID = 1L;
+
+        /**
+         * Constructs an exception to throw when there is a problem creating
+         * auto ingest job.
+         *
+         * @param message The exception message.
+         */
+        private AutoIngestJobException(String message) {
+            super(message);
+        }
+
+        /**
+         * Constructs an exception to throw when there is a problem creating
+         * auto ingest job.
+         *
+         * @param message The exception message.
+         * @param cause   The cause of the exception, if it was an exception.
+         */
+        private AutoIngestJobException(String message, Throwable cause) {
+            super(message, cause);
+        }
+    }
 }
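
The two constructor hunks and the new nested exception above follow a wrap-and-rethrow pattern: any failure while populating fields is converted into one checked exception that callers must handle or declare. A minimal, self-contained sketch of the same pattern, using hypothetical JobRecord / JobRecordException names rather than the Autopsy classes:

import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

// Hypothetical sketch: construction failures are rethrown as one checked
// exception type, and the boundary that processes many inputs catches it,
// logs, and moves on rather than letting the whole batch fail.
final class JobRecord {

    private static final Logger LOGGER = Logger.getLogger(JobRecord.class.getName());

    private final String caseName;
    private final long createdMillis;

    JobRecord(Map<String, String> manifest) throws JobRecordException {
        try {
            this.caseName = manifest.get("caseName").trim();              // NullPointerException if missing
            this.createdMillis = Long.parseLong(manifest.get("created")); // NumberFormatException if malformed
        } catch (Exception ex) {
            throw new JobRecordException("Error creating job record", ex);
        }
    }

    /** Checked exception analogous to the AutoIngestJobException added above. */
    static final class JobRecordException extends Exception {

        private static final long serialVersionUID = 1L;

        JobRecordException(String message, Throwable cause) {
            super(message, cause);
        }
    }

    /** Caller-side handling: log and skip one bad manifest, keep the rest. */
    static void addIfValid(Map<String, String> manifest, List<JobRecord> pending) {
        try {
            pending.add(new JobRecord(manifest));
        } catch (JobRecordException ex) {
            LOGGER.log(Level.SEVERE, "Invalid manifest data, skipping job", ex);
        }
    }
}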
@@ -61,6 +61,7 @@ import java.util.stream.Collectors;
 import javax.annotation.concurrent.GuardedBy;
 import javax.annotation.concurrent.Immutable;
 import javax.annotation.concurrent.ThreadSafe;
+import org.openide.util.Exceptions;
 import org.openide.util.Lookup;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.casemodule.Case.CaseType;
@@ -93,6 +94,7 @@ import org.sleuthkit.autopsy.experimental.configuration.SharedConfiguration;
 import org.sleuthkit.autopsy.experimental.configuration.SharedConfiguration.SharedConfigurationException;
 import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor;
 import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException;
+import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJob.AutoIngestJobException;
 import org.sleuthkit.autopsy.ingest.IngestJob;
 import org.sleuthkit.autopsy.ingest.IngestJob.CancellationReason;
 import org.sleuthkit.autopsy.ingest.IngestJobSettings;
@@ -759,7 +761,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
                     AutoIngestJob deletedJob = new AutoIngestJob(nodeData);
                     deletedJob.setProcessingStatus(AutoIngestJob.ProcessingStatus.DELETED);
                     this.updateCoordinationServiceNode(deletedJob);
-                } catch (AutoIngestJobNodeData.InvalidDataException ex) {
+                } catch (AutoIngestJobNodeData.InvalidDataException | AutoIngestJobException ex) {
                     SYS_LOGGER.log(Level.WARNING, String.format("Invalid auto ingest job node data for %s", manifestPath), ex);
                     return CaseDeletionResult.PARTIALLY_DELETED;
                 } catch (InterruptedException | CoordinationServiceException ex) {
@@ -1015,92 +1017,103 @@ public final class AutoIngestManager extends Observable implements PropertyChang
          * @return TERMINATE if auto ingest is shutting down, CONTINUE if it has
          *         not.
          *
          * @throws IOException if an I/O error occurs, but this implementation
          *                     does not throw.
          */
         @Override
-        public FileVisitResult visitFile(Path filePath, BasicFileAttributes attrs) throws IOException {
+        public FileVisitResult visitFile(Path filePath, BasicFileAttributes attrs) {
             if (Thread.currentThread().isInterrupted()) {
                 return TERMINATE;
             }
 
-            Manifest manifest = null;
-            for (ManifestFileParser parser : Lookup.getDefault().lookupAll(ManifestFileParser.class)) {
-                if (parser.fileIsManifest(filePath)) {
-                    try {
-                        manifest = parser.parse(filePath);
-                        break;
-                    } catch (ManifestFileParserException ex) {
-                        SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to parse %s with parser %s", filePath, parser.getClass().getCanonicalName()), ex);
-                    }
-                }
-                if (Thread.currentThread().isInterrupted()) {
-                    return TERMINATE;
-                }
-            }
-
-            if (Thread.currentThread().isInterrupted()) {
-                return TERMINATE;
-            }
-
-            if (null != manifest) {
-                /*
-                 * Update the mapping of case names to manifest paths that is
-                 * used for case deletion.
-                 */
-                String caseName = manifest.getCaseName();
-                Path manifestPath = manifest.getFilePath();
-                if (casesToManifests.containsKey(caseName)) {
-                    Set<Path> manifestPaths = casesToManifests.get(caseName);
-                    manifestPaths.add(manifestPath);
-                } else {
-                    Set<Path> manifestPaths = new HashSet<>();
-                    manifestPaths.add(manifestPath);
-                    casesToManifests.put(caseName, manifestPaths);
-                }
-
-                /*
-                 * Add a job to the pending jobs queue, the completed jobs list,
-                 * or do crashed job recovery, as required.
-                 */
-                try {
-                    byte[] rawData = coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString());
-                    if (null != rawData && rawData.length > 0) {
-                        try {
-                            AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(rawData);
-                            AutoIngestJob.ProcessingStatus processingStatus = nodeData.getProcessingStatus();
-                            switch (processingStatus) {
-                                case PENDING:
-                                    addPendingJob(manifest, nodeData);
-                                    break;
-                                case PROCESSING:
-                                    doRecoveryIfCrashed(manifest, nodeData);
-                                    break;
-                                case COMPLETED:
-                                    addCompletedJob(manifest, nodeData);
-                                    break;
-                                case DELETED:
-                                    /*
-                                     * Ignore jobs marked as "deleted."
-                                     */
-                                    break;
-                                default:
-                                    SYS_LOGGER.log(Level.SEVERE, "Unknown ManifestNodeData.ProcessingStatus");
-                                    break;
-                            }
-                        } catch (AutoIngestJobNodeData.InvalidDataException ex) {
-                            SYS_LOGGER.log(Level.WARNING, String.format("Invalid auto ingest job node data for %s", manifestPath), ex);
-                        }
-                    } else {
-                        addNewPendingJob(manifest);
-                    }
-                } catch (CoordinationServiceException ex) {
-                    SYS_LOGGER.log(Level.SEVERE, String.format("Error transmitting node data for %s", manifestPath), ex);
-                    return CONTINUE;
-                } catch (InterruptedException ex) {
-                    Thread.currentThread().interrupt();
-                    return TERMINATE;
-                }
-            }
+            try {
+                Manifest manifest = null;
+                for (ManifestFileParser parser : Lookup.getDefault().lookupAll(ManifestFileParser.class)) {
+                    if (parser.fileIsManifest(filePath)) {
+                        try {
+                            manifest = parser.parse(filePath);
+                            break;
+                        } catch (ManifestFileParserException ex) {
+                            SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to parse %s with parser %s", filePath, parser.getClass().getCanonicalName()), ex);
+                        }
+                    }
+                    if (Thread.currentThread().isInterrupted()) {
+                        return TERMINATE;
+                    }
+                }
+
+                if (Thread.currentThread().isInterrupted()) {
+                    return TERMINATE;
+                }
+
+                if (null != manifest) {
+                    /*
+                     * Update the mapping of case names to manifest paths that is
+                     * used for case deletion.
+                     */
+                    String caseName = manifest.getCaseName();
+                    Path manifestPath = manifest.getFilePath();
+                    if (casesToManifests.containsKey(caseName)) {
+                        Set<Path> manifestPaths = casesToManifests.get(caseName);
+                        manifestPaths.add(manifestPath);
+                    } else {
+                        Set<Path> manifestPaths = new HashSet<>();
+                        manifestPaths.add(manifestPath);
+                        casesToManifests.put(caseName, manifestPaths);
+                    }
+
+                    /*
+                     * Add a job to the pending jobs queue, the completed jobs list,
+                     * or do crashed job recovery, as required.
+                     */
+                    try {
+                        byte[] rawData = coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString());
+                        if (null != rawData && rawData.length > 0) {
+                            try {
+                                AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(rawData);
+                                AutoIngestJob.ProcessingStatus processingStatus = nodeData.getProcessingStatus();
+                                switch (processingStatus) {
+                                    case PENDING:
+                                        addPendingJob(manifest, nodeData);
+                                        break;
+                                    case PROCESSING:
+                                        doRecoveryIfCrashed(manifest, nodeData);
+                                        break;
+                                    case COMPLETED:
+                                        addCompletedJob(manifest, nodeData);
+                                        break;
+                                    case DELETED:
+                                        /*
+                                         * Ignore jobs marked as "deleted."
+                                         */
+                                        break;
+                                    default:
+                                        SYS_LOGGER.log(Level.SEVERE, "Unknown ManifestNodeData.ProcessingStatus");
+                                        break;
+                                }
+                            } catch (AutoIngestJobNodeData.InvalidDataException | AutoIngestJobException ex) {
+                                SYS_LOGGER.log(Level.SEVERE, String.format("Invalid auto ingest job node data for %s", manifestPath), ex);
+                            }
+                        } else {
+                            try {
+                                addNewPendingJob(manifest);
+                            } catch (AutoIngestJobException ex) {
+                                SYS_LOGGER.log(Level.SEVERE, String.format("Invalid manifest data for %s", manifestPath), ex);
+                            }
+                        }
+                    } catch (CoordinationServiceException ex) {
+                        SYS_LOGGER.log(Level.SEVERE, String.format("Error transmitting node data for %s", manifestPath), ex);
+                        return CONTINUE;
+                    } catch (InterruptedException ex) {
+                        Thread.currentThread().interrupt();
+                        return TERMINATE;
+                    }
+                }
+
+            } catch (Exception ex) {
+                // Catch all unhandled and unexpected exceptions. Otherwise one bad file
+                // can stop the entire input folder scanning. Given that the exception is unexpected,
+                // I'm hesitant to add logging which requires accessing or de-referencing data.
+                SYS_LOGGER.log(Level.SEVERE, "Unexpected exception in file visitor", ex);
+                return CONTINUE;
+            }
 
             if (!Thread.currentThread().isInterrupted()) {
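
For context on why visitFile() needed the catch-all above: an unchecked exception thrown from a FileVisitor propagates out of Files.walkFileTree() and ends the walk, so every file after the bad one is skipped. A small, hypothetical demonstration of that failure mode (illustrative names and file layout only):

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.concurrent.atomic.AtomicInteger;

public class WalkAbortDemo {

    public static void main(String[] args) throws IOException {
        Path root = Files.createTempDirectory("scan-demo");
        Files.createFile(root.resolve("a.txt"));
        Files.createFile(root.resolve("b.bad"));
        Files.createFile(root.resolve("c.txt"));

        AtomicInteger visited = new AtomicInteger();
        try {
            Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
                    visited.incrementAndGet();
                    if (file.toString().endsWith(".bad")) {
                        // Simulates an unexpected parsing failure for one file.
                        throw new IllegalStateException("unparseable file: " + file);
                    }
                    return FileVisitResult.CONTINUE;
                }
            });
        } catch (IllegalStateException ex) {
            // The walk is over at this point; remaining files were never visited.
            // (Visit order is filesystem-dependent, but the abort happens as soon
            // as the bad file is reached.)
            System.out.println("Walk aborted after " + visited.get() + " file(s): " + ex.getMessage());
        }
    }
}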
@@ -1122,7 +1135,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
          *                              blocked, i.e., if auto ingest is
          *                              shutting down.
          */
-        private void addPendingJob(Manifest manifest, AutoIngestJobNodeData nodeData) throws InterruptedException {
+        private void addPendingJob(Manifest manifest, AutoIngestJobNodeData nodeData) throws InterruptedException, AutoIngestJobException {
             AutoIngestJob job;
             if (nodeData.getVersion() == AutoIngestJobNodeData.getCurrentVersion()) {
                 job = new AutoIngestJob(nodeData);
@@ -1176,7 +1189,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
          *                              blocked, i.e., if auto ingest is
          *                              shutting down.
          */
-        private void addNewPendingJob(Manifest manifest) throws InterruptedException {
+        private void addNewPendingJob(Manifest manifest) throws InterruptedException, AutoIngestJobException {
            /*
             * Create the coordination service node data for the job. Note that
             * getting the lock will create the node for the job (with no data)
@@ -1218,7 +1231,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
          *                              blocked, i.e., if auto ingest is
          *                              shutting down.
          */
-        private void doRecoveryIfCrashed(Manifest manifest, AutoIngestJobNodeData nodeData) throws InterruptedException {
+        private void doRecoveryIfCrashed(Manifest manifest, AutoIngestJobNodeData nodeData) throws InterruptedException, AutoIngestJobException {
            /*
             * Try to get an exclusive lock on the coordination service node for
             * the job. If the lock cannot be obtained, another host in the auto
@@ -1314,7 +1327,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
          * @throws CoordinationServiceException
          * @throws InterruptedException
          */
-        private void addCompletedJob(Manifest manifest, AutoIngestJobNodeData nodeData) throws CoordinationServiceException, InterruptedException {
+        private void addCompletedJob(Manifest manifest, AutoIngestJobNodeData nodeData) throws CoordinationServiceException, InterruptedException, AutoIngestJobException {
             Path caseDirectoryPath = PathUtils.findCaseDirectory(rootOutputDirectory, manifest.getCaseName());
             if (null != caseDirectoryPath) {
                 AutoIngestJob job;
@@ -265,7 +265,7 @@ public final class AutoIngestMonitor extends Observable implements PropertyChang
                     }
                 } catch (InterruptedException ex) {
                     LOGGER.log(Level.SEVERE, String.format("Unexpected interrupt while retrieving coordination service node data for '%s'", node), ex);
-                } catch (AutoIngestJobNodeData.InvalidDataException ex) {
+                } catch (AutoIngestJobNodeData.InvalidDataException | AutoIngestJob.AutoIngestJobException ex) {
                     LOGGER.log(Level.SEVERE, String.format("Unable to use node data for '%s'", node), ex);
                 }
             }
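
The monitor hunk above, like the case-deletion hunk earlier, only widens an existing handler into a multi-catch. A tiny sketch of that Java multi-catch form with hypothetical exception types: the alternatives may not be subtypes of one another, and the caught variable is implicitly final.

// Hypothetical sketch of the multi-catch form used in the two widened handlers.
class MultiCatchSketch {

    static class InvalidDataException extends Exception {
        private static final long serialVersionUID = 1L;
    }

    static class JobCreationException extends Exception {
        private static final long serialVersionUID = 1L;
    }

    static void refresh(byte[] rawData) {
        try {
            if (rawData == null) {
                throw new InvalidDataException();
            }
            if (rawData.length == 0) {
                throw new JobCreationException();
            }
            // ... build the job from rawData ...
        } catch (InvalidDataException | JobCreationException ex) {
            // One handler covers both failure modes; ex is implicitly final.
            System.err.println("Unable to use node data: " + ex);
        }
    }
}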