AIN/AID 1.0/AID 2.0 revisions

Richard Cordovano 2017-09-14 19:51:37 -04:00
parent d7ab84988f
commit a43cebeca6
10 changed files with 1093 additions and 1158 deletions

View File

@ -304,7 +304,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
* text box.
*/
@Messages({
"# {0} - case db status", "# {1} - search svc Status", "# {2} - coord svc Status", "# {3} - msg broker status",
"# {0} - case db status", "# {1} - search svc Status", "# {2} - coord svc Status", "# {3} - msg broker status",
"AutoIngestControlPanel.tbServicesStatusMessage.Message=Case databases {0}, keyword search {1}, coordination {2}, messaging {3} ",
"AutoIngestControlPanel.tbServicesStatusMessage.Message.Up=up",
"AutoIngestControlPanel.tbServicesStatusMessage.Message.Down=down",
@ -679,7 +679,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
try {
manager.startUp();
autoIngestStarted = true;
} catch (AutoIngestManager.AutoIngestManagerStartupException ex) {
} catch (AutoIngestManager.AutoIngestManagerException ex) {
SYS_LOGGER.log(Level.SEVERE, "Dashboard error starting up auto ingest", ex);
tbStatusMessage.setText(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.AutoIngestStartupError"));
manager = null;
@ -982,7 +982,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
List<AutoIngestJob> completedJobs = new ArrayList<>();
manager.getJobs(pendingJobs, runningJobs, completedJobs);
// Sort the completed jobs list by completed date
Collections.sort(completedJobs, new AutoIngestJob.ReverseDateCompletedComparator());
Collections.sort(completedJobs, new AutoIngestJob.ReverseCompletedDateComparator());
EventQueue.invokeLater(new RefreshComponentsTask(pendingJobs, runningJobs, completedJobs));
}
}
@ -1075,7 +1075,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
* @return True or false.
*/
private boolean isLocalJob(AutoIngestJob job) {
return job.getNodeName().equals(LOCAL_HOST_NAME); // RJCTODO: Is getProcessingHost a better name?
return job.getProcessingHostName().equals(LOCAL_HOST_NAME);
}
/**
@ -1147,15 +1147,15 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
tableModel.addRow(new Object[]{
job.getManifest().getCaseName(), // CASE
job.getManifest().getDataSourcePath().getFileName(), // DATA_SOURCE
job.getNodeName(), // HOST_NAME
job.getProcessingHostName(), // HOST_NAME
job.getManifest().getDateFileCreated(), // CREATED_TIME
job.getStageStartDate(), // STARTED_TIME
job.getProcessingStageStartDate(), // STARTED_TIME
job.getCompletedDate(), // COMPLETED_TIME
status.getDescription(), // ACTIVITY
job.hasErrors(), // STATUS
job.getErrorsOccurred(), // STATUS
((Date.from(Instant.now()).getTime()) - (status.getStartDate().getTime())), // ACTIVITY_TIME
job.getCaseDirectoryPath(), // CASE_DIRECTORY_PATH
job.getNodeName().equals(LOCAL_HOST_NAME), // IS_LOCAL_JOB
job.getProcessingHostName().equals(LOCAL_HOST_NAME), // IS_LOCAL_JOB
job.getManifest().getFilePath()}); // MANIFEST_FILE_PATH
}
} catch (Exception ex) {
@ -1701,11 +1701,17 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
*
* @param evt The button click event.
*/
@Messages({"AutoIngestControlPanel.casePrioritization.errorMessage=An error occurred when prioritizing the case. Some or all jobs may not have been prioritized."})
private void bnPrioritizeCaseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnPrioritizeCaseActionPerformed
if (pendingTableModel.getRowCount() > 0 && pendingTable.getSelectedRow() >= 0) {
this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
String caseName = (pendingTableModel.getValueAt(pendingTable.getSelectedRow(), JobsTableModelColumns.CASE.ordinal())).toString();
manager.prioritizeCase(caseName);
try {
manager.prioritizeCase(caseName);
} catch (AutoIngestManager.AutoIngestManagerException ex) {
SYS_LOGGER.log(Level.SEVERE, "Error prioritizing a case", ex);
MessageNotifyUtil.Message.error(Bundle.AutoIngestControlPanel_casePrioritization_errorMessage());
}
refreshTables();
pendingTable.clearSelection();
enablePendingTableButtons(false);
@ -1753,12 +1759,18 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
options[0]);
}
}//GEN-LAST:event_bnShowCaseLogActionPerformed
@Messages({"AutoIngestControlPanel.jobPrioritization.errorMessage=An error occurred when prioritizing the job."})
private void bnPrioritizeJobActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnPrioritizeJobActionPerformed
if (pendingTableModel.getRowCount() > 0 && pendingTable.getSelectedRow() >= 0) {
this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
Path manifestFilePath = (Path) (pendingTableModel.getValueAt(pendingTable.getSelectedRow(), JobsTableModelColumns.MANIFEST_FILE_PATH.ordinal()));
manager.prioritizeJob(manifestFilePath);
try {
manager.prioritizeJob(manifestFilePath);
} catch (AutoIngestManager.AutoIngestManagerException ex) {
SYS_LOGGER.log(Level.SEVERE, "Error prioritizing a job", ex);
MessageNotifyUtil.Message.error(Bundle.AutoIngestControlPanel_jobPrioritization_errorMessage());
}
refreshTables();
pendingTable.clearSelection();
enablePendingTableButtons(false);

View File

@ -31,7 +31,6 @@ import java.util.logging.Level;
import javax.swing.DefaultListSelectionModel;
import java.awt.Color;
import java.beans.PropertyChangeEvent;
import java.util.Collections;
import javax.swing.JPanel;
import javax.swing.JTable;
import javax.swing.SwingWorker;
@ -438,11 +437,9 @@ public final class AutoIngestDashboard extends JPanel implements Observer {
List<AutoIngestJob> pendingJobs = jobsSnapshot.getPendingJobs();
List<AutoIngestJob> runningJobs = jobsSnapshot.getRunningJobs();
List<AutoIngestJob> completedJobs = jobsSnapshot.getCompletedJobs();
// DLG: DONE! Do the appropriate sorts for each table.
Collections.sort(pendingJobs, new AutoIngestJob.PriorityComparator());
runningJobs.sort(new AutoIngestJob.AlphabeticalComparator());
pendingJobs.sort(new AutoIngestJob.PriorityComparator());
runningJobs.sort(new AutoIngestJob.CaseNameAndProcessingHostComparator());
completedJobs.sort(new AutoIngestJob.ReverseCompletedDateComparator());
refreshTable(pendingJobs, pendingTable, pendingTableModel);
refreshTable(runningJobs, runningTable, runningTableModel);
refreshTable(completedJobs, completedTable, completedTableModel);
@ -470,17 +467,16 @@ public final class AutoIngestDashboard extends JPanel implements Observer {
AutoIngestJob.StageDetails status = job.getStageDetails();
tableModel.addRow(new Object[]{
job.getManifest().getCaseName(), // CASE
job.getManifest().getDataSourcePath().getFileName(), // DATA_SOURCE
job.getNodeName(), // HOST_NAME
job.getManifest().getDataSourcePath().getFileName(), job.getProcessingHostName(), // HOST_NAME
job.getManifest().getDateFileCreated(), // CREATED_TIME
job.getStageStartDate(), // STARTED_TIME // RJCTODO: add "processing" to method names?
job.getProcessingStageStartDate(), // STARTED_TIME
job.getCompletedDate(), // COMPLETED_TIME
status.getDescription(), // ACTIVITY
job.hasErrors(), // STATUS //RJCTODO: Change name to getErrorsOccurred for consistency?
job.getErrorsOccurred(), // STATUS
((Date.from(Instant.now()).getTime()) - (status.getStartDate().getTime())), // ACTIVITY_TIME
job.getCaseDirectoryPath(), // CASE_DIRECTORY_PATH
job.getManifest().getFilePath()//DLG: , // MANIFEST_FILE_PATH
//DLG: Put job object in the table RJCTODO
job.getManifest().getFilePath() // MANIFEST_FILE_PATH
//DLG: Put job object in the table
});
}
setSelectedEntry(table, tableModel, currentRow);
@ -591,7 +587,7 @@ public final class AutoIngestDashboard extends JPanel implements Observer {
STAGE_TIME.getColumnHeader(),
CASE_DIRECTORY_PATH.getColumnHeader(),
MANIFEST_FILE_PATH.getColumnHeader() //DLG: ,
//DLG: JOB.getColumnHeader()
//DLG: JOB.getColumnHeader()
};
};

View File

@ -42,7 +42,9 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
private static final long serialVersionUID = 1L;
private static final int CURRENT_VERSION = 1;
private static final int DEFAULT_PRIORITY = 0;
private static final String LOCAL_HOST_NAME = NetworkUtils.getLocalHostName();
private final int version;
private final Manifest manifest;
private final String nodeName;
@GuardedBy("this")
@ -54,6 +56,8 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
@GuardedBy("this")
private Date stageStartDate;
@GuardedBy("this")
private StageDetails stageDetails;
@GuardedBy("this")
transient private DataSourceProcessor dataSourceProcessor;
@GuardedBy("this")
transient private IngestJob ingestJob;
@ -65,40 +69,34 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
private Date completedDate;
@GuardedBy("this")
private boolean errorsOccurred;
private final int version;
@GuardedBy("this")
private ProcessingStatus processingStatus;
@GuardedBy("this")
private int numberOfCrashes;
/**
* Constructs an automated ingest job for a manifest. The manifest specifies
* a co-located data source and a case to which the data source is to be
* added.
* Constructs a new automated ingest job for a manifest. All job state not
* specified in the manifest is set to the default state for a new job.
*
* @param manifest The manifest
* @param caseDirectoryPath The path to the case directory for the job, may
* be null.
* @param priority The priority of the job. The higher the number,
* the higher the priority.
* @param nodeName If the job is in progress, the node doing the
* processing, otherwise the local host.
* @param stage The processing stage for display purposes.
* @param completedDate The date when the job was completed. Use the
* epoch (January 1, 1970, 00:00:00 GMT) to
* indicate that the job is not completed, i.e., new
* Date(0L).
* @param manifest The manifest.
*/
AutoIngestJob(Manifest manifest, Path caseDirectoryPath, int priority, String nodeName, Stage stage, Date completedDate, boolean errorsOccurred) {
AutoIngestJob(Manifest manifest) {
this.version = CURRENT_VERSION;
this.manifest = manifest;
if (null != caseDirectoryPath) {
this.caseDirectoryPath = caseDirectoryPath.toString();
} else {
this.caseDirectoryPath = "";
}
this.priority = priority;
this.nodeName = nodeName;
this.stage = stage;
this.nodeName = AutoIngestJob.LOCAL_HOST_NAME;
this.caseDirectoryPath = "";
this.priority = DEFAULT_PRIORITY;
this.stage = Stage.PENDING;
this.stageStartDate = manifest.getDateFileCreated();
this.completedDate = completedDate;
this.errorsOccurred = errorsOccurred;
this.version = AutoIngestJob.CURRENT_VERSION;
this.stageDetails = this.getStageDetails();
this.dataSourceProcessor = null;
this.ingestJob = null;
this.cancelled = false;
this.completed = false;
this.completedDate = new Date(0);
this.errorsOccurred = false;
this.processingStatus = ProcessingStatus.PENDING;
this.numberOfCrashes = 0;
}
/**
@ -111,20 +109,27 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
*
* @param nodeData The node data.
*/
AutoIngestJob(AutoIngestJobData nodeData) {
AutoIngestJob(AutoIngestJobNodeData nodeData) {
this.version = nodeData.getVersion();
this.manifest = new Manifest(nodeData.getManifestFilePath(), nodeData.getManifestFileDate(), nodeData.getCaseName(), nodeData.getDeviceId(), nodeData.getDataSourcePath(), Collections.emptyMap());
this.nodeName = nodeData.getProcessingHostName();
this.caseDirectoryPath = nodeData.getCaseDirectoryPath().toString();
this.priority = nodeData.getPriority(); // RJCTODO: This should probably go into the manifest...
this.nodeName = nodeData.getProcessingHost();
// this.stage = nodeData.getProcessingStage(); // RJCTODO
this.priority = nodeData.getPriority();
this.stage = nodeData.getProcessingStage();
this.stageStartDate = nodeData.getProcessingStageStartDate();
this.stageDetails = this.getStageDetails();
this.dataSourceProcessor = null;
this.ingestJob = null;
this.cancelled = false;
this.completed = false;
this.completedDate = nodeData.getCompletedDate();
this.errorsOccurred = nodeData.getErrorsOccurred();
this.version = nodeData.getVersion();
this.processingStatus = nodeData.getProcessingStatus();
this.numberOfCrashes = nodeData.getNumberOfCrashes();
}
/**
* Gets the auto ingest jobmanifest.
* Gets the auto ingest job manifest.
*
* @return The manifest.
*/
@ -132,16 +137,6 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
return this.manifest;
}
/**
* Queries whether or not a case directory path has been set for this auto
* ingest job.
*
* @return True or false
*/
synchronized boolean hasCaseDirectoryPath() {
return (false == this.caseDirectoryPath.isEmpty());
}
/**
* Sets the path to the case directory of the case associated with this job.
*
@ -187,23 +182,19 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
}
synchronized void setStage(Stage newStage) {
setStage(newStage, Date.from(Instant.now()));
}
synchronized void setStage(Stage newStage, Date stageStartDate) {
if (Stage.CANCELLING == this.stage && Stage.COMPLETED != newStage) {
return;
}
this.stage = newStage;
this.stageStartDate = stageStartDate;
this.stageStartDate = Date.from(Instant.now());
}
synchronized Stage getStage() {
synchronized Stage getProcessingStage() {
return this.stage;
}
synchronized Date getStageStartDate() {
return this.stageStartDate;
synchronized Date getProcessingStageStartDate() {
return new Date(this.stageStartDate.getTime());
}
synchronized StageDetails getStageDetails() {
@ -240,9 +231,14 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
description = this.stage.getDisplayText();
startDate = this.stageStartDate;
}
return new StageDetails(description, startDate);
this.stageDetails = new StageDetails(description, startDate);
return this.stageDetails;
}
synchronized void setStageDetails(StageDetails stageDetails) {
this.stageDetails = stageDetails;
}
synchronized void setDataSourceProcessor(DataSourceProcessor dataSourceProcessor) {
this.dataSourceProcessor = dataSourceProcessor;
}
@ -287,7 +283,7 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
* @param completedDate The completion date.
*/
synchronized void setCompletedDate(Date completedDate) {
this.completedDate = completedDate;
this.completedDate = new Date(completedDate.getTime());
}
/**
@ -297,7 +293,7 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
* @return The completion date.
*/
synchronized Date getCompletedDate() {
return completedDate;
return new Date(completedDate.getTime());
}
/**
@ -314,18 +310,34 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
*
* @return True or false.
*/
synchronized boolean hasErrors() {
synchronized boolean getErrorsOccurred() {
return this.errorsOccurred;
}
String getNodeName() {
synchronized String getProcessingHostName() {
return nodeName;
}
int getVersion() {
return this.version;
}
synchronized ProcessingStatus getProcessingStatus() {
return this.processingStatus;
}
synchronized void setProcessingStatus(ProcessingStatus processingStatus) {
this.processingStatus = processingStatus;
}
synchronized int getNumberOfCrashes() {
return this.numberOfCrashes;
}
synchronized void setNumberOfCrashes(int numberOfCrashes) {
this.numberOfCrashes = numberOfCrashes;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof AutoIngestJob)) {
@ -352,11 +364,11 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
* Custom comparator that allows us to sort List<AutoIngestJob> on reverse
* chronological date modified (descending)
*/
static class ReverseDateCompletedComparator implements Comparator<AutoIngestJob> {
static class ReverseCompletedDateComparator implements Comparator<AutoIngestJob> {
@Override
public int compare(AutoIngestJob o1, AutoIngestJob o2) {
return -o1.getStageStartDate().compareTo(o2.getStageStartDate());
return -o1.getCompletedDate().compareTo(o2.getCompletedDate());
}
}
@ -378,13 +390,13 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
* alphabetically except for jobs for the current host, which are placed at
* the top of the list.
*/
static class AlphabeticalComparator implements Comparator<AutoIngestJob> {
static class CaseNameAndProcessingHostComparator implements Comparator<AutoIngestJob> {
@Override
public int compare(AutoIngestJob o1, AutoIngestJob o2) {
if (o1.getNodeName().equalsIgnoreCase(LOCAL_HOST_NAME)) {
if (o1.getProcessingHostName().equalsIgnoreCase(LOCAL_HOST_NAME)) {
return -1; // o1 is for current case, float to top
} else if (o2.getNodeName().equalsIgnoreCase(LOCAL_HOST_NAME)) {
} else if (o2.getProcessingHostName().equalsIgnoreCase(LOCAL_HOST_NAME)) {
return 1; // o2 is for current case, float to top
} else {
return o1.getManifest().getCaseName().compareToIgnoreCase(o2.getManifest().getCaseName());
@ -393,6 +405,16 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
}
/**
* Processing status for the auto ingest job for the manifest.
*/
enum ProcessingStatus {
PENDING,
PROCESSING,
COMPLETED,
DELETED
}
enum Stage {
PENDING("Pending"),
@ -422,12 +444,13 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
}
@Immutable
static final class StageDetails {
static final class StageDetails implements Serializable {
private static final long serialVersionUID = 1L;
private final String description;
private final Date startDate;
private StageDetails(String description, Date startDate) {
StageDetails(String description, Date startDate) {
this.description = description;
this.startDate = startDate;
}
@ -437,7 +460,7 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
}
Date getStartDate() {
return this.startDate;
return new Date(this.startDate.getTime());
}
}
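
For orientation, a minimal usage sketch of the revised AutoIngestJob API, assuming same-package (package-private) access; the helper class, its method, and the supplied Manifest instances are hypothetical and not part of this commit.

package org.sleuthkit.autopsy.experimental.autoingest;

import java.util.ArrayList;
import java.util.List;

// Hypothetical illustration of the revised constructor and the renamed comparator.
final class AutoIngestJobUsageSketch {

    // Builds two new pending jobs and sorts them the way the running jobs table does:
    // jobs processing on the local host float to the top, then case names compare alphabetically.
    static List<AutoIngestJob> buildAndSort(Manifest firstManifest, Manifest secondManifest) {
        AutoIngestJob firstJob = new AutoIngestJob(firstManifest);   // defaults: PENDING stage, default priority, local host, no errors
        AutoIngestJob secondJob = new AutoIngestJob(secondManifest);
        List<AutoIngestJob> jobs = new ArrayList<>();
        jobs.add(firstJob);
        jobs.add(secondJob);
        jobs.sort(new AutoIngestJob.CaseNameAndProcessingHostComparator());
        return jobs;
    }
}

Completed jobs would instead be sorted with AutoIngestJob.ReverseCompletedDateComparator, as the dashboard and control panel code above do.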

View File

@ -1,615 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2017 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.experimental.autoingest;
import java.io.Serializable;
import java.nio.ByteBuffer;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Date;
import javax.lang.model.type.TypeKind;
/**
* A coordination service node data transfer object for an auto ingest job.
*/
final class AutoIngestJobData implements Serializable {
private static final long serialVersionUID = 1L;
private static final int NODE_DATA_VERSION = 1;
private static final int MAX_POSSIBLE_NODE_DATA_SIZE = 131493;
private static final int DEFAULT_PRIORITY = 0;
/*
* Version 0 fields.
*/
private final boolean coordSvcNodeDataWasSet;
private int processingStatus;
private int priority;
private int numberOfCrashes;
private long completedDate;
private boolean errorsOccurred;
/*
* Version 1 fields.
*/
private int version;
private String deviceId;
private String caseName;
private String caseDirectoryPath;
private long manifestFileDate;
private String manifestFilePath;
private String dataSourcePath;
private byte processingStage;
private long processingStageStartDate;
private String processingHost;
//DLG: Add caseDirectoryPath from AutoIngestJob
/*
* DLG: DONE! Rename class to AutoIngestJobData - Add String
* caseDirectoryPath. Needed to locate case auto ingest log and later, for
* case deletion
*
* DLG: Add String processingStage, long processingStageStartDate, String
* processingHost fields. These three fields are needed to populate running
* jobs table; use of auto ingest job data is not enough, because there
* would be no data until a status event was received by the auto ingest
* monitor.
*
* DLG: Update the AutoIngestManager code that creates ZK nodes for auto ingest
* jobs to write the new fields described above to new nodes
*
* DLG: Update the AutoIngestManager code that publishes auto ingest status
* events for the current job to update the processing status fields
* described above in addition to publishing AutoIngestJobStatusEvents.
* Probably also need to write this data initially when a job becomes the
* current job.
*/
/**
* Constructs a coordination service node data data transfer object for an
* auto ingest manifest from the raw bytes obtained from the coordination
* service.
*
* @param nodeData The raw bytes received from the coordination service.
*/
AutoIngestJobData(byte[] nodeData) throws AutoIngestJobDataException {
ByteBuffer buffer = ByteBuffer.wrap(nodeData);
this.coordSvcNodeDataWasSet = buffer.hasRemaining();
if (this.coordSvcNodeDataWasSet) {
this.processingStatus = buffer.getInt();
this.priority = buffer.getInt();
this.numberOfCrashes = buffer.getInt();
this.completedDate = buffer.getLong();
int errorFlag = buffer.getInt();
this.errorsOccurred = (1 == errorFlag);
} else {
this.processingStatus = ProcessingStatus.PENDING.ordinal();
this.priority = DEFAULT_PRIORITY;
this.numberOfCrashes = 0;
this.completedDate = 0L;
this.errorsOccurred = false;
}
if (buffer.hasRemaining()) {
/*
* There are more than 24 bytes in the buffer, so we assume the
* version is greater than '0'.
*/
this.version = buffer.getInt();
if (this.version > NODE_DATA_VERSION) {
throw new AutoIngestJobDataException(String.format("Node data version %d is not supported.", this.version));
}
this.deviceId = getStringFromBuffer(buffer, TypeKind.BYTE);
this.caseName = getStringFromBuffer(buffer, TypeKind.BYTE);
this.caseDirectoryPath = getStringFromBuffer(buffer, TypeKind.SHORT);
this.manifestFileDate = buffer.getLong();
this.manifestFilePath = getStringFromBuffer(buffer, TypeKind.SHORT);
this.dataSourcePath = getStringFromBuffer(buffer, TypeKind.SHORT);
this.processingStage = buffer.get();
this.processingStageStartDate = buffer.getLong();
this.processingHost = getStringFromBuffer(buffer, TypeKind.SHORT);
} else {
this.version = 0;
this.deviceId = "";
this.caseName = "";
this.caseDirectoryPath = "";
this.manifestFileDate = 0L;
this.manifestFilePath = "";
this.dataSourcePath = "";
this.processingStage = (byte)AutoIngestJob.Stage.PENDING.ordinal();
this.processingStageStartDate = 0L;
this.processingHost = "";
}
}
/**
* Constructs a coordination service node data data transfer object for an
* auto ingest manifest from values provided by the auto ingest system.
*
* @param manifest The manifest
* @param status The processing status of the manifest.
* @param priority The priority of the manifest.
* @param numberOfCrashes The number of times auto ingest jobs for the
* manifest have crashed during processing.
* @param completedDate The date the auto ingest job for the manifest was
* completed.
* @param errorsOccurred Boolean to determine if errors have occurred.
*/
AutoIngestJobData(Manifest manifest, ProcessingStatus status, int priority, int numberOfCrashes, Date completedDate, boolean errorOccurred) {
this.coordSvcNodeDataWasSet = false;
this.processingStatus = status.ordinal();
this.priority = priority;
this.numberOfCrashes = numberOfCrashes;
this.completedDate = completedDate.getTime();
this.errorsOccurred = errorOccurred;
this.version = NODE_DATA_VERSION;
this.deviceId = manifest.getDeviceId();
this.caseName = manifest.getCaseName();
this.caseDirectoryPath = ""; //DLG: RJCTODO: completed job has a case directory
this.manifestFileDate = manifest.getDateFileCreated().getTime();
this.manifestFilePath = manifest.getFilePath().toString();
this.dataSourcePath = manifest.getDataSourcePath().toString();
this.processingStage = (byte)AutoIngestJob.Stage.PENDING.ordinal();
this.processingStageStartDate = 0L;
this.processingHost = "";
}
/**
* Indicates whether or not the coordination service node data was set,
* i.e., this object was constructed from raw bytes from the coordination
* service node for the manifest.
*
* @return True or false.
*/
boolean coordSvcNodeDataWasSet() {
return this.coordSvcNodeDataWasSet;
}
/**
* Gets the processing status of the manifest
*
* @return The processing status of the manifest.
*/
ProcessingStatus getProcessingStatus() {
return ProcessingStatus.values()[this.processingStatus];
}
/**
* Sets the processing status of the manifest
*
* @param processingStatus The processing status of the manifest.
*/
void setProcessingStatus(ProcessingStatus processingStatus) {
this.processingStatus = processingStatus.ordinal();
}
/**
* Gets the priority of the manifest.
*
* @return The priority of the manifest.
*/
int getPriority() {
return this.priority;
}
/**
* Sets the priority of the manifest. A higher number indicates a higher
* priority.
*
* @param priority The priority of the manifest.
*/
void setPriority(int priority) {
this.priority = priority;
}
/**
* Gets the number of times auto ingest jobs for the manifest have crashed
* during processing.
*
* @return The number of times auto ingest jobs for the manifest have
* crashed during processing.
*/
int getNumberOfCrashes() {
return this.numberOfCrashes;
}
/**
* Sets the number of times auto ingest jobs for the manifest have crashed
* during processing.
*
* @param numberOfCrashes The number of times auto ingest jobs for the
* manifest have crashed during processing.
*/
void setNumberOfCrashes(int numberOfCrashes) {
this.numberOfCrashes = numberOfCrashes;
}
/**
* Gets the date the auto ingest job for the manifest was completed.
*
* @return The date the auto ingest job for the manifest was completed. The
* epoch (January 1, 1970, 00:00:00 GMT) indicates the date is not
* set, i.e., Date.getTime() returns 0L.
*/
Date getCompletedDate() {
return new Date(this.completedDate);
}
/**
* Sets the date the auto ingest job for the manifest was completed.
*
* @param completedDate The date the auto ingest job for the manifest was
* completed. Use the epoch (January 1, 1970, 00:00:00
* GMT) to indicate the date is not set, i.e., new
* Date(0L).
*/
void setCompletedDate(Date completedDate) {
this.completedDate = completedDate.getTime();
}
/**
* Queries whether or not any errors occurred during the processing of the
* auto ingest job for the manifest.
*
* @return True or false.
*/
boolean getErrorsOccurred() {
return this.errorsOccurred;
}
/**
* Sets whether or not any errors occurred during the processing of the auto
* ingest job for the manifest.
*
* @param errorsOccurred True or false.
*/
void setErrorsOccurred(boolean errorsOccurred) {
this.errorsOccurred = errorsOccurred;
}
/**
* Get the node data version.
*
* @return The node data version.
*/
int getVersion() {
return this.version;
}
/**
* Set the node data version.
*
* @param version The node data version.
*/
void setVersion(int version) {
this.version = version;
}
/**
* Get the device ID.
*
* @return The device ID.
*/
String getDeviceId() {
return this.deviceId;
}
/**
* Set the device ID.
*
* @param deviceId The device ID.
*/
void setDeviceId(String deviceId) {
this.deviceId = deviceId;
}
/**
* Get the case name.
*
* @return The case name.
*/
String getCaseName() {
return this.caseName;
}
/**
* Set the case name.
*
* @param caseName The case name.
*/
void setCaseName(String caseName) {
this.caseName = caseName;
}
/**
* Queries whether or not a case directory path has been set for this auto
* ingest job.
*
* @return True or false
*/
synchronized boolean hasCaseDirectoryPath() {
return (false == this.caseDirectoryPath.isEmpty());
}
/**
* Sets the path to the case directory of the case associated with this job.
*
* @param caseDirectoryPath The path to the case directory.
*/
synchronized void setCaseDirectoryPath(Path caseDirectoryPath) {
if(caseDirectoryPath == null) {
this.caseDirectoryPath = "";
} else {
this.caseDirectoryPath = caseDirectoryPath.toString();
}
}
/**
* Gets the path to the case directory of the case associated with this job,
* may be null.
*
* @return The case directory path or null if the case directory has not
* been created yet.
*/
synchronized Path getCaseDirectoryPath() {
if (!caseDirectoryPath.isEmpty()) {
return Paths.get(caseDirectoryPath);
} else {
return null;
}
}
/**
* Gets the date the manifest was created.
*
* @return The date the manifest was created. The epoch (January 1, 1970,
* 00:00:00 GMT) indicates the date is not set, i.e., Date.getTime()
* returns 0L.
*/
Date getManifestFileDate() {
return new Date(this.manifestFileDate);
}
/**
* Sets the date the manifest was created.
*
* @param manifestFileDate The date the manifest was created. Use the epoch
* (January 1, 1970, 00:00:00 GMT) to indicate the
* date is not set, i.e., new Date(0L).
*/
void setManifestFileDate(Date manifestFileDate) {
this.manifestFileDate = manifestFileDate.getTime();
}
/**
* Get the manifest file path.
*
* @return The manifest file path.
*/
Path getManifestFilePath() {
return Paths.get(this.manifestFilePath);
}
/**
* Set the manifest file path.
*
* @param manifestFilePath The manifest file path.
*/
void setManifestFilePath(Path manifestFilePath) {
if (manifestFilePath != null) {
this.manifestFilePath = manifestFilePath.toString();
} else {
this.manifestFilePath = "";
}
}
/**
* Get the data source path.
*
* @return The data source path.
*/
Path getDataSourcePath() {
return Paths.get(dataSourcePath);
}
/**
* Get the file name portion of the data source path.
*
* @return The data source file name.
*/
public String getDataSourceFileName() {
return Paths.get(dataSourcePath).getFileName().toString();
}
/**
* Set the data source path.
*
* @param dataSourcePath The data source path.
*/
void setDataSourcePath(Path dataSourcePath) {
if (dataSourcePath != null) {
this.dataSourcePath = dataSourcePath.toString();
} else {
this.dataSourcePath = "";
}
}
/**
* Get the processing stage.
*
* @return The processing stage.
*/
AutoIngestJob.Stage getProcessingStage() {
return AutoIngestJob.Stage.values()[this.processingStage];
}
/**
* Set the processing stage.
*
* @param processingStage The processing stage.
*/
void setProcessingStage(AutoIngestJob.Stage processingStage) {
this.processingStage = (byte)processingStage.ordinal();
}
/**
* Get the processing stage start date.
*
* @return The processing stage start date.
*/
Date getProcessingStageStartDate() {
return new Date(this.processingStageStartDate);
}
/**
* Set the processing stage start date.
*
* @param processingStageStartDate The processing stage start date.
*/
void setProcessingStageStartDate(Date processingStageStartDate) {
this.processingStageStartDate = processingStageStartDate.getTime();
}
/**
* Get the processing host.
*
* @return The processing host.
*/
String getProcessingHost() {
return this.processingHost;
}
/**
* Set the processing host.
*
* @param processingHost The processing host.
*/
void setProcessingHost(String processingHost) {
this.processingHost = processingHost;
}
/**
* This method will upgrade the node data to the latest version.
*
* @param manifest The manifest.
* @param caseDirectoryPath The case directory path.
* @param processingHost The host name.
* @param processingStage The processing stage.
*/
public void upgradeNode(Manifest manifest, Path caseDirectoryPath, String processingHost, AutoIngestJob.Stage processingStage) {
if(this.version < NODE_DATA_VERSION) {
this.setVersion(NODE_DATA_VERSION);
this.setDeviceId(manifest.getDeviceId());
this.setCaseName(manifest.getCaseName());
this.setCaseDirectoryPath(caseDirectoryPath);
this.setManifestFileDate(manifest.getDateFileCreated());
this.setManifestFilePath(manifest.getFilePath());
this.setDataSourcePath(manifest.getDataSourcePath());
this.setProcessingStage(processingStage);
this.setProcessingStageStartDate(manifest.getDateFileCreated());
this.setProcessingHost(processingHost);
}
}
/**
* Gets the node data as raw bytes that can be sent to the coordination
* service.
*
* @return The manifest node data as a byte array.
*/
byte[] toArray() {
ByteBuffer buffer = ByteBuffer.allocate(MAX_POSSIBLE_NODE_DATA_SIZE);
// Write data (compatible with version 0)
buffer.putInt(this.processingStatus);
buffer.putInt(this.priority);
buffer.putInt(this.numberOfCrashes);
buffer.putLong(this.completedDate);
buffer.putInt(this.errorsOccurred ? 1 : 0);
if (this.version > 0) {
// Write version
buffer.putInt(this.version);
// Write data
putStringIntoBuffer(deviceId, buffer, TypeKind.BYTE);
putStringIntoBuffer(caseName, buffer, TypeKind.BYTE);
putStringIntoBuffer(caseDirectoryPath, buffer, TypeKind.SHORT);
buffer.putLong(this.manifestFileDate);
putStringIntoBuffer(manifestFilePath, buffer, TypeKind.SHORT);
putStringIntoBuffer(dataSourcePath, buffer, TypeKind.SHORT);
buffer.put(this.processingStage);
buffer.putLong(this.processingStageStartDate);
putStringIntoBuffer(processingHost, buffer, TypeKind.SHORT);
}
// Prepare the array
byte[] array = new byte[buffer.position()];
buffer.rewind();
buffer.get(array, 0, array.length);
return array;
}
private String getStringFromBuffer(ByteBuffer buffer, TypeKind lengthType) {
int length = 0;
String output = "";
switch (lengthType) {
case BYTE:
length = buffer.get();
break;
case SHORT:
length = buffer.getShort();
break;
}
if (length > 0) {
byte[] array = new byte[length];
buffer.get(array, 0, length);
output = new String(array);
}
return output;
}
private void putStringIntoBuffer(String stringValue, ByteBuffer buffer, TypeKind lengthType) {
switch (lengthType) {
case BYTE:
buffer.put((byte) stringValue.length());
break;
case SHORT:
buffer.putShort((short) stringValue.length());
break;
}
buffer.put(stringValue.getBytes());
}
/**
* Processing status for the auto ingest job for the manifest.
*/
enum ProcessingStatus {
PENDING,
PROCESSING,
COMPLETED,
DELETED
}
}

View File

@ -1,46 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2017 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.experimental.autoingest;
/**
* Exception thrown when a manifest node contains incompatible data.
*/
public class AutoIngestJobDataException extends Exception {
/**
* Constructs an exception thrown when a manifest node contains incompatible
* data.
*
* @param message An error message.
*/
public AutoIngestJobDataException(String message) {
super(message);
}
/**
* Constructs an exception thrown when a manifest node contains incompatible
* data.
*
* @param message An error message.
* @param cause An exception that caused this exception to be thrown.
*/
public AutoIngestJobDataException(String message, Throwable cause) {
super(message, cause);
}
}

View File

@ -0,0 +1,570 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2017 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.experimental.autoingest;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Date;
import javax.lang.model.type.TypeKind;
/**
* An object that converts auto ingest job data for an auto ingest job
* coordination service node to and from byte arrays.
*/
final class AutoIngestJobNodeData {
private static final int CURRENT_VERSION = 1;
private static final int MAX_POSSIBLE_NODE_DATA_SIZE = 131629;
private static final int DEFAULT_PRIORITY = 0;
/*
* Version 0 fields.
*/
private int processingStatus;
private int priority;
private int numberOfCrashes;
private long completedDate;
private boolean errorsOccurred;
/*
* Version 1 fields.
*/
private int version;
private String manifestFilePath;
private long manifestFileDate;
private String caseName;
private String deviceId;
private String dataSourcePath;
private String caseDirectoryPath;
private String processingHostName;
private byte processingStage;
private long processingStageStartDate;
private String processingStageDetailsDescription;
private long processingStageDetailsStartDate;
/**
* Uses an auto ingest job to construct an object that converts auto ingest
* job data for an auto ingest job coordination service node to and from
* byte arrays.
*
* @param job The job.
*/
AutoIngestJobNodeData(AutoIngestJob job) {
setProcessingStatus(job.getProcessingStatus());
setPriority(job.getPriority());
setNumberOfCrashes(job.getNumberOfCrashes());
setCompletedDate(job.getCompletedDate());
setErrorsOccurred(job.getErrorsOccurred());
this.version = CURRENT_VERSION;
Manifest manifest = job.getManifest();
setManifestFilePath(manifest.getFilePath());
setManifestFileDate(manifest.getDateFileCreated());
setCaseName(manifest.getCaseName());
setDeviceId(manifest.getDeviceId());
setDataSourcePath(manifest.getDataSourcePath());
setCaseDirectoryPath(job.getCaseDirectoryPath());
setProcessingHostName(job.getProcessingHostName());
setProcessingStage(job.getProcessingStage());
setProcessingStageStartDate(job.getProcessingStageStartDate());
setProcessingStageDetails(job.getStageDetails());
}
/**
* Uses coordination service node data to construct an object that
* converts auto ingest job data for an auto ingest job coordination service
* node to and from byte arrays.
*
* @param nodeData The raw bytes received from the coordination service.
*/
AutoIngestJobNodeData(byte[] nodeData) throws InvalidDataException {
if (null == nodeData || nodeData.length == 0) {
throw new InvalidDataException(null == nodeData ? "Null nodeData byte array" : "Zero-length nodeData byte array");
}
/*
* Set default values for all fields.
*/
this.processingStatus = AutoIngestJob.ProcessingStatus.PENDING.ordinal();
this.priority = DEFAULT_PRIORITY;
this.numberOfCrashes = 0;
this.completedDate = 0L;
this.errorsOccurred = false;
this.version = CURRENT_VERSION;
this.manifestFilePath = "";
this.manifestFileDate = 0L;
this.caseName = "";
this.deviceId = "";
this.dataSourcePath = "";
this.caseDirectoryPath = "";
this.processingHostName = "";
this.processingStage = (byte) AutoIngestJob.Stage.PENDING.ordinal();
this.processingStageStartDate = 0L;
this.processingStageDetailsDescription = "";
this.processingStageDetailsStartDate = 0L;
/*
* Get fields from node data.
*/
ByteBuffer buffer = ByteBuffer.wrap(nodeData);
try {
if (buffer.hasRemaining()) {
/*
* Get version 0 fields.
*/
this.processingStatus = buffer.getInt();
this.priority = buffer.getInt();
this.numberOfCrashes = buffer.getInt();
this.completedDate = buffer.getLong();
int errorFlag = buffer.getInt();
this.errorsOccurred = (1 == errorFlag);
}
if (buffer.hasRemaining()) {
/*
* Get version 1 fields.
*/
this.version = buffer.getInt();
this.deviceId = getStringFromBuffer(buffer, TypeKind.BYTE);
this.caseName = getStringFromBuffer(buffer, TypeKind.BYTE);
this.caseDirectoryPath = getStringFromBuffer(buffer, TypeKind.SHORT);
this.manifestFileDate = buffer.getLong();
this.manifestFilePath = getStringFromBuffer(buffer, TypeKind.SHORT);
this.dataSourcePath = getStringFromBuffer(buffer, TypeKind.SHORT);
this.processingStage = buffer.get();
this.processingStageStartDate = buffer.getLong();
this.processingStageDetailsDescription = getStringFromBuffer(buffer, TypeKind.BYTE);
this.processingStageDetailsStartDate = buffer.getLong();
this.processingHostName = getStringFromBuffer(buffer, TypeKind.SHORT);
}
} catch (BufferUnderflowException ex) {
throw new InvalidDataException("Node data is incomplete", ex);
}
}
/**
* Gets the processing status of the job.
*
* @return The processing status.
*/
AutoIngestJob.ProcessingStatus getProcessingStatus() {
return AutoIngestJob.ProcessingStatus.values()[this.processingStatus];
}
/**
* Sets the processing status of the job.
*
* @param processingStatus The processing status.
*/
void setProcessingStatus(AutoIngestJob.ProcessingStatus processingStatus) {
this.processingStatus = processingStatus.ordinal();
}
/**
* Gets the priority of the job.
*
* @return The priority.
*/
int getPriority() {
return this.priority;
}
/**
* Sets the priority of the job. A higher number indicates a higher
* priority.
*
* @param priority The priority.
*/
void setPriority(int priority) {
this.priority = priority;
}
/**
* Gets the number of times the job has crashed during processing.
*
* @return The number of crashes.
*/
int getNumberOfCrashes() {
return this.numberOfCrashes;
}
/**
* Sets the number of times the job has crashed during processing.
*
* @param numberOfCrashes The number of crashes.
*/
void setNumberOfCrashes(int numberOfCrashes) {
this.numberOfCrashes = numberOfCrashes;
}
/**
* Gets the date the job was completed. A completion date equal to the epoch
* (January 1, 1970, 00:00:00 GMT), i.e., Date.getTime() returns 0L,
* indicates the job has not been completed.
*
* @return The job completion date.
*/
Date getCompletedDate() {
return new Date(this.completedDate);
}
/**
* Sets the date the job was completed. A completion date equal to the epoch
* (January 1, 1970, 00:00:00 GMT), i.e., Date.getTime() returns 0L,
* indicates the job has not been completed.
*
* @param completedDate The job completion date.
*/
void setCompletedDate(Date completedDate) {
this.completedDate = completedDate.getTime();
}
/**
* Gets whether or not any errors occurred during the processing of the job.
*
* @return True or false.
*/
boolean getErrorsOccurred() {
return this.errorsOccurred;
}
/**
* Sets whether or not any errors occurred during the processing of the job.
*
* @param errorsOccurred True or false.
*/
void setErrorsOccurred(boolean errorsOccurred) {
this.errorsOccurred = errorsOccurred;
}
/**
* Gets the node data version number.
*
* @return The version number.
*/
int getVersion() {
return this.version;
}
/**
* Gets the device ID of the device associated with the data source for the
* job.
*
* @return The device ID.
*/
String getDeviceId() {
return this.deviceId;
}
/**
* Sets the device ID of the device associated with the data source for the
* job.
*
* @param deviceId The device ID.
*/
void setDeviceId(String deviceId) {
this.deviceId = deviceId;
}
/**
* Gets the case name.
*
* @return The case name.
*/
String getCaseName() {
return this.caseName;
}
/**
* Sets the case name.
*
* @param caseName The case name.
*/
void setCaseName(String caseName) {
this.caseName = caseName;
}
/**
* Sets the path to the case directory of the case associated with the job.
*
* @param caseDirectoryPath The path to the case directory.
*/
synchronized void setCaseDirectoryPath(Path caseDirectoryPath) {
if (caseDirectoryPath == null) {
this.caseDirectoryPath = "";
} else {
this.caseDirectoryPath = caseDirectoryPath.toString();
}
}
/**
* Gets the path to the case directory of the case associated with the job.
*
* @return The case directory path or null if the case directory has not
* been created yet.
*/
synchronized Path getCaseDirectoryPath() {
if (!caseDirectoryPath.isEmpty()) {
return Paths.get(caseDirectoryPath);
} else {
return null;
}
}
/**
* Gets the date the manifest was created.
*
* @return The date the manifest was created.
*/
Date getManifestFileDate() {
return new Date(this.manifestFileDate);
}
/**
* Sets the date the manifest was created.
*
* @param manifestFileDate The date the manifest was created.
*/
void setManifestFileDate(Date manifestFileDate) {
this.manifestFileDate = manifestFileDate.getTime();
}
/**
* Gets the manifest file path.
*
* @return The manifest file path.
*/
Path getManifestFilePath() {
return Paths.get(this.manifestFilePath);
}
/**
* Sets the manifest file path.
*
* @param manifestFilePath The manifest file path.
*/
void setManifestFilePath(Path manifestFilePath) {
if (manifestFilePath != null) {
this.manifestFilePath = manifestFilePath.toString();
} else {
this.manifestFilePath = "";
}
}
/**
* Gets the path of the data source for the job.
*
* @return The data source path.
*/
Path getDataSourcePath() {
return Paths.get(dataSourcePath);
}
/**
* Get the file name portion of the path of the data source for the job.
*
* @return The data source file name.
*/
public String getDataSourceFileName() {
return Paths.get(dataSourcePath).getFileName().toString();
}
/**
* Sets the path of the data source for the job.
*
* @param dataSourcePath The data source path.
*/
void setDataSourcePath(Path dataSourcePath) {
if (dataSourcePath != null) {
this.dataSourcePath = dataSourcePath.toString();
} else {
this.dataSourcePath = "";
}
}
/**
* Get the processing stage of the job.
*
* @return The processing stage.
*/
AutoIngestJob.Stage getProcessingStage() {
return AutoIngestJob.Stage.values()[this.processingStage];
}
/**
* Sets the processing stage of the job.
*
* @param processingStage The processing stage.
*/
void setProcessingStage(AutoIngestJob.Stage processingStage) {
this.processingStage = (byte) processingStage.ordinal();
}
/**
* Gets the processing stage start date.
*
* @return The processing stage start date.
*/
Date getProcessingStageStartDate() {
return new Date(this.processingStageStartDate);
}
/**
* Sets the processing stage start date.
*
* @param processingStageStartDate The processing stage start date.
*/
void setProcessingStageStartDate(Date processingStageStartDate) {
this.processingStageStartDate = processingStageStartDate.getTime();
}
/**
* Get the processing stage details.
*
* @return A processing stage details object.
*/
AutoIngestJob.StageDetails getProcessingStageDetails() {
return new AutoIngestJob.StageDetails(this.processingStageDetailsDescription, new Date(this.processingStageDetailsStartDate));
}
/**
* Sets the details of the current processing stage.
*
* @param stageDetails A stage details object.
*/
void setProcessingStageDetails(AutoIngestJob.StageDetails stageDetails) {
this.processingStageDetailsDescription = stageDetails.getDescription();
this.processingStageDetailsStartDate = stageDetails.getStartDate().getTime();
}
/**
* Gets the processing host name, may be the empty string.
*
* @return The processing host. The empty string if the job is not currently
* being processed.
*/
String getProcessingHostName() {
return this.processingHostName;
}
/**
* Sets the processing host name. May be the empty string.
*
* @param processingHost The processing host name. The empty string if the
* job is not currently being processed.
*/
void setProcessingHostName(String processingHost) {
this.processingHostName = processingHost;
}
/**
* Gets the node data as a byte array that can be sent to the coordination
* service.
*
* @return The node data as a byte array.
*/
byte[] toArray() {
ByteBuffer buffer = ByteBuffer.allocate(MAX_POSSIBLE_NODE_DATA_SIZE);
// Write data (compatible with version 0)
buffer.putInt(this.processingStatus);
buffer.putInt(this.priority);
buffer.putInt(this.numberOfCrashes);
buffer.putLong(this.completedDate);
buffer.putInt(this.errorsOccurred ? 1 : 0);
if (this.version > 0) {
// Write version
buffer.putInt(this.version);
// Write data
putStringIntoBuffer(deviceId, buffer, TypeKind.BYTE);
putStringIntoBuffer(caseName, buffer, TypeKind.BYTE);
putStringIntoBuffer(caseDirectoryPath, buffer, TypeKind.SHORT);
buffer.putLong(this.manifestFileDate);
putStringIntoBuffer(manifestFilePath, buffer, TypeKind.SHORT);
putStringIntoBuffer(dataSourcePath, buffer, TypeKind.SHORT);
buffer.put(this.processingStage);
buffer.putLong(this.processingStageStartDate);
putStringIntoBuffer(this.processingStageDetailsDescription, buffer, TypeKind.BYTE);
buffer.putLong(this.processingStageDetailsStartDate);
putStringIntoBuffer(processingHostName, buffer, TypeKind.SHORT);
}
// Prepare the array
byte[] array = new byte[buffer.position()];
buffer.rewind();
buffer.get(array, 0, array.length);
return array;
}
// DLG: Document what is going on here and how the max buffer size constant is calculated.
private String getStringFromBuffer(ByteBuffer buffer, TypeKind lengthType) {
int length = 0;
String output = "";
switch (lengthType) {
case BYTE:
length = buffer.get();
break;
case SHORT:
length = buffer.getShort();
break;
}
if (length > 0) {
byte[] array = new byte[length];
buffer.get(array, 0, length);
output = new String(array);
}
return output;
}
// DLG: Document what is going on here and how the max buffer size constant is calculated.
private void putStringIntoBuffer(String stringValue, ByteBuffer buffer, TypeKind lengthType) {
switch (lengthType) {
case BYTE:
buffer.put((byte) stringValue.length());
break;
case SHORT:
buffer.putShort((short) stringValue.length());
break;
}
buffer.put(stringValue.getBytes());
}
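    /*
     * Illustrative sketch only, not part of this commit: round-trips a single
     * string through the length-prefix scheme used by the two methods above,
     * i.e., a one-byte (TypeKind.BYTE) or two-byte (TypeKind.SHORT) length
     * followed by the raw string bytes. The method name is hypothetical.
     */
    private static String roundTripByteLengthString(String value) {
        byte[] raw = value.getBytes();
        ByteBuffer buffer = ByteBuffer.allocate(1 + raw.length);
        buffer.put((byte) raw.length);        // one-byte length prefix (TypeKind.BYTE case)
        buffer.put(raw);                      // raw string bytes
        buffer.flip();                        // switch the buffer from writing to reading
        int length = buffer.get();            // read the length prefix back
        byte[] bytes = new byte[length];
        buffer.get(bytes, 0, length);
        return new String(bytes);             // roundTripByteLengthString("CaseA") yields "CaseA"
    }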
final static class InvalidDataException extends Exception {
private static final long serialVersionUID = 1L;
private InvalidDataException(String message) {
super(message);
}
private InvalidDataException(String message, Throwable cause) {
super(message, cause);
}
}
}
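
A minimal sketch of the round trip this class supports, assuming same-package access and a job whose case directory path has been set (the AutoIngestJob constructor above calls toString() on it); the helper class and method names are hypothetical and not part of this commit.

package org.sleuthkit.autopsy.experimental.autoingest;

// Hypothetical illustration of writing job state to, and restoring it from, a coordination service node payload.
final class NodeDataRoundTripSketch {

    static AutoIngestJob roundTrip(AutoIngestJob job) throws AutoIngestJobNodeData.InvalidDataException {
        AutoIngestJobNodeData outgoing = new AutoIngestJobNodeData(job);    // capture the job's current state
        byte[] bytes = outgoing.toArray();                                  // version 0 fields first, then the version 1 fields
        AutoIngestJobNodeData incoming = new AutoIngestJobNodeData(bytes);  // what another node would parse from the payload
        return new AutoIngestJob(incoming);                                 // rebuild an equivalent job from the node data
    }
}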

View File

@ -61,7 +61,6 @@ import java.util.stream.Collectors;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.Immutable;
import javax.annotation.concurrent.ThreadSafe;
import org.openide.util.Exceptions;
import org.openide.util.Lookup;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.Case.CaseType;
@ -85,11 +84,10 @@ import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestAlertFile.AutoIng
import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobLogger.AutoIngestJobLoggerException;
import org.sleuthkit.autopsy.experimental.autoingest.FileExporter.FileExportException;
import org.sleuthkit.autopsy.experimental.autoingest.ManifestFileParser.ManifestFileParserException;
import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobData.ProcessingStatus;
import static org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobData.ProcessingStatus.COMPLETED;
import static org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobData.ProcessingStatus.DELETED;
import static org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobData.ProcessingStatus.PENDING;
import static org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobData.ProcessingStatus.PROCESSING;
import static org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJob.ProcessingStatus.COMPLETED;
import static org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJob.ProcessingStatus.DELETED;
import static org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJob.ProcessingStatus.PENDING;
import static org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJob.ProcessingStatus.PROCESSING;
import org.sleuthkit.autopsy.experimental.configuration.AutoIngestUserPreferences;
import org.sleuthkit.autopsy.experimental.configuration.SharedConfiguration;
import org.sleuthkit.autopsy.experimental.configuration.SharedConfiguration.SharedConfigurationException;
@ -125,7 +123,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
private static int DEFAULT_JOB_PRIORITY = 0;
private static final String AUTO_INGEST_THREAD_NAME = "AIM-job-processing-%d";
private static final String LOCAL_HOST_NAME = NetworkUtils.getLocalHostName();
private static final String EVENT_CHANNEL_NAME = "Auto-Ingest-Events";
private static final String EVENT_CHANNEL_NAME = "Auto-Ingest-Manager-Events";
private static final Set<String> EVENT_LIST = new HashSet<>(Arrays.asList(new String[]{
Event.JOB_STATUS_UPDATED.toString(),
Event.JOB_COMPLETED.toString(),
@ -206,22 +204,22 @@ public final class AutoIngestManager extends Observable implements PropertyChang
/**
* Starts up auto ingest.
*
* @throws AutoIngestManagerStartupException if there is a problem starting
* auto ingest.
* @throws AutoIngestManagerException if there is a problem starting auto
* ingest.
*/
void startUp() throws AutoIngestManagerStartupException {
void startUp() throws AutoIngestManagerException {
SYS_LOGGER.log(Level.INFO, "Auto ingest starting");
try {
coordinationService = CoordinationService.getInstance();
} catch (CoordinationServiceException ex) {
throw new AutoIngestManagerStartupException("Failed to get coordination service", ex);
throw new AutoIngestManagerException("Failed to get coordination service", ex);
}
try {
eventPublisher.openRemoteEventChannel(EVENT_CHANNEL_NAME);
SYS_LOGGER.log(Level.INFO, "Opened auto ingest event channel");
} catch (AutopsyEventException ex) {
SYS_LOGGER.log(Level.SEVERE, "Failed to open auto ingest event channel", ex);
throw new AutoIngestManagerStartupException("Failed to open auto ingest event channel", ex);
throw new AutoIngestManagerException("Failed to open auto ingest event channel", ex);
}
rootInputDirectory = Paths.get(AutoIngestUserPreferences.getAutoModeImageFolder());
rootOutputDirectory = Paths.get(AutoIngestUserPreferences.getAutoModeResultsFolder());
@ -287,7 +285,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
* @param event A job started from another auto ingest node.
*/
private void handleRemoteJobStartedEvent(AutoIngestJobStartedEvent event) {
String hostName = event.getJob().getNodeName();
String hostName = event.getJob().getProcessingHostName();
hostNamesToLastMsgTime.put(hostName, Instant.now());
synchronized (jobsLock) {
Path manifestFilePath = event.getJob().getManifest().getFilePath();
@ -314,9 +312,17 @@ public final class AutoIngestManager extends Observable implements PropertyChang
* @param event An job status event from another auto ingest node.
*/
private void handleRemoteJobStatusEvent(AutoIngestJobStatusEvent event) {
String hostName = event.getJob().getNodeName();
AutoIngestJob job = event.getJob();
for (Iterator<AutoIngestJob> iterator = pendingJobs.iterator(); iterator.hasNext();) {
AutoIngestJob pendingJob = iterator.next();
if (job.equals(pendingJob)) {
iterator.remove();
break;
}
}
String hostName = job.getProcessingHostName();
hostNamesToLastMsgTime.put(hostName, Instant.now());
hostNamesToRunningJobs.put(hostName, event.getJob());
hostNamesToRunningJobs.put(hostName, job);
setChanged();
notifyObservers(Event.JOB_STATUS_UPDATED);
}
@ -332,7 +338,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
* @param event An job completed event from another auto ingest node.
*/
private void handleRemoteJobCompletedEvent(AutoIngestJobCompletedEvent event) {
String hostName = event.getJob().getNodeName();
String hostName = event.getJob().getProcessingHostName();
hostNamesToLastMsgTime.put(hostName, Instant.now());
hostNamesToRunningJobs.remove(hostName);
if (event.shouldRetry() == false) {
@ -340,7 +346,6 @@ public final class AutoIngestManager extends Observable implements PropertyChang
completedJobs.add(event.getJob());
}
}
//scanInputDirsNow();
setChanged();
notifyObservers(Event.JOB_COMPLETED);
}
@ -463,7 +468,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
}
for (AutoIngestJob job : hostNamesToRunningJobs.values()) {
runningJobs.add(job);
runningJobs.sort(new AutoIngestJob.AlphabeticalComparator());
runningJobs.sort(new AutoIngestJob.CaseNameAndProcessingHostComparator());
}
}
if (null != completedJobs) {
@ -517,12 +522,17 @@ public final class AutoIngestManager extends Observable implements PropertyChang
jobProcessingTask.requestResume();
}
/**
* Bumps the priority of all pending ingest jobs for a specified case.
*
* @param caseName The name of the case to be prioritized.
*
* @throws AutoIngestManagerException If there is an error bumping the
* priority of the jobs for the case.
*/
void prioritizeCase(final String caseName) {
void prioritizeCase(final String caseName) throws AutoIngestManagerException {
if (state != State.RUNNING) {
return;
@ -542,19 +552,12 @@ public final class AutoIngestManager extends Observable implements PropertyChang
if (!prioritizedJobs.isEmpty()) {
++maxPriority;
for (AutoIngestJob job : prioritizedJobs) {
String manifestNodePath = job.getManifest().getFilePath().toString();
try {
AutoIngestJobData nodeData = new AutoIngestJobData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath));
nodeData.setPriority(maxPriority);
coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath, nodeData.toArray());
} catch (AutoIngestJobDataException ex) {
SYS_LOGGER.log(Level.WARNING, String.format("Unable to use node data for %s", manifestNodePath), ex);
} catch (CoordinationServiceException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Coordination service error while prioritizing %s", manifestNodePath), ex);
} catch (InterruptedException ex) {
SYS_LOGGER.log(Level.SEVERE, "Unexpected interrupt while updating coordination service node data for {0}", manifestNodePath);
this.updateCoordinationServiceNode(job);
job.setPriority(maxPriority);
} catch (CoordinationServiceException | InterruptedException ex) {
throw new AutoIngestManagerException("Error updating case priority", ex);
}
job.setPriority(maxPriority);
}
}
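
For reference, a minimal, self-contained sketch of the prioritization pattern used by prioritizeCase and prioritizeJob here: scan the pending queue for the current maximum priority, bump it by one, persist the change, and apply it in memory. The Job class and persist step below are hypothetical stand-ins for AutoIngestJob and the coordination service update, not the real API.

    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical stand-in for AutoIngestJob; not the Autopsy class.
    final class Job {
        final String caseName;
        int priority;
        Job(String caseName, int priority) { this.caseName = caseName; this.priority = priority; }
    }

    final class PrioritizationSketch {
        // Bump every pending job for the given case above the current maximum priority.
        static void prioritizeCase(List<Job> pendingJobs, String caseName) {
            int maxPriority = 0;
            List<Job> prioritizedJobs = new ArrayList<>();
            for (Job job : pendingJobs) {
                if (job.priority > maxPriority) {
                    maxPriority = job.priority;
                }
                if (job.caseName.equals(caseName)) {
                    prioritizedJobs.add(job);
                }
            }
            if (!prioritizedJobs.isEmpty()) {
                ++maxPriority;
                for (Job job : prioritizedJobs) {
                    job.priority = maxPriority;
                    persist(job); // stand-in for updateCoordinationServiceNode(job)
                }
            }
        }

        // Placeholder for writing the job's node data to the coordination service.
        private static void persist(Job job) {
        }
    }
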
@ -572,8 +575,11 @@ public final class AutoIngestManager extends Observable implements PropertyChang
* Bumps the priority of an auto ingest job.
*
* @param manifestPath The manifest file path for the job to be prioritized.
*
* @throws AutoIngestManagerException If there is an error bumping the
* priority of the job.
*/
void prioritizeJob(Path manifestPath) {
void prioritizeJob(Path manifestPath) throws AutoIngestManagerException {
if (state != State.RUNNING) {
return;
}
@ -581,6 +587,10 @@ public final class AutoIngestManager extends Observable implements PropertyChang
int maxPriority = 0;
AutoIngestJob prioritizedJob = null;
synchronized (jobsLock) {
/*
* Find the job in the pending jobs list and record the highest
* existing priority.
*/
for (AutoIngestJob job : pendingJobs) {
if (job.getPriority() > maxPriority) {
maxPriority = job.getPriority();
@ -589,19 +599,17 @@ public final class AutoIngestManager extends Observable implements PropertyChang
prioritizedJob = job;
}
}
/*
* Bump the priority by one and update the coordination service node
* data for the job.
*/
if (null != prioritizedJob) {
++maxPriority;
String manifestNodePath = prioritizedJob.getManifest().getFilePath().toString();
try {
AutoIngestJobData nodeData = new AutoIngestJobData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath));
nodeData.setPriority(maxPriority);
coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath, nodeData.toArray());
} catch (AutoIngestJobDataException ex) {
SYS_LOGGER.log(Level.WARNING, String.format("Unable to use node data for %s", manifestPath), ex);
} catch (CoordinationServiceException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Coordination service error while prioritizing %s", manifestNodePath), ex);
} catch (InterruptedException ex) {
SYS_LOGGER.log(Level.SEVERE, "Unexpected interrupt while updating coordination service node data for {0}", manifestNodePath);
this.updateCoordinationServiceNode(prioritizedJob);
} catch (CoordinationServiceException | InterruptedException ex) {
throw new AutoIngestManagerException("Error updating job priority", ex);
}
prioritizedJob.setPriority(maxPriority);
}
@ -626,6 +634,9 @@ public final class AutoIngestManager extends Observable implements PropertyChang
void reprocessJob(Path manifestPath) {
AutoIngestJob completedJob = null;
synchronized (jobsLock) {
/*
* Find the job in the completed jobs list.
*/
for (Iterator<AutoIngestJob> iterator = completedJobs.iterator(); iterator.hasNext();) {
AutoIngestJob job = iterator.next();
if (job.getManifest().getFilePath().equals(manifestPath)) {
@ -635,11 +646,17 @@ public final class AutoIngestManager extends Observable implements PropertyChang
}
}
/*
* Add the job to the pending jobs queue and update the coordination
* service node data for the job.
*/
if (null != completedJob && null != completedJob.getCaseDirectoryPath()) {
try {
AutoIngestJobData nodeData = new AutoIngestJobData(completedJob.getManifest(), PENDING, DEFAULT_JOB_PRIORITY, 0, new Date(0), true);
coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString(), nodeData.toArray());
pendingJobs.add(new AutoIngestJob(nodeData));
completedJob.setErrorsOccurred(false);
completedJob.setCompletedDate(new Date(0));
completedJob.setPriority(DEFAULT_JOB_PRIORITY);
updateCoordinationServiceNode(completedJob);
pendingJobs.add(completedJob);
} catch (CoordinationServiceException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Coordination service error while reprocessing %s", manifestPath), ex);
completedJobs.add(completedJob);
@ -729,11 +746,13 @@ public final class AutoIngestManager extends Observable implements PropertyChang
*/
for (Path manifestPath : manifestPaths) {
try {
AutoIngestJobData nodeData = new AutoIngestJobData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString()));
nodeData.setProcessingStatus(AutoIngestJobData.ProcessingStatus.DELETED);
coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString(), nodeData.toArray());
} catch (AutoIngestJobDataException ex) {
SYS_LOGGER.log(Level.WARNING, String.format("Unable to use node data for %s", manifestPath), ex);
AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString()));
AutoIngestJob deletedJob = new AutoIngestJob(nodeData);
deletedJob.setProcessingStatus(AutoIngestJob.ProcessingStatus.DELETED);
this.updateCoordinationServiceNode(deletedJob);
} catch (AutoIngestJobNodeData.InvalidDataException ex) {
SYS_LOGGER.log(Level.WARNING, String.format("Invalid auto ingest job node data for %s", manifestPath), ex);
return CaseDeletionResult.PARTIALLY_DELETED;
} catch (InterruptedException | CoordinationServiceException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set delete flag on manifest data for %s for case %s at %s", manifestPath, caseName, caseDirectoryPath), ex);
return CaseDeletionResult.PARTIALLY_DELETED;
@ -839,6 +858,22 @@ public final class AutoIngestManager extends Observable implements PropertyChang
}
}
/**
* Sets the coordination service node data for an auto ingest job.
*
* Note that a new auto ingest node data object will be created from the job
* passed in. Thus, if the data version of the node has changed, the node
* will be "upgraded" as well as updated.
*
* @param job The auto ingest job.
*/
void updateCoordinationServiceNode(AutoIngestJob job) throws CoordinationServiceException, InterruptedException {
AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(job);
String manifestNodePath = job.getManifest().getFilePath().toString();
byte[] rawData = nodeData.toArray();
coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath, rawData);
}
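
A minimal sketch of what this helper amounts to, with a plain in-memory byte store standing in for the coordination service; the record format, field names, and store are illustrative assumptions, not the AutoIngestJobNodeData layout.

    import java.nio.charset.StandardCharsets;
    import java.util.HashMap;
    import java.util.Map;

    // Stand-in node store keyed by manifest path; the real code writes to the
    // coordination service under the MANIFESTS category node.
    final class NodeStoreSketch {
        private final Map<String, byte[]> store = new HashMap<>();

        // Serialize the job's current state and overwrite the node. Because the whole
        // record is rewritten from the job, an older node data format is "upgraded" as
        // a side effect of any update.
        void updateNode(String manifestPath, String caseName, int priority, String status) {
            String record = String.format("version=2;case=%s;priority=%d;status=%s", caseName, priority, status);
            store.put(manifestPath, record.getBytes(StandardCharsets.UTF_8));
        }

        byte[] getNode(String manifestPath) {
            return store.get(manifestPath);
        }
    }
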
/**
* A task that submits an input directory scan task to the input directory
* scan task executor.
@ -1021,33 +1056,31 @@ public final class AutoIngestManager extends Observable implements PropertyChang
*/
try {
byte[] rawData = coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString());
if (null != rawData) {
if (null != rawData && rawData.length > 0) {
try {
AutoIngestJobData nodeData = new AutoIngestJobData(rawData);
if (nodeData.coordSvcNodeDataWasSet()) {
ProcessingStatus processingStatus = nodeData.getProcessingStatus();
switch (processingStatus) {
case PENDING:
addPendingJob(nodeData, manifest);
break;
case PROCESSING:
doRecoveryIfCrashed(nodeData, manifest);
break;
case COMPLETED:
addCompletedJob(nodeData, manifest);
break;
case DELETED:
// Do nothing - we don't want to add it to any job list or do recovery
break;
default:
SYS_LOGGER.log(Level.SEVERE, "Unknown ManifestNodeData.ProcessingStatus");
break;
}
} else {
addNewPendingJob(manifest);
AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(rawData);
AutoIngestJob.ProcessingStatus processingStatus = nodeData.getProcessingStatus();
switch (processingStatus) {
case PENDING:
addPendingJob(manifest, nodeData);
break;
case PROCESSING:
doRecoveryIfCrashed(manifest, nodeData);
break;
case COMPLETED:
addCompletedJob(manifest, nodeData);
break;
case DELETED:
/*
* Ignore jobs marked as "deleted."
*/
break;
default:
SYS_LOGGER.log(Level.SEVERE, "Unknown ManifestNodeData.ProcessingStatus");
break;
}
} catch (AutoIngestJobDataException ex) {
SYS_LOGGER.log(Level.WARNING, String.format("Unable to use node data for %s", manifestPath), ex);
} catch (AutoIngestJobNodeData.InvalidDataException ex) {
SYS_LOGGER.log(Level.WARNING, String.format("Invalid auto ingest job node data for %s", manifestPath), ex);
}
} else {
addNewPendingJob(manifest);
@ -1069,51 +1102,49 @@ public final class AutoIngestManager extends Observable implements PropertyChang
}
/**
* This method will push a node to the coordination service if it does
* not exist. If the node is an older version, it will be upgraded prior
* to being pushed.
*
* @param nodeData The node data to upgrade.
* @param manifest The manifest.
* @param caseDirectoryPath The case directory path.
* @param processingStage The processing stage.
*
* @throws CoordinationServiceException
* @throws InterruptedException
*/
private void pushNodeToCoordinationService(AutoIngestJobData nodeData, Manifest manifest, Path caseDirectoryPath, AutoIngestJob.Stage processingStage)
throws CoordinationServiceException, InterruptedException {
if (nodeData.getVersion() < 1) {
nodeData.upgradeNode(manifest, caseDirectoryPath, LOCAL_HOST_NAME, processingStage);
}
if (!nodeData.coordSvcNodeDataWasSet()) {
byte[] rawData = nodeData.toArray();
coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifest.getFilePath().toString(), rawData);
}
}
/**
* Adds a job to process a manifest to the pending jobs queue.
* Adds an existing job to the pending jobs queue.
*
* @param manifest The manifest for the job.
* @param nodeData The data stored in the coordination service node for
* the manifest.
* @param manifest The manifest for upgrading the node.
* the job.
*
* @throws CoordinationServiceException
* @throws InterruptedException
* @throws InterruptedException if the thread running the input
* directory scan task is interrupted while
* blocked, i.e., if auto ingest is
* shutting down.
*/
private void addPendingJob(AutoIngestJobData nodeData, Manifest manifest) throws CoordinationServiceException, InterruptedException {
Path caseDirectoryPath = PathUtils.findCaseDirectory(rootOutputDirectory, manifest.getCaseName());
nodeData.setCompletedDate(new Date(0));
nodeData.setErrorsOccurred(false);
pushNodeToCoordinationService(nodeData, manifest, caseDirectoryPath, AutoIngestJob.Stage.PENDING);
newPendingJobsList.add(new AutoIngestJob(nodeData));
private void addPendingJob(Manifest manifest, AutoIngestJobNodeData nodeData) throws InterruptedException {
AutoIngestJob job = new AutoIngestJob(manifest);
job.setPriority(nodeData.getPriority());
Path caseDirectory = PathUtils.findCaseDirectory(rootOutputDirectory, manifest.getCaseName());
job.setCaseDirectoryPath(caseDirectory);
newPendingJobsList.add(job);
/*
* Try to upgrade/update the coordination service node data for the
* job.
*
* An exclusive lock is obtained before doing so because another
* host may have already found the job, obtained an exclusive lock,
* and started processing it. However, this locking does make it
* possible that two hosts will both try to obtain the lock to do
* the upgrade/update operation at the same time. If this happens,
* the host that is holding the lock will complete the
* update/upgrade operation, so there is nothing more to do.
*/
try (Lock manifestLock = coordinationService.tryGetExclusiveLock(CoordinationService.CategoryNode.MANIFESTS, manifest.getFilePath().toString())) {
if (null != manifestLock) {
AutoIngestManager.this.updateCoordinationServiceNode(job);
}
} catch (CoordinationServiceException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set node data for %s", manifest.getFilePath()), ex);
}
}
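
The comment above describes a best-effort, lock-guarded write: try to take the exclusive lock, update only if it was obtained, and treat losing the race as nothing left to do. A self-contained sketch of that shape, using a ReentrantLock as a stand-in for the coordination service lock (tryGetExclusiveLock here is not the real API):

    import java.util.concurrent.locks.ReentrantLock;

    final class TryLockSketch {
        private final ReentrantLock lock = new ReentrantLock();

        // Returns an AutoCloseable "lock handle" if the lock was obtained, or null if
        // another worker already holds it, mirroring tryGetExclusiveLock() returning null.
        AutoCloseable tryGetExclusiveLock() {
            if (lock.tryLock()) {
                return lock::unlock;
            }
            return null;
        }

        // Update the shared record only when this host wins the lock; a null resource in
        // try-with-resources is skipped on close, so losing the race is a silent no-op.
        void updateIfLockObtained(Runnable update) throws Exception {
            try (AutoCloseable manifestLock = tryGetExclusiveLock()) {
                if (manifestLock != null) {
                    update.run();
                }
            }
        }
    }
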
/**
* Adds a job to process a manifest to the pending jobs queue.
* Adds a new job to the pending jobs queue.
*
* @param manifest The manifest.
* @param manifest The manifest for the job.
*
* @throws InterruptedException if the thread running the input
* directory scan task is interrupted while
@ -1121,13 +1152,25 @@ public final class AutoIngestManager extends Observable implements PropertyChang
* shutting down.
*/
private void addNewPendingJob(Manifest manifest) throws InterruptedException {
// TODO (JIRA-1960): This is something of a hack, grabbing the lock to create the node.
// Is use of Curator.create().forPath() possible instead?
/*
* Create the coordination service node data for the job. Note that
* getting the lock will create the node for the job (with no data)
* if it does not already exist.
*
* An exclusive lock is obtained before creating the node data
* because another host may have already found the job, obtained an
* exclusive lock, and started processing it. However, this locking
* does make it possible that two hosts will both try to obtain the
* lock to do the create operation at the same time. If this
* happens, the host that is locked out will not add the job to its
* pending queue for this scan of the input directory, but it will
* be picked up on the next scan.
*/
try (Lock manifestLock = coordinationService.tryGetExclusiveLock(CoordinationService.CategoryNode.MANIFESTS, manifest.getFilePath().toString())) {
if (null != manifestLock) {
AutoIngestJobData newNodeData = new AutoIngestJobData(manifest, PENDING, DEFAULT_JOB_PRIORITY, 0, new Date(0), false);
pushNodeToCoordinationService(newNodeData, manifest, null, AutoIngestJob.Stage.PENDING);
newPendingJobsList.add(new AutoIngestJob(newNodeData));
AutoIngestJob job = new AutoIngestJob(manifest);
AutoIngestManager.this.updateCoordinationServiceNode(job);
newPendingJobsList.add(job);
}
} catch (CoordinationServiceException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set node data for %s", manifest.getFilePath()), ex);
@ -1142,67 +1185,87 @@ public final class AutoIngestManager extends Observable implements PropertyChang
* the node that was processing the job crashed and the processing
* status was not updated.
*
* @param nodeData The node data.
* @param manifest The manifest for upgrading the node.
* @param nodeData The node data.
*
* @throws InterruptedException if the thread running the input
* directory scan task is interrupted while
* blocked, i.e., if auto ingest is
* shutting down.
*/
private void doRecoveryIfCrashed(AutoIngestJobData nodeData, Manifest manifest) throws InterruptedException {
String manifestPath = nodeData.getManifestFilePath().toString();
try {
Lock manifestLock = coordinationService.tryGetExclusiveLock(CoordinationService.CategoryNode.MANIFESTS, manifestPath);
private void doRecoveryIfCrashed(Manifest manifest, AutoIngestJobNodeData nodeData) throws InterruptedException {
/*
* Try to get an exclusive lock on the coordination service node for
* the job. If the lock cannot be obtained, another host in the auto
* ingest cluster is already doing the recovery.
*/
String manifestPath = manifest.getFilePath().toString();
try (Lock manifestLock = coordinationService.tryGetExclusiveLock(CoordinationService.CategoryNode.MANIFESTS, manifestPath)) {
if (null != manifestLock) {
SYS_LOGGER.log(Level.SEVERE, "Attempting crash recovery for {0}", manifestPath);
try {
if (nodeData.coordSvcNodeDataWasSet() && ProcessingStatus.PROCESSING == nodeData.getProcessingStatus()) {
SYS_LOGGER.log(Level.SEVERE, "Attempting crash recovery for {0}", manifestPath);
int numberOfCrashes = nodeData.getNumberOfCrashes();
++numberOfCrashes;
Path caseDirectoryPath = PathUtils.findCaseDirectory(rootOutputDirectory, manifest.getCaseName());
nodeData.setNumberOfCrashes(numberOfCrashes);
nodeData.setCompletedDate(new Date(0));
nodeData.setErrorsOccurred(true);
/*
* Create the recovery job.
*/
AutoIngestJob job = new AutoIngestJob(nodeData);
int numberOfCrashes = job.getNumberOfCrashes();
++numberOfCrashes;
job.setNumberOfCrashes(numberOfCrashes);
job.setCompletedDate(new Date(0));
Path caseDirectoryPath = PathUtils.findCaseDirectory(rootOutputDirectory, manifest.getCaseName());
if (null != caseDirectoryPath) {
job.setCaseDirectoryPath(caseDirectoryPath);
job.setErrorsOccurred(true);
} else {
job.setErrorsOccurred(false);
}
/*
* Update the coordination service node for the job. If
* this fails, leave the recovery to another host.
*/
try {
updateCoordinationServiceNode(job);
if (numberOfCrashes <= AutoIngestUserPreferences.getMaxNumTimesToProcessImage()) {
nodeData.setProcessingStatus(PENDING);
pushNodeToCoordinationService(nodeData, manifest, caseDirectoryPath, AutoIngestJob.Stage.PENDING);
newPendingJobsList.add(new AutoIngestJob(nodeData));
if (null != nodeData.getCaseDirectoryPath()) {
try {
AutoIngestAlertFile.create(nodeData.getCaseDirectoryPath());
} catch (AutoIngestAlertFileException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Error creating alert file for crashed job for %s", manifestPath), ex);
}
try {
new AutoIngestJobLogger(nodeData.getManifestFilePath(), nodeData.getDataSourceFileName(), caseDirectoryPath).logCrashRecoveryWithRetry();
} catch (AutoIngestJobLoggerException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Error creating case auto ingest log entry for crashed job for %s", manifestPath), ex);
}
}
newPendingJobsList.add(job);
} else {
nodeData.setProcessingStatus(COMPLETED);
pushNodeToCoordinationService(nodeData, manifest, caseDirectoryPath, AutoIngestJob.Stage.COMPLETED);
newCompletedJobsList.add(new AutoIngestJob(nodeData));
if (null != nodeData.getCaseDirectoryPath()) {
try {
AutoIngestAlertFile.create(nodeData.getCaseDirectoryPath());
} catch (AutoIngestAlertFileException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Error creating alert file for crashed job for %s", manifestPath), ex);
}
try {
new AutoIngestJobLogger(nodeData.getManifestFilePath(), nodeData.getDataSourceFileName(), nodeData.getCaseDirectoryPath()).logCrashRecoveryNoRetry();
} catch (AutoIngestJobLoggerException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Error creating case auto ingest log entry for crashed job for %s", manifestPath), ex);
}
}
}
} catch (CoordinationServiceException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set node data for %s", manifestPath), ex);
return;
}
/*
* Write the alert file and do the logging.
*/
if (null != caseDirectoryPath) {
try {
coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath, nodeData.toArray());
} catch (CoordinationServiceException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set node data for %s", manifestPath), ex);
AutoIngestAlertFile.create(nodeData.getCaseDirectoryPath());
} catch (AutoIngestAlertFileException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Error creating alert file for crashed job for %s", manifestPath), ex);
}
}
if (numberOfCrashes <= AutoIngestUserPreferences.getMaxNumTimesToProcessImage()) {
job.setProcessingStatus(AutoIngestJob.ProcessingStatus.PENDING);
if (null != caseDirectoryPath) {
try {
new AutoIngestJobLogger(manifest.getFilePath(), manifest.getDataSourceFileName(), caseDirectoryPath).logCrashRecoveryWithRetry();
} catch (AutoIngestJobLoggerException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Error creating case auto ingest log entry for crashed job for %s", manifestPath), ex);
}
}
} else {
job.setProcessingStatus(AutoIngestJob.ProcessingStatus.COMPLETED);
if (null != caseDirectoryPath) {
try {
new AutoIngestJobLogger(manifest.getFilePath(), manifest.getDataSourceFileName(), nodeData.getCaseDirectoryPath()).logCrashRecoveryNoRetry();
} catch (AutoIngestJobLoggerException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Error creating case auto ingest log entry for crashed job for %s", manifestPath), ex);
}
}
}
} finally {
try {
manifestLock.release();
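
A compact sketch of the retry decision in the recovery code above: count the crash, then either re-queue the job as pending for another attempt or mark it completed once the retry limit is reached. The limit below is an assumed placeholder for AutoIngestUserPreferences.getMaxNumTimesToProcessImage().

    final class CrashRecoverySketch {
        enum Status { PENDING, COMPLETED }

        private static final int MAX_ATTEMPTS = 2; // placeholder for the user preference

        // Decide what happens to a job whose processing host crashed.
        static Status recover(int previousCrashes) {
            int numberOfCrashes = previousCrashes + 1;
            if (numberOfCrashes <= MAX_ATTEMPTS) {
                return Status.PENDING;   // re-queue for another attempt
            }
            return Status.COMPLETED;     // retries exhausted; record as completed with errors
        }

        public static void main(String[] args) {
            System.out.println(recover(0)); // PENDING
            System.out.println(recover(2)); // COMPLETED
        }
    }
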
@ -1226,11 +1289,37 @@ public final class AutoIngestManager extends Observable implements PropertyChang
* @throws CoordinationServiceException
* @throws InterruptedException
*/
private void addCompletedJob(AutoIngestJobData nodeData, Manifest manifest) throws CoordinationServiceException, InterruptedException {
private void addCompletedJob(Manifest manifest, AutoIngestJobNodeData nodeData) throws CoordinationServiceException, InterruptedException {
Path caseDirectoryPath = PathUtils.findCaseDirectory(rootOutputDirectory, manifest.getCaseName());
if (null != caseDirectoryPath) {
pushNodeToCoordinationService(nodeData, manifest, caseDirectoryPath, AutoIngestJob.Stage.COMPLETED);
AutoIngestJob job = new AutoIngestJob(manifest);
job.setCaseDirectoryPath(caseDirectoryPath);
job.setProcessingStatus(AutoIngestJob.ProcessingStatus.COMPLETED);
job.setStage(AutoIngestJob.Stage.COMPLETED);
job.setCompletedDate(nodeData.getCompletedDate());
job.setErrorsOccurred(true);
newCompletedJobsList.add(new AutoIngestJob(nodeData));
/*
* Try to upgrade/update the coordination service node data for
* the job.
*
* An exclusive lock is obtained before doing so because another
* host may have already found the job, obtained an exclusive
* lock, and started processing it. However, this locking does
* make it possible that two hosts will both try to obtain the
* lock to do the upgrade/update operation at the same time. If
* this happens, the host that is holding the lock will complete
* the update/upgrade operation, so there is nothing more to do.
*/
try (Lock manifestLock = coordinationService.tryGetExclusiveLock(CoordinationService.CategoryNode.MANIFESTS, manifest.getFilePath().toString())) {
if (null != manifestLock) {
updateCoordinationServiceNode(job);
}
} catch (CoordinationServiceException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set node data for %s", manifest.getFilePath()), ex);
}
} else {
SYS_LOGGER.log(Level.WARNING, String.format("Job completed for %s, but cannot find case directory, ignoring job", nodeData.getManifestFilePath()));
}
@ -1355,7 +1444,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
if (jobProcessingTaskFuture.isCancelled()) {
break;
}
if (ex instanceof CoordinationServiceException) {
if (ex instanceof CoordinationServiceException || ex instanceof AutoIngestJobNodeData.InvalidDataException) {
errorState = ErrorState.COORDINATION_SERVICE_ERROR;
} else if (ex instanceof SharedConfigurationException) {
errorState = ErrorState.SHARED_CONFIGURATION_DOWNLOAD_ERROR;
@ -1521,42 +1610,60 @@ public final class AutoIngestManager extends Observable implements PropertyChang
/**
* Processes jobs until the pending jobs queue is empty.
*
* @throws CoordinationServiceException if there is an error
* acquiring or releasing
* coordination service locks
* or setting coordination
* service node data.
* @throws SharedConfigurationException if there is an error while
* downloading shared
* configuration.
* @throws ServicesMonitorException if there is an error
* querying the services
* monitor.
* @throws DatabaseServerDownException if the database server is
* down.
* @throws KeywordSearchServerDownException if the Solr server is down.
* @throws CaseManagementException if there is an error
* creating, opening or closing
* the case for the job.
* @throws AnalysisStartupException if there is an error
* starting analysis of the
* data source by the data
* source level and file level
* ingest modules.
* @throws FileExportException if there is an error
* exporting files.
* @throws AutoIngestAlertFileException if there is an error
* creating an alert file.
* @throws AutoIngestJobLoggerException if there is an error writing
* to the auto ingest log for
* the case.
* @throws InterruptedException if the thread running the
* job processing task is
* interrupted while blocked,
* i.e., if auto ingest is
* shutting down.
* @throws CoordinationServiceException if there is an
* error acquiring or
* releasing
* coordination
* service locks or
* setting
* coordination
* service node data.
* @throws SharedConfigurationException if there is an
* error while
* downloading shared
* configuration.
* @throws ServicesMonitorException if there is an
* error querying the
* services monitor.
* @throws DatabaseServerDownException if the database
* server is down.
* @throws KeywordSearchServerDownException if the Solr server
* is down.
* @throws CaseManagementException if there is an
* error creating,
* opening or closing
* the case for the
* job.
* @throws AnalysisStartupException if there is an
* error starting
* analysis of the
* data source by the
* data source level
* and file level
* ingest modules.
* @throws FileExportException if there is an
* error exporting
* files.
* @throws AutoIngestAlertFileException if there is an
* error creating an
* alert file.
* @throws AutoIngestJobLoggerException if there is an
* error writing to
* the auto ingest
* log for the case.
* @throws InterruptedException if the thread
* running the job
* processing task is
* interrupted while
* blocked, i.e., if
* auto ingest is
* shutting down.
* @throws AutoIngestJobNodeData.InvalidDataException if there is an
* error constructing
* auto ingest node
* data objects.
*/
private void processJobs() throws CoordinationServiceException, SharedConfigurationException, ServicesMonitorException, DatabaseServerDownException, KeywordSearchServerDownException, CaseManagementException, AnalysisStartupException, FileExportException, AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException {
private void processJobs() throws CoordinationServiceException, SharedConfigurationException, ServicesMonitorException, DatabaseServerDownException, KeywordSearchServerDownException, CaseManagementException, AnalysisStartupException, FileExportException, AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException, AutoIngestJobNodeData.InvalidDataException {
SYS_LOGGER.log(Level.INFO, "Started processing pending jobs queue");
Lock manifestLock = JobProcessingTask.this.dequeueAndLockNextJob();
while (null != manifestLock) {
@ -1565,8 +1672,6 @@ public final class AutoIngestManager extends Observable implements PropertyChang
return;
}
processJob();
} catch (AutoIngestJobDataException ex) {
SYS_LOGGER.log(Level.WARNING, String.format("Unable to use node data"), ex);
} finally {
manifestLock.release();
}
@ -1666,7 +1771,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
}
try {
AutoIngestJobData nodeData = new AutoIngestJobData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString()));
AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString()));
if (!nodeData.getProcessingStatus().equals(PENDING)) {
/*
* Due to a timing issue or a missed event, a
@ -1693,8 +1798,8 @@ public final class AutoIngestManager extends Observable implements PropertyChang
iterator.remove();
currentJob = job;
break;
} catch (AutoIngestJobDataException ex) {
SYS_LOGGER.log(Level.WARNING, String.format("Unable to use node data for %s", manifestPath), ex);
} catch (AutoIngestJobNodeData.InvalidDataException ex) {
SYS_LOGGER.log(Level.WARNING, String.format("Unable to use node data for %s", manifestPath), ex); // JCTODO: Is this right?
}
}
}
@ -1704,46 +1809,63 @@ public final class AutoIngestManager extends Observable implements PropertyChang
/**
* Processes an auto ingest job.
*
* @throws CoordinationServiceException if there is an error
* acquiring or releasing
* coordination service locks
* or setting coordination
* service node data.
* @throws SharedConfigurationException if there is an error while
* downloading shared
* configuration.
* @throws ServicesMonitorException if there is an error
* querying the services
* monitor.
* @throws DatabaseServerDownException if the database server is
* down.
* @throws KeywordSearchServerDownException if the Solr server is down.
* @throws CaseManagementException if there is an error
* creating, opening or closing
* the case for the job.
* @throws AnalysisStartupException if there is an error
* starting analysis of the
* data source by the data
* source level and file level
* ingest modules.
* @throws FileExportException if there is an error
* exporting files.
* @throws AutoIngestAlertFileException if there is an error
* creating an alert file.
* @throws AutoIngestJobLoggerException if there is an error writing
* to the auto ingest log for
* the case.
* @throws InterruptedException if the thread running the
* job processing task is
* interrupted while blocked,
* i.e., if auto ingest is
* shutting down.
* @throws CoordinationServiceException if there is an
* error acquiring or
* releasing
* coordination
* service locks or
* setting
* coordination
* service node data.
* @throws SharedConfigurationException if there is an
* error while
* downloading shared
* configuration.
* @throws ServicesMonitorException if there is an
* error querying the
* services monitor.
* @throws DatabaseServerDownException if the database
* server is down.
* @throws KeywordSearchServerDownException if the Solr server
* is down.
* @throws CaseManagementException if there is an
* error creating,
* opening or closing
* the case for the
* job.
* @throws AnalysisStartupException if there is an
* error starting
* analysis of the
* data source by the
* data source level
* and file level
* ingest modules.
* @throws FileExportException if there is an
* error exporting
* files.
* @throws AutoIngestAlertFileException if there is an
* error creating an
* alert file.
* @throws AutoIngestJobLoggerException if there is an
* error writing to
* the auto ingest
* log for the case.
* @throws InterruptedException if the thread
* running the job
* processing task is
* interrupted while
* blocked, i.e., if
* auto ingest is
* shutting down.
* @throws AutoIngestJobNodeData.InvalidDataException if there is an
* error constructing
* auto ingest node
* data objects.
*/
private void processJob() throws CoordinationServiceException, SharedConfigurationException, ServicesMonitorException, DatabaseServerDownException, KeywordSearchServerDownException, CaseManagementException, AnalysisStartupException, FileExportException, AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException, AutoIngestJobDataException {
private void processJob() throws CoordinationServiceException, SharedConfigurationException, ServicesMonitorException, DatabaseServerDownException, KeywordSearchServerDownException, CaseManagementException, AnalysisStartupException, FileExportException, AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException, AutoIngestJobNodeData.InvalidDataException {
Path manifestPath = currentJob.getManifest().getFilePath();
AutoIngestJobData nodeData = new AutoIngestJobData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString()));
nodeData.setProcessingStatus(PROCESSING);
coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString(), nodeData.toArray());
currentJob.setProcessingStatus(AutoIngestJob.ProcessingStatus.PROCESSING);
updateCoordinationServiceNode(currentJob);
SYS_LOGGER.log(Level.INFO, "Started processing of {0}", manifestPath);
currentJob.setStage(AutoIngestJob.Stage.STARTING);
setChanged();
@ -1760,18 +1882,15 @@ public final class AutoIngestManager extends Observable implements PropertyChang
currentJob.cancel();
}
nodeData = new AutoIngestJobData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString()));
if (currentJob.isCompleted() || currentJob.isCanceled()) {
nodeData.setProcessingStatus(COMPLETED);
currentJob.setProcessingStatus(AutoIngestJob.ProcessingStatus.COMPLETED);
Date completedDate = new Date();
currentJob.setCompletedDate(completedDate);
nodeData.setCompletedDate(currentJob.getCompletedDate());
nodeData.setErrorsOccurred(currentJob.hasErrors());
} else {
// The job may get retried
nodeData.setProcessingStatus(PENDING);
currentJob.setProcessingStatus(AutoIngestJob.ProcessingStatus.PENDING);
}
coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString(), nodeData.toArray());
updateCoordinationServiceNode(currentJob);
boolean retry = (!currentJob.isCanceled() && !currentJob.isCompleted());
SYS_LOGGER.log(Level.INFO, "Completed processing of {0}, retry = {1}", new Object[]{manifestPath, retry});
@ -1779,7 +1898,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
Path caseDirectoryPath = currentJob.getCaseDirectoryPath();
if (null != caseDirectoryPath) {
AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log
AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(nodeData.getManifestFilePath(), nodeData.getDataSourceFileName(), caseDirectoryPath);
AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(manifestPath, currentJob.getManifest().getDataSourceFileName(), caseDirectoryPath);
jobLogger.logJobCancelled();
}
}
@ -1974,7 +2093,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
}
}
}
/**
* Runs the ingest process for the current job.
*
@ -2651,6 +2770,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
try {
synchronized (jobsLock) {
if (currentJob != null) {
currentJob.getStageDetails();
setChanged();
notifyObservers(Event.JOB_STATUS_UPDATED);
eventPublisher.publishRemotely(new AutoIngestJobStatusEvent(currentJob));
@ -2680,7 +2800,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
// check whether any remote nodes have timed out
for (AutoIngestJob job : hostNamesToRunningJobs.values()) {
if (isStale(hostNamesToLastMsgTime.get(job.getNodeName()))) {
if (isStale(hostNamesToLastMsgTime.get(job.getProcessingHostName()))) {
// remove the job from remote job running map.
/*
* NOTE: there is theoretically a check-then-act race
@ -2692,7 +2812,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
* back into hostNamesToRunningJobs as a result of
* processing the job status update.
*/
hostNamesToRunningJobs.remove(job.getNodeName());
hostNamesToRunningJobs.remove(job.getProcessingHostName());
setChanged();
notifyObservers(Event.JOB_COMPLETED);
}
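
A minimal sketch of the staleness check above: record the last time each host was heard from and drop any host whose most recent message is older than a cutoff. The two-minute timeout is an illustrative assumption, not the value used by auto ingest.

    import java.time.Duration;
    import java.time.Instant;
    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.Map;

    final class StaleHostSketch {
        private static final Duration TIMEOUT = Duration.ofMinutes(2); // illustrative cutoff
        private final Map<String, Instant> lastMessageTimeByHost = new HashMap<>();

        void recordMessage(String hostName) {
            lastMessageTimeByHost.put(hostName, Instant.now());
        }

        // Remove every host that has not sent a status message within the timeout window.
        void dropStaleHosts() {
            for (Iterator<Map.Entry<String, Instant>> it = lastMessageTimeByHost.entrySet().iterator(); it.hasNext();) {
                Map.Entry<String, Instant> entry = it.next();
                if (Duration.between(entry.getValue(), Instant.now()).compareTo(TIMEOUT) > 0) {
                    it.remove();
                }
            }
        }
    }
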
@ -2873,15 +2993,15 @@ public final class AutoIngestManager extends Observable implements PropertyChang
}
static final class AutoIngestManagerStartupException extends Exception {
static final class AutoIngestManagerException extends Exception {
private static final long serialVersionUID = 1L;
private AutoIngestManagerStartupException(String message) {
private AutoIngestManagerException(String message) {
super(message);
}
private AutoIngestManagerStartupException(String message, Throwable cause) {
private AutoIngestManagerException(String message, Throwable cause) {
super(message, cause);
}

View File

@ -41,7 +41,7 @@ import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.NetworkUtils;
import org.sleuthkit.autopsy.events.AutopsyEventException;
import org.sleuthkit.autopsy.events.AutopsyEventPublisher;
import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobData.ProcessingStatus;
import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJob.ProcessingStatus;
/**
* An auto ingest monitor responsible for monitoring and reporting the
@ -160,8 +160,7 @@ public final class AutoIngestMonitor extends Observable implements PropertyChang
private void handleJobStatusEvent(AutoIngestJobStatusEvent event) {
synchronized (jobsLock) {
// DLG: TEST! Replace job in running list with job from event
jobsSnapshot.getRunningJobs().remove((AutoIngestJob)event.getOldValue());
jobsSnapshot.getRunningJobs().add(event.getJob());
jobsSnapshot.addOrReplaceRunningJob(event.getJob());
setChanged();
notifyObservers(jobsSnapshot);
}
@ -189,26 +188,7 @@ public final class AutoIngestMonitor extends Observable implements PropertyChang
* @param event A job/case prioritization event.
*/
private void handleCasePrioritizationEvent(AutoIngestCasePrioritizedEvent event) {
synchronized (jobsLock) {
// DLG: TEST! Replace job in pending queue with job from event
jobsSnapshot.getPendingJobs().remove((AutoIngestJob)event.getOldValue());
jobsSnapshot.getPendingJobs().add((AutoIngestJob)event.getNewValue());
/* DLG: List<AutoIngestJob> pendingJobsList = jobsSnapshot.getPendingJobs();
for(int i=0; i < pendingJobsList.size(); i++) {
AutoIngestJob job = pendingJobsList.get(i);
if(job.getNodeName().equalsIgnoreCase(event.getNodeName())) {
if(job.getNodeData().getCaseName().equalsIgnoreCase(event.getCaseName())) {
int newPriority = ((AutoIngestJob)event.getNewValue()).getNodeData().getPriority();
jobsSnapshot.getPendingJobs().get(i).getNodeData().setPriority(newPriority);
}
}
}*/
setChanged();
notifyObservers(jobsSnapshot);
}
coordSvcQueryExecutor.submit(new CoordinationServiceQueryTask());
}
/**
@ -260,12 +240,7 @@ public final class AutoIngestMonitor extends Observable implements PropertyChang
List<String> nodeList = coordinationService.getNodeList(CoordinationService.CategoryNode.MANIFESTS);
for (String node : nodeList) {
try {
// DLG: DONE! Do not need a lock here
// DLG: DONE! Get the node data and construct an AutoIngestJobData object (DONE! rename AutoIngestJobData => AutoIngestJobData)
AutoIngestJobData nodeData = new AutoIngestJobData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, node));
// DLG: DONE! Construct an AutoIngestJob object from the AutoIngestJobData object, need new AutoIngestJob constructor
AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, node));
AutoIngestJob job = new AutoIngestJob(nodeData);
ProcessingStatus processingStatus = nodeData.getProcessingStatus();
switch (processingStatus) {
@ -279,16 +254,15 @@ public final class AutoIngestMonitor extends Observable implements PropertyChang
newJobsSnapshot.addOrReplaceCompletedJob(job);
break;
case DELETED:
// Do nothing - we don't want to add it to any job list or do recovery
break;
default:
LOGGER.log(Level.SEVERE, "Unknown AutoIngestJobData.ProcessingStatus");
break;
}
} catch (InterruptedException ex) {
LOGGER.log(Level.SEVERE, "Unexpected interrupt while retrieving coordination service node data for '{0}'", node);
} catch (AutoIngestJobDataException ex) {
LOGGER.log(Level.WARNING, String.format("Unable to use node data for '%s'", node), ex);
LOGGER.log(Level.SEVERE, String.format("Unexpected interrupt while retrieving coordination service node data for '%s'", node), ex);
} catch (AutoIngestJobNodeData.InvalidDataException ex) {
LOGGER.log(Level.SEVERE, String.format("Unable to use node data for '%s'", node), ex);
}
}
return newJobsSnapshot;
@ -328,10 +302,10 @@ public final class AutoIngestMonitor extends Observable implements PropertyChang
++highestPriority;
String manifestNodePath = prioritizedJob.getManifest().getFilePath().toString();
try {
AutoIngestJobData nodeData = new AutoIngestJobData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath));
AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath));
nodeData.setPriority(highestPriority);
coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath, nodeData.toArray());
} catch (AutoIngestJobDataException | CoordinationServiceException | InterruptedException ex) {
} catch (AutoIngestJobNodeData.InvalidDataException | CoordinationServiceException | InterruptedException ex) {
throw new AutoIngestMonitorException("Error bumping priority for job " + prioritizedJob.toString(), ex);
}
prioritizedJob.setPriority(highestPriority);

View File

@ -18,11 +18,11 @@
*/
package org.sleuthkit.autopsy.experimental.autoingest;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Date;
import java.util.HashMap;
import javax.annotation.concurrent.Immutable;
import javax.xml.parsers.DocumentBuilder;
@ -42,24 +42,11 @@ import org.xml.sax.SAXException;
public final class AutopsyManifestFileParser implements ManifestFileParser {
private static final String MANIFEST_FILE_NAME_SIGNATURE = "_MANIFEST.XML";
private static final String NMEC_MANIFEST_ELEM_TAG_NAME = "NMEC_Manifest";
private static final String MANIFEST_ELEM_TAG_NAME = "Manifest";
private static final String CASE_NAME_XPATH = "/Collection/Name/text()";
private static final String DEVICE_ID_XPATH = "/Collection/Image/ID/text()";
private static final String IMAGE_NAME_XPATH = "/Collection/Image/Name/text()";
private static final String IMAGE_FULL_NAME_XPATH = "/Collection/Image/FullName/text()";
private static final String IMAGE_RELATIVE_PATH_XPATH = "/Collection/Image/RelativePath/text()";
private String actualRootElementTag = "";
private static final String ROOT_ELEM_TAG_NAME = "AutopsyManifest";
private static final String CASE_NAME_XPATH = "/AutopsyManifest/CaseName/text()";
private static final String DEVICE_ID_XPATH = "/AutopsyManifest/DeviceId/text()";
private static final String DATA_SOURCE_NAME_XPATH = "/AutopsyManifest/DataSource/text()";
/**
* Determine whether the given file is a supported manifest file.
*
* @param filePath
*
* @return true if this is a supported manifest file, otherwise false
*/
@Override
public boolean fileIsManifest(Path filePath) {
boolean fileIsManifest = false;
@ -68,9 +55,7 @@ public final class AutopsyManifestFileParser implements ManifestFileParser {
if (fileName.toString().toUpperCase().endsWith(MANIFEST_FILE_NAME_SIGNATURE)) {
Document doc = this.createManifestDOM(filePath);
Element docElement = doc.getDocumentElement();
actualRootElementTag = docElement.getTagName();
fileIsManifest = actualRootElementTag.equals(MANIFEST_ELEM_TAG_NAME) ||
actualRootElementTag.equals(NMEC_MANIFEST_ELEM_TAG_NAME);
fileIsManifest = docElement.getTagName().equals(ROOT_ELEM_TAG_NAME);
}
} catch (Exception unused) {
fileIsManifest = false;
@ -78,95 +63,30 @@ public final class AutopsyManifestFileParser implements ManifestFileParser {
return fileIsManifest;
}
/**
* Parse the given manifest file and create a Manifest object.
*
* @param filePath Fully qualified path to manifest file
*
* @return A Manifest object representing the parsed manifest file.
*
* @throws ManifestFileParserException
*/
@Override
public Manifest parse(Path filePath) throws ManifestFileParserException {
try {
BasicFileAttributes attrs = Files.readAttributes(filePath, BasicFileAttributes.class);
Date dateFileCreated = new Date(attrs.creationTime().toMillis());
Document doc = this.createManifestDOM(filePath);
XPath xpath = XPathFactory.newInstance().newXPath();
XPathExpression expr = xpath.compile(constructXPathExpression(CASE_NAME_XPATH));
String caseName = (String) expr.evaluate(doc, XPathConstants.STRING);
expr = xpath.compile(constructXPathExpression(DEVICE_ID_XPATH));
XPathExpression expr = xpath.compile(CASE_NAME_XPATH);
String caseName = (String) expr.evaluate(doc, XPathConstants.STRING);
expr = xpath.compile(DEVICE_ID_XPATH);
String deviceId = (String) expr.evaluate(doc, XPathConstants.STRING);
Path dataSourcePath = determineDataSourcePath(filePath, doc);
return new Manifest(filePath, caseName, deviceId, dataSourcePath, new HashMap<>());
expr = xpath.compile(DATA_SOURCE_NAME_XPATH);
String dataSourceName = (String) expr.evaluate(doc, XPathConstants.STRING);
Path dataSourcePath = filePath.getParent().resolve(dataSourceName);
return new Manifest(filePath, dateFileCreated, caseName, deviceId, dataSourcePath, new HashMap<>());
} catch (Exception ex) {
throw new ManifestFileParserException(String.format("Error parsing manifest %s", filePath), ex);
}
}
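
For reference, a self-contained sketch of the XPath lookups the rewritten parse() method performs against an _MANIFEST.XML file. The element names match the constants above; the inline XML and printing are illustrative only, and the sketch returns plain strings rather than building a Manifest object.

    import java.io.StringReader;
    import javax.xml.parsers.DocumentBuilderFactory;
    import javax.xml.xpath.XPath;
    import javax.xml.xpath.XPathConstants;
    import javax.xml.xpath.XPathFactory;
    import org.w3c.dom.Document;
    import org.xml.sax.InputSource;

    final class ManifestXPathSketch {
        public static void main(String[] args) throws Exception {
            String xml = "<AutopsyManifest>"
                    + "<CaseName>ExampleCase</CaseName>"
                    + "<DeviceId>device-1</DeviceId>"
                    + "<DataSource>image.dd</DataSource>"
                    + "</AutopsyManifest>";
            Document doc = DocumentBuilderFactory.newInstance()
                    .newDocumentBuilder()
                    .parse(new InputSource(new StringReader(xml)));
            XPath xpath = XPathFactory.newInstance().newXPath();
            // Same expressions as the parser's XPath constants, evaluated against the DOM.
            String caseName = (String) xpath.compile("/AutopsyManifest/CaseName/text()").evaluate(doc, XPathConstants.STRING);
            String deviceId = (String) xpath.compile("/AutopsyManifest/DeviceId/text()").evaluate(doc, XPathConstants.STRING);
            String dataSourceName = (String) xpath.compile("/AutopsyManifest/DataSource/text()").evaluate(doc, XPathConstants.STRING);
            System.out.println(caseName + " / " + deviceId + " / " + dataSourceName);
        }
    }
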
/**
* Create a new DOM document object for the given manifest file.
*
* @param manifestFilePath Fully qualified path to manifest file.
*
* @return DOM document object
*
* @throws ParserConfigurationException
* @throws SAXException
* @throws IOException
*/
private Document createManifestDOM(Path manifestFilePath) throws ParserConfigurationException, SAXException, IOException {
DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder();
return docBuilder.parse(manifestFilePath.toFile());
}
/**
* Creates an XPath expression string relative to the actual root
* element of the manifest for the given path.
*
* @param path
* @return XPath expression string.
*/
private String constructXPathExpression(String path) {
return "/" + actualRootElementTag + path;
}
/**
* Attempt to find a valid (existing) data source for the manifest file.
*
* @param manifestFilePath Fully qualified path to manifest file.
* @param doc DOM document object for the manifest file.
* @return Path to an existing data source.
* @throws ManifestFileParserException if an error occurred while parsing manifest file.
*/
private Path determineDataSourcePath(Path manifestFilePath, Document doc) throws ManifestFileParserException {
String dataSourcePath = "";
try {
for (String element : Arrays.asList(IMAGE_NAME_XPATH, IMAGE_FULL_NAME_XPATH, IMAGE_RELATIVE_PATH_XPATH)) {
XPath xpath = XPathFactory.newInstance().newXPath();
XPathExpression expr = xpath.compile(constructXPathExpression(element));
String fileName = (String) expr.evaluate(doc, XPathConstants.STRING);
if (fileName.contains("\\")) {
fileName = fileName.substring(fileName.lastIndexOf("\\") + 1);
}
try {
dataSourcePath = manifestFilePath.getParent().resolve(fileName).toString();
} catch (Exception ignore) {
// NOTE: exceptions can be thrown by resolve() method based on contents of the manifest file.
// For example if file name is "test .txt" and in one of the path fields they only enter "test "
// i.e. the file name without extension.
// We should continue on to the next XML path field
continue;
}
if (new File(dataSourcePath).exists()) {
// found the data source
return Paths.get(dataSourcePath);
}
// keep trying other XML fields
}
return Paths.get(dataSourcePath);
} catch (Exception ex) {
throw new ManifestFileParserException(String.format("Error parsing manifest %s", manifestFilePath), ex);
}
}
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015 Basis Technology Corp.
* Copyright 2011-2017 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -18,12 +18,9 @@
*/
package org.sleuthkit.autopsy.experimental.autoingest;
import java.io.IOException;
import java.io.Serializable;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
@ -40,22 +37,6 @@ public final class Manifest implements Serializable {
private final String dataSourcePath;
private final Map<String, String> manifestProperties;
// RJCTODO
public Manifest(Path manifestFilePath, String caseName, String deviceId, Path dataSourcePath, Map<String, String> manifestProperties) throws IOException {
this.filePath = manifestFilePath.toString();
BasicFileAttributes attrs = Files.readAttributes(manifestFilePath, BasicFileAttributes.class);
this.dateFileCreated = new Date(attrs.creationTime().toMillis());
this.caseName = caseName;
this.deviceId = deviceId;
if (dataSourcePath != null) {
this.dataSourcePath = dataSourcePath.toString();
} else {
this.dataSourcePath = "";
}
this.manifestProperties = new HashMap<>(manifestProperties);
}
// RJCTODO
public Manifest(Path manifestFilePath, Date dateFileCreated, String caseName, String deviceId, Path dataSourcePath, Map<String, String> manifestProperties) {
this.filePath = manifestFilePath.toString();
this.dateFileCreated = dateFileCreated;