diff --git a/CentralRepository/Central Repository User Guide.pdf b/CentralRepository/Central Repository User Guide.pdf deleted file mode 100755 index 0b597fbfbe..0000000000 Binary files a/CentralRepository/Central Repository User Guide.pdf and /dev/null differ diff --git a/CentralRepository/ivy.xml b/CentralRepository/ivy.xml deleted file mode 100755 index abf18b3da4..0000000000 --- a/CentralRepository/ivy.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - - - - diff --git a/CentralRepository/ivysettings.xml b/CentralRepository/ivysettings.xml deleted file mode 100755 index e3e086637b..0000000000 --- a/CentralRepository/ivysettings.xml +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - - - - - diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java old mode 100755 new mode 100644 index afef305ace..1f5af1104c --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -33,8 +33,6 @@ import java.time.LocalDate; import java.util.HashMap; import java.util.Map; import java.util.Set; -import java.util.logging.Level; -import javafx.animation.KeyValue; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; @@ -1062,8 +1060,9 @@ public abstract class AbstractSqlEamDb implements EamDb { } /** - * Sets an eamArtifact instance to the given known status. If eamArtifact - * exists, it is updated. If eamArtifact does not exist nothing happens + * Sets an eamArtifact instance to the given knownStatus. If eamArtifact + * exists, it is updated. If eamArtifact does not exist it is added + * with the given status. * * @param eamArtifact Artifact containing exactly one (1) ArtifactInstance. 
* @param FileKnown The status to change the artifact to diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamArtifactUtil.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamArtifactUtil.java index a582418911..670067ac0a 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamArtifactUtil.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamArtifactUtil.java @@ -27,6 +27,7 @@ import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardAttribute; +import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.TskDataException; @@ -192,4 +193,73 @@ public class EamArtifactUtil { return null; } } + + /** + * Create an EamArtifact from the given Content. + * Will return null if an artifact can not be created. Does not + * add the artifact to the database. + * + * @param content The content object + * @param knownStatus Unknown, known bad, or known + * @param comment The comment for the new artifact (generally used for a tag comment) + * @return The new EamArtifact or null if creation failed + */ + public static EamArtifact getEamArtifactFromContent(Content content, TskData.FileKnown knownStatus, String comment){ + + if(! 
(content instanceof AbstractFile)){ + return null; + } + + final AbstractFile af = (AbstractFile) content; + + if ((af.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) + || (af.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) + || (af.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.SLACK) + || (af.getKnown() == TskData.FileKnown.KNOWN) + || (af.isDir() == true) + || (!af.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC))) { + return null; + } + + String dsName; + try { + dsName = af.getDataSource().getName(); + } catch (TskCoreException ex) { + LOGGER.log(Level.SEVERE, "Error, unable to get name of data source from abstract file.", ex); + return null; + } + + // We need a hash to make the artifact + String md5 = af.getMd5Hash(); + if (md5 == null || md5.isEmpty()) { + return null; + } + + String deviceId; + try { + deviceId = Case.getCurrentCase().getSleuthkitCase().getDataSource(af.getDataSource().getId()).getDeviceId(); + } catch (TskCoreException | TskDataException ex) { + LOGGER.log(Level.SEVERE, "Error, failed to get deviceID or data source from current case.", ex); + return null; + } + + EamArtifact eamArtifact; + try { + EamArtifact.Type filesType = EamDb.getInstance().getCorrelationTypeById(EamArtifact.FILES_TYPE_ID); + eamArtifact = new EamArtifact(filesType, af.getMd5Hash()); + EamArtifactInstance cei = new EamArtifactInstance( + new EamCase(Case.getCurrentCase().getName(), Case.getCurrentCase().getDisplayName()), + new EamDataSource(deviceId, dsName), + af.getParentPath() + af.getName(), + comment, + TskData.FileKnown.BAD, + EamArtifactInstance.GlobalStatus.LOCAL + ); + eamArtifact.addInstance(cei); + return eamArtifact; + } catch (EamDbException ex) { + LOGGER.log(Level.SEVERE, "Error, unable to get FILES correlation type.", ex); + return null; + } + } } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java index 14ee9387b2..fff8bbf540 
100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java @@ -327,7 +327,7 @@ public interface EamDb { * @param FileKnown The status to change the artifact to */ void setArtifactInstanceKnownStatus(EamArtifact eamArtifact, TskData.FileKnown knownStatus) throws EamDbException; - + /** * Gets list of matching eamArtifact instances that have knownStatus = * "Bad". diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java index 102fdf227f..1e1bc7227e 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java @@ -600,7 +600,7 @@ public class SqliteEamDb extends AbstractSqlEamDb { } finally { releaseExclusiveLock(); } - } + } /** * Gets list of matching eamArtifact instances that have knownStatus = diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java old mode 100755 new mode 100644 index e2d253cc28..d333a95100 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java @@ -30,11 +30,9 @@ import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent; import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent; import org.sleuthkit.autopsy.casemodule.events.DataSourceAddedEvent; -import org.sleuthkit.autopsy.casemodule.services.Services; import org.sleuthkit.autopsy.casemodule.services.TagsManager; import org.sleuthkit.autopsy.coreutils.Logger; import 
org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifact; -import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactInstance; import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil; import org.sleuthkit.autopsy.centralrepository.datamodel.EamCase; import org.sleuthkit.autopsy.centralrepository.datamodel.EamDataSource; @@ -46,7 +44,6 @@ import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifactTag; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.ContentTag; -import org.sleuthkit.datamodel.TagName; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.TskDataException; @@ -142,57 +139,14 @@ public class CaseEventListener implements PropertyChangeListener { } } - if ((af.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) - || (af.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) - || (af.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.SLACK) - || (af.getKnown() == TskData.FileKnown.KNOWN) - || (af.isDir() == true) - || (!af.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC))) { - break; - } + final EamArtifact eamArtifact = EamArtifactUtil.getEamArtifactFromContent(af, + knownStatus, comment); - String dsName; - try { - dsName = af.getDataSource().getName(); - } catch (TskCoreException ex) { - LOGGER.log(Level.SEVERE, "Error, unable to get name of data source from abstract file during CONTENT_TAG_ADDED event.", ex); - return; - } - - String md5 = af.getMd5Hash(); - if (md5 == null || md5.isEmpty()) { - return; - } - String deviceId; - try { - deviceId = Case.getCurrentCase().getSleuthkitCase().getDataSource(af.getDataSource().getId()).getDeviceId(); - } catch (TskCoreException | TskDataException ex) { - LOGGER.log(Level.SEVERE, "Error, failed to get deviceID or data source from current case.", ex); - return; - } - - EamArtifact eamArtifact; - try { - EamArtifact.Type filesType = 
dbManager.getCorrelationTypeById(EamArtifact.FILES_TYPE_ID); - eamArtifact = new EamArtifact(filesType, af.getMd5Hash()); - EamArtifactInstance cei = new EamArtifactInstance( - new EamCase(Case.getCurrentCase().getName(), Case.getCurrentCase().getDisplayName()), - new EamDataSource(deviceId, dsName), - af.getParentPath() + af.getName(), - comment, - knownStatus, - EamArtifactInstance.GlobalStatus.LOCAL - ); - eamArtifact.addInstance(cei); - // send update to Central Repository db - Runnable r = new KnownStatusChangeRunner(eamArtifact, knownStatus); - // TODO: send r into a thread pool instead - Thread t = new Thread(r); - t.start(); - } catch (EamDbException ex) { - LOGGER.log(Level.SEVERE, "Error, unable to get FILES correlation type during CONTENT_TAG_ADDED/CONTENT_TAG_DELETED event.", ex); - } - + // send update to Central Repository db + Runnable r = new KnownStatusChangeRunner(eamArtifact, knownStatus); + // TODO: send r into a thread pool instead + Thread t = new Thread(r); + t.start(); } // CONTENT_TAG_ADDED, CONTENT_TAG_DELETED break; diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/ManageTagsDialog.java b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/ManageTagsDialog.java index a5c0b87b1b..49d9ab6d79 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/ManageTagsDialog.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/ManageTagsDialog.java @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.centralrepository.optionspanel; +import java.awt.Cursor; import java.awt.Dimension; import java.awt.Toolkit; import java.util.ArrayList; @@ -27,13 +28,23 @@ import java.util.logging.Level; import java.util.stream.Collectors; import javax.swing.JFrame; import javax.swing.table.DefaultTableModel; +import javax.swing.event.TableModelEvent; +import javax.swing.event.TableModelListener; +import javax.swing.JOptionPane; import org.openide.util.NbBundle.Messages; import 
org.openide.windows.WindowManager; +import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.services.TagsManager; import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb; import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException; +import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifact; +import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.BlackboardArtifactTag; +import org.sleuthkit.datamodel.TagName; +import org.sleuthkit.datamodel.ContentTag; +import org.sleuthkit.datamodel.TskData; /** * Instances of this class allow a user to select an existing hash database and @@ -92,6 +103,8 @@ final class ManageTagsDialog extends javax.swing.JDialog { boolean enabled = badTags.contains(tagName); model.addRow(new Object[]{tagName, enabled}); } + CheckBoxModelListener listener = new CheckBoxModelListener(this); + model.addTableModelListener(listener); } private void display() { @@ -230,6 +243,90 @@ final class ManageTagsDialog extends javax.swing.JDialog { } return true; } + + /** + * If the user sets a tag to "Implies known bad", give them the option to update + * any existing tagged items (in the current case only) in the central repo. 
+ */ + public class CheckBoxModelListener implements TableModelListener { + @Messages({"ManageTagsDialog.updateCurrentCase.msg=Mark as known bad any files/artifacts in the current case that have this tag?", + "ManageTagsDialog.updateCurrentCase.title=Update current case?", + "ManageTagsDialog.updateCurrentCase.error=Error updating existing Central Repository entries"}) + + javax.swing.JDialog dialog; + public CheckBoxModelListener(javax.swing.JDialog dialog){ + this.dialog = dialog; + } + + @Override + public void tableChanged(TableModelEvent e) { + int row = e.getFirstRow(); + int column = e.getColumn(); + if (column == 1) { + DefaultTableModel model = (DefaultTableModel) e.getSource(); + String tagName = (String) model.getValueAt(row, 0); + Boolean checked = (Boolean) model.getValueAt(row, column); + if (checked) { + + // Don't do anything if there's no case open + if(Case.isCaseOpen()){ + int dialogButton = JOptionPane.YES_NO_OPTION; + int dialogResult = JOptionPane.showConfirmDialog ( + null, + Bundle.ManageTagsDialog_updateCurrentCase_msg(), + Bundle.ManageTagsDialog_updateCurrentCase_title(), + dialogButton); + if(dialogResult == JOptionPane.YES_OPTION){ + try{ + dialog.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); + setArtifactsKnownBadByTag(tagName, Case.getCurrentCase()); + } catch (EamDbException ex) { + LOGGER.log(Level.SEVERE, "Failed to apply known bad status to current case", ex); + JOptionPane.showMessageDialog(null, Bundle.ManageTagsDialog_updateCurrentCase_error()); + } finally { + dialog.setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); + } + } + } + } + } + } + } + + /** + * Set knownBad status for all files/artifacts in the given case that + * are tagged with the given tag name. + * Files/artifacts that are not already in the database will be added. 
+ * @param tagName The name of the tag to search for + * @param curCase The case to search in + */ + public void setArtifactsKnownBadByTag(String tagNameString, Case curCase) throws EamDbException{ + try{ + TagName tagName = curCase.getServices().getTagsManager().getDisplayNamesToTagNamesMap().get(tagNameString); + + // First find any matching artifacts + List artifactTags = curCase.getSleuthkitCase().getBlackboardArtifactTagsByTagName(tagName); + + for(BlackboardArtifactTag bbTag:artifactTags){ + List convertedArtifacts = EamArtifactUtil.fromBlackboardArtifact(bbTag.getArtifact(), true, + EamDb.getInstance().getCorrelationTypes(), true); + for (EamArtifact eamArtifact : convertedArtifacts) { + EamDb.getInstance().setArtifactInstanceKnownStatus(eamArtifact,TskData.FileKnown.BAD); + } + } + + // Now search for files + List fileTags = curCase.getSleuthkitCase().getContentTagsByTagName(tagName); + for(ContentTag contentTag:fileTags){ + final EamArtifact eamArtifact = EamArtifactUtil.getEamArtifactFromContent(contentTag.getContent(), + TskData.FileKnown.BAD, ""); + EamDb.getInstance().setArtifactInstanceKnownStatus(eamArtifact, TskData.FileKnown.BAD); + } + } catch (TskCoreException ex){ + throw new EamDbException("Error updating artifacts", ex); + } + + } // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.ButtonGroup buttonGroup1; diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.java index bdd6310064..8f80022834 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.java @@ -253,12 +253,12 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { * controlling automated ingest for a single node within the cluster. 
*/ private AutoIngestControlPanel() { - + //Disable the main window so they can only use the dashboard (if we used setVisible the taskBar icon would go away) WindowManager.getDefault().getMainWindow().setEnabled(false); - + manager = AutoIngestManager.getInstance(); - + pendingTableModel = new DefaultTableModel(JobsTableModelColumns.headers, 0) { private static final long serialVersionUID = 1L; @@ -304,6 +304,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { * text box. */ @Messages({ + "# {0} - case db status", "# {1} - search svc Status", "# {2} - coord svc Status", "# {3} - msg broker status", "AutoIngestControlPanel.tbServicesStatusMessage.Message=Case databases {0}, keyword search {1}, coordination {2}, messaging {3} ", "AutoIngestControlPanel.tbServicesStatusMessage.Message.Up=up", "AutoIngestControlPanel.tbServicesStatusMessage.Message.Down=down", @@ -669,8 +670,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { @Messages({ "AutoIngestControlPanel.AutoIngestStartupError=Failed to start automated ingest. 
Verify Multi-user Settings.", "AutoIngestControlPanel.AutoIngestStartupFailed.Message=Failed to start automated ingest.\nPlease see auto ingest system log for details.", - "AutoIngestControlPanel.AutoIngestStartupFailed.Title=Automated Ingest Error", - }) + "AutoIngestControlPanel.AutoIngestStartupFailed.Title=Automated Ingest Error",}) private void startUp() { /* @@ -679,7 +679,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { try { manager.startUp(); autoIngestStarted = true; - } catch (AutoIngestManager.AutoIngestManagerStartupException ex) { + } catch (AutoIngestManager.AutoIngestManagerException ex) { SYS_LOGGER.log(Level.SEVERE, "Dashboard error starting up auto ingest", ex); tbStatusMessage.setText(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.AutoIngestStartupError")); manager = null; @@ -812,8 +812,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { "AutoIngestControlPanel.PauseDueToWriteStateFilesFailure=Paused, unable to write to shared images or cases location.", "AutoIngestControlPanel.PauseDueToSharedConfigError=Paused, unable to update shared configuration.", "AutoIngestControlPanel.PauseDueToIngestJobStartFailure=Paused, unable to start ingest job processing.", - "AutoIngestControlPanel.PauseDueToFileExporterError=Paused, unable to load File Exporter settings.", - }) + "AutoIngestControlPanel.PauseDueToFileExporterError=Paused, unable to load File Exporter settings.",}) @Override public void update(Observable o, Object arg) { @@ -983,7 +982,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { List completedJobs = new ArrayList<>(); manager.getJobs(pendingJobs, runningJobs, completedJobs); // Sort the completed jobs list by completed date - Collections.sort(completedJobs, new AutoIngestJob.ReverseDateCompletedComparator()); + Collections.sort(completedJobs, new AutoIngestJob.ReverseCompletedDateComparator()); 
EventQueue.invokeLater(new RefreshComponentsTask(pendingJobs, runningJobs, completedJobs)); } } @@ -1076,7 +1075,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { * @return True or fale. */ private boolean isLocalJob(AutoIngestJob job) { - return job.getNodeName().equals(LOCAL_HOST_NAME); + return job.getProcessingHostName().equals(LOCAL_HOST_NAME); } /** @@ -1145,20 +1144,19 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { tableModel.setRowCount(0); for (AutoIngestJob job : jobs) { AutoIngestJob.StageDetails status = job.getStageDetails(); - AutoIngestJobNodeData nodeData = job.getNodeData(); tableModel.addRow(new Object[]{ - nodeData.getCaseName(), // CASE - nodeData.getDataSourcePath().getFileName(), // DATA_SOURCE - job.getNodeName(), // HOST_NAME - nodeData.getManifestFileDate(), // CREATED_TIME - job.getStageStartDate(), // STARTED_TIME - nodeData.getCompletedDate(), // COMPLETED_TIME + job.getManifest().getCaseName(), // CASE + job.getManifest().getDataSourcePath().getFileName(), // DATA_SOURCE + job.getProcessingHostName(), // HOST_NAME + job.getManifest().getDateFileCreated(), // CREATED_TIME + job.getProcessingStageStartDate(), // STARTED_TIME + job.getCompletedDate(), // COMPLETED_TIME status.getDescription(), // ACTIVITY - nodeData.getErrorsOccurred(), // STATUS +job.getErrorsOccurred(), // STATUS ((Date.from(Instant.now()).getTime()) - (status.getStartDate().getTime())), // ACTIVITY_TIME job.getCaseDirectoryPath(), // CASE_DIRECTORY_PATH - job.getNodeName().equals(LOCAL_HOST_NAME), // IS_LOCAL_JOB - nodeData.getManifestFilePath()}); // MANIFEST_FILE_PATH + job.getProcessingHostName().equals(LOCAL_HOST_NAME), // IS_LOCAL_JOB + job.getManifest().getFilePath()}); // MANIFEST_FILE_PATH } } catch (Exception ex) { SYS_LOGGER.log(Level.SEVERE, "Dashboard error refreshing table", ex); @@ -1703,11 +1701,17 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { * * 
@param evt The button click event. */ + @Messages({"AutoIngestControlPanel.casePrioritization.errorMessage=An error occurred when prioritizing the case. Some or all jobs may not have been prioritized."}) private void bnPrioritizeCaseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnPrioritizeCaseActionPerformed if (pendingTableModel.getRowCount() > 0 && pendingTable.getSelectedRow() >= 0) { this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); String caseName = (pendingTableModel.getValueAt(pendingTable.getSelectedRow(), JobsTableModelColumns.CASE.ordinal())).toString(); - manager.prioritizeCase(caseName); + try { + manager.prioritizeCase(caseName); + } catch (AutoIngestManager.AutoIngestManagerException ex) { + SYS_LOGGER.log(Level.SEVERE, "Error prioritizing a case", ex); + MessageNotifyUtil.Message.error(Bundle.AutoIngestControlPanel_casePrioritization_errorMessage()); + } refreshTables(); pendingTable.clearSelection(); enablePendingTableButtons(false); @@ -1755,12 +1759,18 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { options[0]); } }//GEN-LAST:event_bnShowCaseLogActionPerformed - + + @Messages({"AutoIngestControlPanel.jobPrioritization.errorMessage=An error occurred when prioritizing the job."}) private void bnPrioritizeJobActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnPrioritizeJobActionPerformed if (pendingTableModel.getRowCount() > 0 && pendingTable.getSelectedRow() >= 0) { this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); Path manifestFilePath = (Path) (pendingTableModel.getValueAt(pendingTable.getSelectedRow(), JobsTableModelColumns.MANIFEST_FILE_PATH.ordinal())); - manager.prioritizeJob(manifestFilePath); + try { + manager.prioritizeJob(manifestFilePath); + } catch (AutoIngestManager.AutoIngestManagerException ex) { + SYS_LOGGER.log(Level.SEVERE, "Error prioritizing a case", ex); + 
MessageNotifyUtil.Message.error(Bundle.AutoIngestControlPanel_jobPrioritization_errorMessage()); + } refreshTables(); pendingTable.clearSelection(); enablePendingTableButtons(false); diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.form b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.form index 502fab6f16..ba1b540bd0 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.form +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.form @@ -1,6 +1,15 @@
+ + + + + + + + + @@ -41,7 +50,7 @@ - + diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.java index ba07eaf42b..6782140e7e 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.java @@ -31,7 +31,6 @@ import java.util.logging.Level; import javax.swing.DefaultListSelectionModel; import java.awt.Color; import java.beans.PropertyChangeEvent; -import java.util.Collections; import javax.swing.JPanel; import javax.swing.JTable; import javax.swing.SwingWorker; @@ -438,11 +437,9 @@ public final class AutoIngestDashboard extends JPanel implements Observer { List pendingJobs = jobsSnapshot.getPendingJobs(); List runningJobs = jobsSnapshot.getRunningJobs(); List completedJobs = jobsSnapshot.getCompletedJobs(); - - // DLG: DONE! Do the appropriate sorts for each table. - Collections.sort(pendingJobs, new AutoIngestJob.PriorityComparator()); - runningJobs.sort(new AutoIngestJob.AlphabeticalComparator()); - + pendingJobs.sort(new AutoIngestJob.PriorityComparator()); + runningJobs.sort(new AutoIngestJob.CaseNameAndProcessingHostComparator()); + completedJobs.sort(new AutoIngestJob.ReverseCompletedDateComparator()); refreshTable(pendingJobs, pendingTable, pendingTableModel); refreshTable(runningJobs, runningTable, runningTableModel); refreshTable(completedJobs, completedTable, completedTableModel); @@ -462,27 +459,25 @@ public final class AutoIngestDashboard extends JPanel implements Observer { Path currentRow = getSelectedEntry(table, tableModel); tableModel.setRowCount(0); for (AutoIngestJob job : jobs) { - if (job.getNodeData().getVersion() < 1) { + if (job.getVersion() < 1) { // Ignore version '0' nodes since they don't carry enough // data to populate the table. 
continue; } AutoIngestJob.StageDetails status = job.getStageDetails(); - AutoIngestJobNodeData nodeData = job.getNodeData(); tableModel.addRow(new Object[]{ - nodeData.getCaseName(), // CASE - nodeData.getDataSourcePath().getFileName(), // DATA_SOURCE - job.getNodeName(), // HOST_NAME - nodeData.getManifestFileDate(), // CREATED_TIME - job.getStageStartDate(), // STARTED_TIME - nodeData.getCompletedDate(), // COMPLETED_TIME + job.getManifest().getCaseName(), // CASE + job.getManifest().getDataSourcePath().getFileName(), job.getProcessingHostName(), // HOST_NAME + job.getManifest().getDateFileCreated(), // CREATED_TIME + job.getProcessingStageStartDate(), // STARTED_TIME + job.getCompletedDate(), // COMPLETED_TIME status.getDescription(), // ACTIVITY - nodeData.getErrorsOccurred(), // STATUS + job.getErrorsOccurred(), // STATUS ((Date.from(Instant.now()).getTime()) - (status.getStartDate().getTime())), // ACTIVITY_TIME job.getCaseDirectoryPath(), // CASE_DIRECTORY_PATH - nodeData.getManifestFilePath()//DLG: , // MANIFEST_FILE_PATH - //DLG: job - }); // JOB + job.getManifest().getFilePath() // MANIFEST_FILE_PATH + //DLG: Put job object in the table + }); } setSelectedEntry(table, tableModel, currentRow); } catch (Exception ex) { @@ -547,7 +542,7 @@ public final class AutoIngestDashboard extends JPanel implements Observer { */ private enum JobsTableModelColumns { - // DLG: Go through the bundles.properties file and delete any unused key-value pairs. + // DLG: Go through the bundle.properties file and delete any unused key-value pairs. 
CASE(NbBundle.getMessage(AutoIngestDashboard.class, "AutoIngestDashboard.JobsTableModel.ColumnHeader.Case")), DATA_SOURCE(NbBundle.getMessage(AutoIngestDashboard.class, "AutoIngestDashboard.JobsTableModel.ColumnHeader.ImageFolder")), HOST_NAME(NbBundle.getMessage(AutoIngestDashboard.class, "AutoIngestDashboard.JobsTableModel.ColumnHeader.HostName")), @@ -592,7 +587,7 @@ public final class AutoIngestDashboard extends JPanel implements Observer { STAGE_TIME.getColumnHeader(), CASE_DIRECTORY_PATH.getColumnHeader(), MANIFEST_FILE_PATH.getColumnHeader() //DLG: , - //DLG: JOB.getColumnHeader() + //DLG: JOB.getColumnHeader() }; }; @@ -662,6 +657,7 @@ public final class AutoIngestDashboard extends JPanel implements Observer { // //GEN-BEGIN:initComponents private void initComponents() { + jButton1 = new javax.swing.JButton(); pendingScrollPane = new javax.swing.JScrollPane(); pendingTable = new javax.swing.JTable(); runningScrollPane = new javax.swing.JScrollPane(); @@ -676,6 +672,8 @@ public final class AutoIngestDashboard extends JPanel implements Observer { tbServicesStatusMessage = new javax.swing.JTextField(); prioritizeButton = new javax.swing.JButton(); + org.openide.awt.Mnemonics.setLocalizedText(jButton1, org.openide.util.NbBundle.getMessage(AutoIngestDashboard.class, "AutoIngestDashboard.jButton1.text")); // NOI18N + pendingTable.setModel(pendingTableModel); pendingTable.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestDashboard.class, "AutoIngestDashboard.pendingTable.toolTipText")); // NOI18N pendingTable.setAutoResizeMode(javax.swing.JTable.AUTO_RESIZE_ALL_COLUMNS); @@ -843,8 +841,6 @@ public final class AutoIngestDashboard extends JPanel implements Observer { jobsSnapshot = autoIngestMonitor.prioritizeJob(manifestFilePath); refreshTables(jobsSnapshot); } catch (AutoIngestMonitor.AutoIngestMonitorException ex) { - // DLG: DONE! 
Log the exception and do a popup with a user-friendly - // message explaining that the operation failed String errorMessage = String.format(NbBundle.getMessage(AutoIngestDashboard.class, "AutoIngestDashboard.PrioritizeError"), manifestFilePath); logger.log(Level.SEVERE, errorMessage, ex); MessageNotifyUtil.Message.error(errorMessage); @@ -856,6 +852,7 @@ public final class AutoIngestDashboard extends JPanel implements Observer { // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JScrollPane completedScrollPane; private javax.swing.JTable completedTable; + private javax.swing.JButton jButton1; private javax.swing.JLabel lbCompleted; private javax.swing.JLabel lbPending; private javax.swing.JLabel lbRunning; diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJob.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJob.java index 1bd4194ebd..d4986c82c7 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJob.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJob.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2015-2017 Basis Technology Corp. + * Copyright 2011-2017 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -22,6 +22,7 @@ import java.io.Serializable; import java.nio.file.Path; import java.nio.file.Paths; import java.time.Instant; +import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.Objects; @@ -40,23 +41,63 @@ import org.sleuthkit.autopsy.ingest.IngestJob; public final class AutoIngestJob implements Comparable, Serializable { private static final long serialVersionUID = 1L; + private static final int CURRENT_VERSION = 1; + private static final int DEFAULT_PRIORITY = 0; private static final String LOCAL_HOST_NAME = NetworkUtils.getLocalHostName(); - private final AutoIngestJobNodeData nodeData; + private final int version; + private final Manifest manifest; private final String nodeName; @GuardedBy("this") - private String caseDirectoryPath; // DLG: Replace with AutoIngestJobNodeData.caseDirectoryPath + private String caseDirectoryPath; + @GuardedBy("this") + private Integer priority; @GuardedBy("this") private Stage stage; @GuardedBy("this") private Date stageStartDate; @GuardedBy("this") + private StageDetails stageDetails; + @GuardedBy("this") transient private DataSourceProcessor dataSourceProcessor; @GuardedBy("this") transient private IngestJob ingestJob; @GuardedBy("this") - transient private boolean canceled; + transient private boolean cancelled; @GuardedBy("this") transient private boolean completed; + @GuardedBy("this") + private Date completedDate; + @GuardedBy("this") + private boolean errorsOccurred; + @GuardedBy("this") + private ProcessingStatus processingStatus; + @GuardedBy("this") + private int numberOfCrashes; + + /** + * Constructs a new automated ingest job for a manifest. All job state not + * specified in the manifest is set to the default state for a new job. + * + * @param manifest The manifest. 
+ */ + AutoIngestJob(Manifest manifest) { + this.version = CURRENT_VERSION; + this.manifest = manifest; + this.nodeName = AutoIngestJob.LOCAL_HOST_NAME; + this.caseDirectoryPath = ""; + this.priority = DEFAULT_PRIORITY; + this.stage = Stage.PENDING; + this.stageStartDate = manifest.getDateFileCreated(); + this.stageDetails = this.getStageDetails(); + this.dataSourceProcessor = null; + this.ingestJob = null; + this.cancelled = false; + this.completed = false; + this.completedDate = new Date(0); + this.errorsOccurred = false; + this.processingStatus = ProcessingStatus.PENDING; + this.numberOfCrashes = 0; + } /** * Constructs an automated ingest job for a manifest. The manifest specifies @@ -66,49 +107,34 @@ public final class AutoIngestJob implements Comparable, Serializa * Note: Manifest objects will be phased out and no longer be part of the * AutoIngestJob class. * - * @param nodeData The node data. - * @param caseDirectoryPath The path to the case directory for the job, may - * be null. - * @param nodeName If the job is in progress, the node doing the - * processing, otherwise the locla host. - * @param stage The processing stage for display purposes. + * @param nodeData The node data. */ - /* - * DLG: We need a contrucotr that takes just the node data. When we have - * added the case dierectory path, the host name and the stage data to the - * ZK nodes, we probably cna use that constructor only. I'm thinking this - * because we will creater node data with initial values when we first - * discover the nodes, and then we will continue to update it. 
- */ - AutoIngestJob(AutoIngestJobNodeData nodeData, Path caseDirectoryPath, String nodeName, Stage stage) { - this.nodeData = nodeData; - if (null != caseDirectoryPath) { - this.caseDirectoryPath = caseDirectoryPath.toString(); - } else { - this.caseDirectoryPath = ""; - } - this.nodeName = nodeName; - this.stage = stage; - this.stageStartDate = nodeData.getManifestFileDate(); + AutoIngestJob(AutoIngestJobNodeData nodeData) { + this.version = nodeData.getVersion(); + this.manifest = new Manifest(nodeData.getManifestFilePath(), nodeData.getManifestFileDate(), nodeData.getCaseName(), nodeData.getDeviceId(), nodeData.getDataSourcePath(), Collections.emptyMap()); + this.nodeName = nodeData.getProcessingHostName(); + this.caseDirectoryPath = nodeData.getCaseDirectoryPath().toString(); + this.priority = nodeData.getPriority(); + this.stage = nodeData.getProcessingStage(); + this.stageStartDate = nodeData.getProcessingStageStartDate(); + this.stageDetails = this.getStageDetails(); + this.dataSourceProcessor = null; + this.ingestJob = null; + this.cancelled = false; + this.completed = false; + this.completedDate = nodeData.getCompletedDate(); + this.errorsOccurred = nodeData.getErrorsOccurred(); + this.processingStatus = nodeData.getProcessingStatus(); + this.numberOfCrashes = nodeData.getNumberOfCrashes(); } /** - * Gets the auto ingest job node data. + * Gets the auto ingest job manifest. * - * @return The node data. + * @return The manifest. */ - AutoIngestJobNodeData getNodeData() { - return this.nodeData; - } - - /** - * Queries whether or not a case directory path has been set for this auto - * ingest job. 
- * - * @return True or false - */ - synchronized boolean hasCaseDirectoryPath() { - return (false == this.caseDirectoryPath.isEmpty()); + Manifest getManifest() { + return this.manifest; } /** @@ -135,30 +161,46 @@ public final class AutoIngestJob implements Comparable, Serializa } } - synchronized void setStage(Stage newStage) { - setStage(newStage, Date.from(Instant.now())); + /** + * Sets the priority of the job. A higher number indicates a higher + * priority. + * + * @param priority The priority. + */ + synchronized void setPriority(Integer priority) { + this.priority = priority; } - synchronized void setStage(Stage newState, Date stateStartedDate) { - if (Stage.CANCELING == this.stage && Stage.COMPLETED != newState) { + /** + * Gets the priority of the job. A higher number indicates a higher + * priority. + * + * @return The priority. + */ + synchronized Integer getPriority() { + return this.priority; + } + + synchronized void setStage(Stage newStage) { + if (Stage.CANCELLING == this.stage && Stage.COMPLETED != newStage) { return; } - this.stage = newState; - this.stageStartDate = stateStartedDate; + this.stage = newStage; + this.stageStartDate = Date.from(Instant.now()); } - synchronized Stage getStage() { + synchronized Stage getProcessingStage() { return this.stage; } - synchronized Date getStageStartDate() { - return this.stageStartDate; + synchronized Date getProcessingStageStartDate() { + return new Date(this.stageStartDate.getTime()); } synchronized StageDetails getStageDetails() { String description; Date startDate; - if (Stage.CANCELING != this.stage && null != this.ingestJob) { + if (Stage.CANCELLING != this.stage && null != this.ingestJob) { IngestJob.ProgressSnapshot progress = this.ingestJob.getSnapshot(); IngestJob.DataSourceIngestModuleHandle ingestModuleHandle = progress.runningDataSourceIngestModule(); if (null != ingestModuleHandle) { @@ -171,7 +213,7 @@ public final class AutoIngestJob implements Comparable, Serializa if 
(!ingestModuleHandle.isCancelled()) { description = ingestModuleHandle.displayName(); } else { - description = String.format(Stage.CANCELING_MODULE.getDisplayText(), ingestModuleHandle.displayName()); + description = String.format(Stage.CANCELLING_MODULE.getDisplayText(), ingestModuleHandle.displayName()); } } else { /** @@ -189,9 +231,14 @@ public final class AutoIngestJob implements Comparable, Serializa description = this.stage.getDisplayText(); startDate = this.stageStartDate; } - return new StageDetails(description, startDate); + this.stageDetails = new StageDetails(description, startDate); + return this.stageDetails; } + synchronized void setStageDetails(StageDetails stageDetails) { + this.stageDetails = stageDetails; + } + synchronized void setDataSourceProcessor(DataSourceProcessor dataSourceProcessor) { this.dataSourceProcessor = dataSourceProcessor; } @@ -205,9 +252,9 @@ public final class AutoIngestJob implements Comparable, Serializa } synchronized void cancel() { - setStage(Stage.CANCELING); - canceled = true; - nodeData.setErrorsOccurred(true); + setStage(Stage.CANCELLING); + cancelled = true; + errorsOccurred = true; if (null != dataSourceProcessor) { dataSourceProcessor.cancel(); } @@ -217,7 +264,7 @@ public final class AutoIngestJob implements Comparable, Serializa } synchronized boolean isCanceled() { - return canceled; + return cancelled; } synchronized void setCompleted() { @@ -229,10 +276,68 @@ public final class AutoIngestJob implements Comparable, Serializa return completed; } - String getNodeName() { + /** + * Sets the date the job was completed, with or without cancellation or + * errors. + * + * @param completedDate The completion date. + */ + synchronized void setCompletedDate(Date completedDate) { + this.completedDate = new Date(completedDate.getTime()); + } + + /** + * Gets the date the job was completed, with or without cancellation or + * errors. + * + * @return True or false. 
+ */ + synchronized Date getCompletedDate() { + return new Date(completedDate.getTime()); + } + + /** + * Sets whether or not errors occurred during the processing of the job. + * + * @param errorsOccurred True or false; + */ + synchronized void setErrorsOccurred(boolean errorsOccurred) { + this.errorsOccurred = errorsOccurred; + } + + /** + * Queries whether or not errors occurred during the processing of the job. + * + * @return True or false. + */ + synchronized boolean getErrorsOccurred() { + return this.errorsOccurred; + } + + synchronized String getProcessingHostName() { return nodeName; } + int getVersion() { + return this.version; + } + + synchronized ProcessingStatus getProcessingStatus() { + return this.processingStatus; + } + + synchronized void setProcessingStatus(ProcessingStatus processingStatus) { + this.processingStatus = processingStatus; + } + + synchronized int getNumberOfCrashes() { + return this.numberOfCrashes; + } + + synchronized void setNumberOfCrashes(int numberOfCrashes) { + this.numberOfCrashes = numberOfCrashes; + } + @Override public boolean equals(Object obj) { if (!(obj instanceof AutoIngestJob)) { @@ -241,11 +346,7 @@ public final class AutoIngestJob implements Comparable, Serializa if (obj == this) { return true; } - - Path manifestPath1 = this.getNodeData().getManifestFilePath(); - Path manifestPath2 = ((AutoIngestJob) obj).getNodeData().getManifestFilePath(); - - return manifestPath1.equals(manifestPath2); + return this.getManifest().getFilePath().equals(((AutoIngestJob) obj).getManifest().getFilePath()); } @Override @@ -256,29 +357,20 @@ public final class AutoIngestJob implements Comparable, Serializa @Override public int compareTo(AutoIngestJob o) { - Date date1 = this.getNodeData().getManifestFileDate(); - Date date2 = o.getNodeData().getManifestFileDate(); - - return -date1.compareTo(date2); + return -this.getManifest().getDateFileCreated().compareTo(o.getManifest().getDateFileCreated()); } - // DLG: Add a toString override 
- @Override - public String toString() { - // DLG: FINISH ME! - return ""; - } - /** * Custom comparator that allows us to sort List on reverse * chronological date modified (descending) */ - static class ReverseDateCompletedComparator implements Comparator { + static class ReverseCompletedDateComparator implements Comparator { @Override public int compare(AutoIngestJob o1, AutoIngestJob o2) { - return -o1.getStageStartDate().compareTo(o2.getStageStartDate()); + return -o1.getCompletedDate().compareTo(o2.getCompletedDate()); } + } /** @@ -288,10 +380,7 @@ public final class AutoIngestJob implements Comparable, Serializa @Override public int compare(AutoIngestJob job, AutoIngestJob anotherJob) { - Integer priority1 = job.getNodeData().getPriority(); - Integer priority2 = anotherJob.getNodeData().getPriority(); - - return -priority1.compareTo(priority2); + return -(job.getPriority().compareTo(anotherJob.getPriority())); } } @@ -301,21 +390,29 @@ public final class AutoIngestJob implements Comparable, Serializa * alphabetically except for jobs for the current host, which are placed at * the top of the list. 
*/ - static class AlphabeticalComparator implements Comparator { + static class CaseNameAndProcessingHostComparator implements Comparator { @Override public int compare(AutoIngestJob o1, AutoIngestJob o2) { - if (o1.getNodeName().equalsIgnoreCase(LOCAL_HOST_NAME)) { + if (o1.getProcessingHostName().equalsIgnoreCase(LOCAL_HOST_NAME)) { return -1; // o1 is for current case, float to top - } else if (o2.getNodeName().equalsIgnoreCase(LOCAL_HOST_NAME)) { + } else if (o2.getProcessingHostName().equalsIgnoreCase(LOCAL_HOST_NAME)) { return 1; // o2 is for current case, float to top } else { - String caseName1 = o1.getNodeData().getCaseName(); - String caseName2 = o2.getNodeData().getCaseName(); - - return caseName1.compareToIgnoreCase(caseName2); + return o1.getManifest().getCaseName().compareToIgnoreCase(o2.getManifest().getCaseName()); } } + + } + + /** + * Processing status for the auto ingest job for the manifest. + */ + enum ProcessingStatus { + PENDING, + PROCESSING, + COMPLETED, + DELETED } enum Stage { @@ -330,8 +427,8 @@ public final class AutoIngestJob implements Comparable, Serializa ANALYZING_DATA_SOURCE("Analyzing data source"), ANALYZING_FILES("Analyzing files"), EXPORTING_FILES("Exporting files"), - CANCELING_MODULE("Canceling module"), - CANCELING("Canceling"), + CANCELLING_MODULE("Cancelling module"), + CANCELLING("Cancelling"), COMPLETED("Completed"); private final String displayText; @@ -347,12 +444,13 @@ public final class AutoIngestJob implements Comparable, Serializa } @Immutable - static final class StageDetails { + static final class StageDetails implements Serializable { + private static final long serialVersionUID = 1L; private final String description; private final Date startDate; - private StageDetails(String description, Date startDate) { + StageDetails(String description, Date startDate) { this.description = description; this.startDate = startDate; } @@ -362,7 +460,7 @@ public final class AutoIngestJob implements Comparable, Serializa } Date 
getStartDate() { - return this.startDate; + return new Date(this.startDate.getTime()); } } diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobNodeData.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobNodeData.java index 25ea226a7f..df2e321af2 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobNodeData.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobNodeData.java @@ -18,7 +18,7 @@ */ package org.sleuthkit.autopsy.experimental.autoingest; -import java.io.Serializable; +import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; import java.nio.file.Path; import java.nio.file.Paths; @@ -26,20 +26,19 @@ import java.util.Date; import javax.lang.model.type.TypeKind; /** - * A coordination service node data transfer object for an auto ingest job. + * An object that converts auto ingest job data for an auto ingest job + * coordination service node to and from byte arrays. */ -final class AutoIngestJobNodeData implements Serializable { +final class AutoIngestJobNodeData { - private static final long serialVersionUID = 1L; - private static final int NODE_DATA_VERSION = 1; - private static final int MAX_POSSIBLE_NODE_DATA_SIZE = 131493; + private static final int CURRENT_VERSION = 1; + private static final int MAX_POSSIBLE_NODE_DATA_SIZE = 131629; private static final int DEFAULT_PRIORITY = 0; /* * Version 0 fields. */ - private final boolean coordSvcNodeDataWasSet; - private ProcessingStatus status; + private int processingStatus; private int priority; private int numberOfCrashes; private long completedDate; @@ -49,228 +48,197 @@ final class AutoIngestJobNodeData implements Serializable { * Version 1 fields. 
*/ private int version; - private String deviceId; - private String caseName; - private String caseDirectoryPath; - private long manifestFileDate; private String manifestFilePath; + private long manifestFileDate; + private String caseName; + private String deviceId; private String dataSourcePath; - private String processingStage; + private String caseDirectoryPath; + private String processingHostName; + private byte processingStage; private long processingStageStartDate; - private String processingHost; + private String processingStageDetailsDescription; + private long processingStageDetailsStartDate; - //DLG: Add caseDirectoryPath from AutoIngestJob - /* - * DLG: Rename class to AutoIngestJobNodeData - Add String - * caseDirectoryPath. Needed to locate case auto ingest log and later, for - * case deletion - * - * Add String processingStage, long processingStageStartDate, String - * processingHost fields. These three fields are needed to populate running - * jobs table; use of auto ingest job data is not enough, because there - * would be no data until a status event was received by the auto ingest - * monitor. - * - * Update the AutoIngestManager code that creates ZK nodes for auto ingest - * jobs to write the new fields described above to new nodes - * - * Update the AutoIngestManager code that publishes auto ingest status - * events for the current job to update the the processing status fields - * described above in addition to publishing AutoIngestJobStatusEvents. - * Probably also need to write this data initially when a jo becomes the - * current job. - */ /** - * Constructs a coordination service node data data transfer object for an - * auto ingest manifest from the raw bytes obtained from the coordination - * service. + * Uses an auto ingest job to construct an object that converts auto ingest + * job data for an auto ingest job coordination service node to and from + * byte arrays. + * + * @param job The job. 
+ */ + AutoIngestJobNodeData(AutoIngestJob job) { + setProcessingStatus(job.getProcessingStatus()); + setPriority(job.getPriority()); + setNumberOfCrashes(numberOfCrashes); // RJCTODO + setCompletedDate(job.getCompletedDate()); + setErrorsOccurred(job.getErrorsOccurred()); + this.version = CURRENT_VERSION; + Manifest manifest = job.getManifest(); + setManifestFilePath(manifest.getFilePath()); + setManifestFileDate(manifest.getDateFileCreated()); + setCaseName(manifest.getCaseName()); + setDeviceId(manifest.getDeviceId()); + setDataSourcePath(manifest.getDataSourcePath()); + setCaseDirectoryPath(job.getCaseDirectoryPath()); + setProcessingHostName(job.getProcessingHostName()); + setProcessingStage(job.getProcessingStage()); + setProcessingStageStartDate(job.getProcessingStageStartDate()); + setProcessingStageDetails(job.getStageDetails()); + } + + /** + * Uses a coordination service node data to construct an object that + * converts auto ingest job data for an auto ingest job coordination service + * node to and from byte arrays. * * @param nodeData The raw bytes received from the coordination service. */ - AutoIngestJobNodeData(byte[] nodeData) throws AutoIngestJobNodeDataException { + AutoIngestJobNodeData(byte[] nodeData) throws InvalidDataException { + if (null == nodeData || nodeData.length == 0) { + throw new InvalidDataException(null == nodeData ? "Null nodeData byte array" : "Zero-length nodeData byte array"); + } + + /* + * Set default values for all fields. 
+ */ + this.processingStatus = AutoIngestJob.ProcessingStatus.PENDING.ordinal(); + this.priority = DEFAULT_PRIORITY; + this.numberOfCrashes = 0; + this.completedDate = 0L; + this.errorsOccurred = false; + this.version = CURRENT_VERSION; + this.manifestFilePath = ""; + this.manifestFileDate = 0L; + this.caseName = ""; + this.deviceId = ""; + this.dataSourcePath = ""; + this.caseDirectoryPath = ""; + this.processingHostName = ""; + this.processingStage = (byte) AutoIngestJob.Stage.PENDING.ordinal(); + this.processingStageStartDate = 0L; + this.processingStageDetailsDescription = ""; + this.processingStageDetailsStartDate = 0L; + + /* + * Get fields from node data. + */ ByteBuffer buffer = ByteBuffer.wrap(nodeData); - this.coordSvcNodeDataWasSet = buffer.hasRemaining(); - if (this.coordSvcNodeDataWasSet) { - int rawStatus = buffer.getInt(); - if (ProcessingStatus.PENDING.ordinal() == rawStatus) { - this.status = ProcessingStatus.PENDING; - } else if (ProcessingStatus.PROCESSING.ordinal() == rawStatus) { - this.status = ProcessingStatus.PROCESSING; - } else if (ProcessingStatus.COMPLETED.ordinal() == rawStatus) { - this.status = ProcessingStatus.COMPLETED; - } else if (ProcessingStatus.DELETED.ordinal() == rawStatus) { - this.status = ProcessingStatus.DELETED; + try { + if (buffer.hasRemaining()) { + /* + * Get version 0 fields. 
+ */ + this.processingStatus = buffer.getInt(); + this.priority = buffer.getInt(); + this.numberOfCrashes = buffer.getInt(); + this.completedDate = buffer.getLong(); + int errorFlag = buffer.getInt(); + this.errorsOccurred = (1 == errorFlag); } - this.priority = buffer.getInt(); - this.numberOfCrashes = buffer.getInt(); - this.completedDate = buffer.getLong(); - int errorFlag = buffer.getInt(); - this.errorsOccurred = (1 == errorFlag); - } else { - this.status = ProcessingStatus.PENDING; - this.priority = DEFAULT_PRIORITY; - this.numberOfCrashes = 0; - this.completedDate = 0L; - this.errorsOccurred = false; - } - if (buffer.hasRemaining()) { - /* - * There are more than 24 bytes in the buffer, so we assume the - * version is greater than '0'. - */ - this.version = buffer.getInt(); - if (this.version > NODE_DATA_VERSION) { - throw new AutoIngestJobNodeDataException(String.format("Node data version %d is not suppored.", this.version)); + if (buffer.hasRemaining()) { + /* + * Get version 1 fields. 
+ */ + this.version = buffer.getInt(); + this.deviceId = getStringFromBuffer(buffer, TypeKind.BYTE); + this.caseName = getStringFromBuffer(buffer, TypeKind.BYTE); + this.caseDirectoryPath = getStringFromBuffer(buffer, TypeKind.SHORT); + this.manifestFileDate = buffer.getLong(); + this.manifestFilePath = getStringFromBuffer(buffer, TypeKind.SHORT); + this.dataSourcePath = getStringFromBuffer(buffer, TypeKind.SHORT); + this.processingStage = buffer.get(); + this.processingStageStartDate = buffer.getLong(); + this.processingStageDetailsDescription = getStringFromBuffer(buffer, TypeKind.BYTE); + this.processingStageDetailsStartDate = buffer.getLong();; + this.processingHostName = getStringFromBuffer(buffer, TypeKind.SHORT); } - this.deviceId = getStringFromBuffer(buffer, TypeKind.BYTE); - this.caseName = getStringFromBuffer(buffer, TypeKind.BYTE); - //DLG: this.caseDirectoryPath = getStringFromBuffer(buffer, TypeKind.SHORT); - this.manifestFileDate = buffer.getLong(); - this.manifestFilePath = getStringFromBuffer(buffer, TypeKind.SHORT); - this.dataSourcePath = getStringFromBuffer(buffer, TypeKind.SHORT); - //DLG: this.processingStage = getStringFromBuffer(buffer, TypeKind.BYTE); - //DLG: this.processingStageStartDate = buffer.getLong(); - //DLG: this.processingHost = getStringFromBuffer(buffer, TypeKind.SHORT); - } else { - this.version = 0; - this.deviceId = ""; - this.caseName = ""; - this.caseDirectoryPath = ""; - this.manifestFileDate = 0L; - this.manifestFilePath = ""; - this.dataSourcePath = ""; - this.processingStage = ""; - this.processingStageStartDate = 0L; - this.processingHost = ""; + + } catch (BufferUnderflowException ex) { + throw new InvalidDataException("Node data is incomplete", ex); } } /** - * Constructs a coordination service node data data transfer object for an - * auto ingest manifest from values provided by the auto ingest system. + * Gets the processing status of the job. 
* - * @param manifest The manifest - * @param status The processing status of the manifest. - * @param priority The priority of the manifest. - * @param numberOfCrashes The number of times auto ingest jobs for the - * manifest have crashed during processing. - * @param completedDate The date the auto ingest job for the manifest was - * completed. - * @param errorsOccurred Boolean to determine if errors have occurred. + * @return The processing status. */ - AutoIngestJobNodeData(Manifest manifest, ProcessingStatus status, int priority, int numberOfCrashes, Date completedDate, boolean errorOccurred) { - this.coordSvcNodeDataWasSet = false; - this.status = status; - this.priority = priority; - this.numberOfCrashes = numberOfCrashes; - this.completedDate = completedDate.getTime(); - this.errorsOccurred = errorOccurred; - - this.version = NODE_DATA_VERSION; - this.deviceId = manifest.getDeviceId(); - this.caseName = manifest.getCaseName(); - this.manifestFileDate = manifest.getDateFileCreated().getTime(); - this.manifestFilePath = manifest.getFilePath().toString(); - this.dataSourcePath = manifest.getDataSourcePath().toString(); + AutoIngestJob.ProcessingStatus getProcessingStatus() { + return AutoIngestJob.ProcessingStatus.values()[this.processingStatus]; } /** - * Indicates whether or not the coordination service node data was set, - * i.e., this object was constructed from raw bytes from the ccordination - * service node for the manifest. + * Sets the processing status of the job. * - * @return True or false. + * @param processingSatus The processing status. */ - boolean coordSvcNodeDataWasSet() { - return this.coordSvcNodeDataWasSet; + void setProcessingStatus(AutoIngestJob.ProcessingStatus processingStatus) { + this.processingStatus = processingStatus.ordinal(); } /** - * Gets the processing status of the manifest + * Gets the priority of the job. * - * @return The processing status of the manifest. 
- */ - ProcessingStatus getStatus() { - return this.status; - } - - /** - * Sets the processing status of the manifest - * - * @param status The processing status of the manifest. - */ - void setStatus(ProcessingStatus status) { - this.status = status; - } - - /** - * Gets the priority of the manifest. - * - * @return The priority of the manifest. + * @return The priority. */ int getPriority() { return this.priority; } /** - * Sets the priority of the manifest. A higher number indicates a higheer + * Sets the priority of the job. A higher number indicates a higheer * priority. * - * @param priority The priority of the manifest. + * @param priority The priority. */ void setPriority(int priority) { this.priority = priority; } /** - * Gets the number of times auto ingest jobs for the manifest have crashed - * during processing. + * Gets the number of times the job has crashed during processing. * - * @return The number of times auto ingest jobs for the manifest have - * crashed during processing. + * @return The number of crashes. */ int getNumberOfCrashes() { return this.numberOfCrashes; } /** - * Sets the number of times auto ingest jobs for the manifest have crashed - * during processing. + * Sets the number of times the job has crashed during processing. * - * @param numberOfCrashes The number of times auto ingest jobs for the - * manifest have crashed during processing. + * @param numberOfCrashes The number of crashes. */ void setNumberOfCrashes(int numberOfCrashes) { this.numberOfCrashes = numberOfCrashes; } /** - * Gets the date the auto ingest job for the manifest was completed. + * Gets the date the job was completed. A completion date equal to the epoch + * (January 1, 1970, 00:00:00 GMT), i.e., Date.getTime() returns 0L, + * indicates the job has not been completed. * - * @return The date the auto ingest job for the manifest was completed. The - * epoch (January 1, 1970, 00:00:00 GMT) indicates the date is not - * set, i.e., Date.getTime() returns 0L. 
+ * @return The job completion date. */ Date getCompletedDate() { return new Date(this.completedDate); } /** - * Sets the date the auto ingest job for the manifest was completed. + * Sets the date the job was completed. A completion date equal to the epoch + * (January 1, 1970, 00:00:00 GMT), i.e., Date.getTime() returns 0L, + * indicates the job has not been completed. * - * @param completedDate The date the auto ingest job for the manifest was - * completed. Use the epoch (January 1, 1970, 00:00:00 - * GMT) to indicate the date is not set, i.e., new - * Date(0L). + * @param completedDate The job completion date. */ void setCompletedDate(Date completedDate) { this.completedDate = completedDate.getTime(); } /** - * Queries whether or not any errors occurred during the processing of the - * auto ingest job for the manifest. + * Gets whether or not any errors occurred during the processing of the job. * * @return True or false. */ @@ -279,8 +247,7 @@ final class AutoIngestJobNodeData implements Serializable { } /** - * Sets whether or not any errors occurred during the processing of the auto - * ingest job for the manifest. + * Sets whether or not any errors occurred during the processing of job. * * @param errorsOccurred True or false. */ @@ -289,25 +256,17 @@ final class AutoIngestJobNodeData implements Serializable { } /** - * Get the node data version. + * Gets the node data version number. * - * @return The node data version. + * @return The version number. */ int getVersion() { return this.version; } /** - * Set the node data version. - * - * @param version The node data version. - */ - void setVersion(int version) { - this.version = version; - } - - /** - * Get the device ID. + * Gets the device ID of the device associated with the data source for the + * job. * * @return The device ID. */ @@ -316,7 +275,8 @@ final class AutoIngestJobNodeData implements Serializable { } /** - * Set the device ID. 
+ * Sets the device ID of the device associated with the data source for the + * job. * * @param deviceId The device ID. */ @@ -325,7 +285,7 @@ final class AutoIngestJobNodeData implements Serializable { } /** - * Get the case name. + * Gets the case name. * * @return The case name. */ @@ -334,7 +294,7 @@ final class AutoIngestJobNodeData implements Serializable { } /** - * Set the case name. + * Sets the case name. * * @param caseName The case name. */ @@ -342,12 +302,37 @@ final class AutoIngestJobNodeData implements Serializable { this.caseName = caseName; } + /** + * Sets the path to the case directory of the case associated with the job. + * + * @param caseDirectoryPath The path to the case directory. + */ + synchronized void setCaseDirectoryPath(Path caseDirectoryPath) { + if (caseDirectoryPath == null) { + this.caseDirectoryPath = ""; + } else { + this.caseDirectoryPath = caseDirectoryPath.toString(); + } + } + + /** + * Gets the path to the case directory of the case associated with the job. + * + * @return The case directory path or null if the case directory has not + * been created yet. + */ + synchronized Path getCaseDirectoryPath() { + if (!caseDirectoryPath.isEmpty()) { + return Paths.get(caseDirectoryPath); + } else { + return null; + } + } + /** * Gets the date the manifest was created. * - * @return The date the manifest was created. The epoch (January 1, 1970, - * 00:00:00 GMT) indicates the date is not set, i.e., Date.getTime() - * returns 0L. + * @return The date the manifest was created. */ Date getManifestFileDate() { return new Date(this.manifestFileDate); @@ -356,16 +341,14 @@ final class AutoIngestJobNodeData implements Serializable { /** * Sets the date the manifest was created. * - * @param manifestFileDate The date the manifest was created. Use the epoch - * (January 1, 1970, 00:00:00 GMT) to indicate the - * date is not set, i.e., new Date(0L). + * @param manifestFileDate The date the manifest was created. 
*/ void setManifestFileDate(Date manifestFileDate) { this.manifestFileDate = manifestFileDate.getTime(); } /** - * Get the manifest file path. + * Gets the manifest file path. * * @return The manifest file path. */ @@ -374,7 +357,7 @@ final class AutoIngestJobNodeData implements Serializable { } /** - * Set the manifest file path. + * Sets the manifest file path. * * @param manifestFilePath The manifest file path. */ @@ -387,7 +370,7 @@ final class AutoIngestJobNodeData implements Serializable { } /** - * Get the data source path. + * Gets the path of the data source for the job. * * @return The data source path. */ @@ -396,7 +379,7 @@ final class AutoIngestJobNodeData implements Serializable { } /** - * Get the file name portion of the data source path. + * Get the file name portion of the path of the data source for the job. * * @return The data source file name. */ @@ -405,7 +388,7 @@ final class AutoIngestJobNodeData implements Serializable { } /** - * Set the data source path. + * Sets the path of the data source for the job. * * @param dataSourcePath The data source path. */ @@ -418,16 +401,91 @@ final class AutoIngestJobNodeData implements Serializable { } /** - * Gets the node data as raw bytes that can be sent to the coordination + * Get the processing stage of the job. + * + * @return The processing stage. + */ + AutoIngestJob.Stage getProcessingStage() { + return AutoIngestJob.Stage.values()[this.processingStage]; + } + + /** + * Sets the processing stage job. + * + * @param processingStage The processing stage. + */ + void setProcessingStage(AutoIngestJob.Stage processingStage) { + this.processingStage = (byte) processingStage.ordinal(); + } + + /** + * Gets the processing stage start date. + * + * @return The processing stage start date. + */ + Date getProcessingStageStartDate() { + return new Date(this.processingStageStartDate); + } + + /** + * Sets the processing stage start date. 
+ * + * @param processingStageStartDate The processing stage start date. + */ + void setProcessingStageStartDate(Date processingStageStartDate) { + this.processingStageStartDate = processingStageStartDate.getTime(); + } + + /** + * Get the processing stage details. + * + * @return A processing stage details object. + */ + AutoIngestJob.StageDetails getProcessingStageDetails() { + return new AutoIngestJob.StageDetails(this.processingStageDetailsDescription, new Date(this.processingStageDetailsStartDate)); + } + + /** + * Sets the details of the current processing stage. + * + * @param stageDetails A stage details object. + */ + void setProcessingStageDetails(AutoIngestJob.StageDetails stageDetails) { + this.processingStageDetailsDescription = stageDetails.getDescription(); + this.processingStageDetailsStartDate = stageDetails.getStartDate().getTime(); + } + + /** + * Gets the processing host name, may be the empty string. + * + * @return The processing host. The empty string if the job is not currently + * being processed. + */ + String getProcessingHostName() { + return this.processingHostName; + } + + /** + * Sets the processing host name. May be the empty string. + * + * @param processingHost The processing host name. The empty string if the + * job is not currently being processed. + */ + void setProcessingHostName(String processingHost) { + this.processingHostName = processingHost; + } + + /** + * Gets the node data as a byte array that can be sent to the coordination * service. * - * @return The manifest node data as a byte array. + * @return The node data as a byte array. 
*/ byte[] toArray() { ByteBuffer buffer = ByteBuffer.allocate(MAX_POSSIBLE_NODE_DATA_SIZE); // Write data (compatible with version 0) - buffer.putInt(this.status.ordinal()); + buffer.putInt(this.processingStatus); buffer.putInt(this.priority); buffer.putInt(this.numberOfCrashes); buffer.putLong(this.completedDate); @@ -440,13 +498,15 @@ final class AutoIngestJobNodeData implements Serializable { // Write data putStringIntoBuffer(deviceId, buffer, TypeKind.BYTE); putStringIntoBuffer(caseName, buffer, TypeKind.BYTE); - //DLG: putStringIntoBuffer(caseDirectoryPath, buffer, TypeKind.SHORT); + putStringIntoBuffer(caseDirectoryPath, buffer, TypeKind.SHORT); buffer.putLong(this.manifestFileDate); putStringIntoBuffer(manifestFilePath, buffer, TypeKind.SHORT); putStringIntoBuffer(dataSourcePath, buffer, TypeKind.SHORT); - //DLG: putStringIntoBuffer(processingStage, buffer, TypeKind.BYTE); - //DLG: buffer.putLong(this.processingStageStartDate); - //DLG: putStringIntoBuffer(processingHost, buffer, TypeKind.SHORT); + buffer.put(this.processingStage); + buffer.putLong(this.processingStageStartDate); + putStringIntoBuffer(this.processingStageDetailsDescription, buffer, TypeKind.BYTE); + buffer.putLong(this.processingStageDetailsStartDate); + putStringIntoBuffer(processingHostName, buffer, TypeKind.SHORT); } // Prepare the array @@ -457,6 +517,7 @@ final class AutoIngestJobNodeData implements Serializable { return array; } + // DGL: Document what is going on here and how the max buffer sie constant is calculated. private String getStringFromBuffer(ByteBuffer buffer, TypeKind lengthType) { int length = 0; String output = ""; @@ -479,6 +540,7 @@ final class AutoIngestJobNodeData implements Serializable { return output; } + // DGL: Document what is going on here and how the max buffer sie constant is calculated. 
private void putStringIntoBuffer(String stringValue, ByteBuffer buffer, TypeKind lengthType) { switch (lengthType) { case BYTE: @@ -492,14 +554,17 @@ final class AutoIngestJobNodeData implements Serializable { buffer.put(stringValue.getBytes()); } - /** - * Processing status for the auto ingest job for the manifest. - */ - enum ProcessingStatus { - PENDING, - PROCESSING, - COMPLETED, - DELETED + final static class InvalidDataException extends Exception { + + private static final long serialVersionUID = 1L; + + private InvalidDataException(String message) { + super(message); + } + + private InvalidDataException(String message, Throwable cause) { + super(message, cause); + } } } diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobNodeDataException.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobNodeDataException.java deleted file mode 100755 index 6618062e50..0000000000 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobNodeDataException.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2017 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.experimental.autoingest; - -/** - * Exception thrown when a manifest node contains incompatible data. 
- */ -public class AutoIngestJobNodeDataException extends Exception { - - /** - * Constructs an exception thrown when a manifest node contains incompatible - * data. - * - * @param message An error message. - */ - public AutoIngestJobNodeDataException(String message) { - super(message); - } - - /** - * Constructs an exception thrown when a manifest node contains incompatible - * data. - * - * @param message An error message. - * @param cause An exception that caused this exception to be thrown. - */ - public AutoIngestJobNodeDataException(String message, Throwable cause) { - super(message, cause); - } -} diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java index d5632bbfa7..c0944e9aaa 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java @@ -84,11 +84,10 @@ import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestAlertFile.AutoIng import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobLogger.AutoIngestJobLoggerException; import org.sleuthkit.autopsy.experimental.autoingest.FileExporter.FileExportException; import org.sleuthkit.autopsy.experimental.autoingest.ManifestFileParser.ManifestFileParserException; -import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobNodeData.ProcessingStatus; -import static org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobNodeData.ProcessingStatus.COMPLETED; -import static org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobNodeData.ProcessingStatus.DELETED; -import static org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobNodeData.ProcessingStatus.PENDING; -import static org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobNodeData.ProcessingStatus.PROCESSING; +import static 
org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJob.ProcessingStatus.COMPLETED; +import static org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJob.ProcessingStatus.DELETED; +import static org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJob.ProcessingStatus.PENDING; +import static org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJob.ProcessingStatus.PROCESSING; import org.sleuthkit.autopsy.experimental.configuration.AutoIngestUserPreferences; import org.sleuthkit.autopsy.experimental.configuration.SharedConfiguration; import org.sleuthkit.autopsy.experimental.configuration.SharedConfiguration.SharedConfigurationException; @@ -205,22 +204,22 @@ public final class AutoIngestManager extends Observable implements PropertyChang /** * Starts up auto ingest. * - * @throws AutoIngestManagerStartupException if there is a problem starting - * auto ingest. + * @throws AutoIngestManagerException if there is a problem starting auto + * ingest. */ - void startUp() throws AutoIngestManagerStartupException { + void startUp() throws AutoIngestManagerException { SYS_LOGGER.log(Level.INFO, "Auto ingest starting"); try { coordinationService = CoordinationService.getInstance(); } catch (CoordinationServiceException ex) { - throw new AutoIngestManagerStartupException("Failed to get coordination service", ex); + throw new AutoIngestManagerException("Failed to get coordination service", ex); } try { eventPublisher.openRemoteEventChannel(EVENT_CHANNEL_NAME); SYS_LOGGER.log(Level.INFO, "Opened auto ingest event channel"); } catch (AutopsyEventException ex) { SYS_LOGGER.log(Level.SEVERE, "Failed to open auto ingest event channel", ex); - throw new AutoIngestManagerStartupException("Failed to open auto ingest event channel", ex); + throw new AutoIngestManagerException("Failed to open auto ingest event channel", ex); } rootInputDirectory = Paths.get(AutoIngestUserPreferences.getAutoModeImageFolder()); rootOutputDirectory = 
Paths.get(AutoIngestUserPreferences.getAutoModeResultsFolder()); @@ -286,19 +285,19 @@ public final class AutoIngestManager extends Observable implements PropertyChang * @param event A job started from another auto ingest node. */ private void handleRemoteJobStartedEvent(AutoIngestJobStartedEvent event) { - String hostName = event.getJob().getNodeName(); + String hostName = event.getJob().getProcessingHostName(); hostNamesToLastMsgTime.put(hostName, Instant.now()); synchronized (jobsLock) { - Path manifestFilePath = event.getJob().getNodeData().getManifestFilePath(); + Path manifestFilePath = event.getJob().getManifest().getFilePath(); for (Iterator iterator = pendingJobs.iterator(); iterator.hasNext();) { AutoIngestJob pendingJob = iterator.next(); - if (pendingJob.getNodeData().getManifestFilePath().equals(manifestFilePath)) { + if (pendingJob.getManifest().getFilePath().equals(manifestFilePath)) { iterator.remove(); break; } } } - hostNamesToRunningJobs.put(event.getJob().getNodeName(), event.getJob()); + hostNamesToRunningJobs.put(hostName, event.getJob()); setChanged(); notifyObservers(Event.JOB_STARTED); } @@ -313,9 +312,17 @@ public final class AutoIngestManager extends Observable implements PropertyChang * @param event An job status event from another auto ingest node. 
*/ private void handleRemoteJobStatusEvent(AutoIngestJobStatusEvent event) { - String hostName = event.getJob().getNodeName(); + AutoIngestJob job = event.getJob(); + for (Iterator iterator = pendingJobs.iterator(); iterator.hasNext();) { + AutoIngestJob pendingJob = iterator.next(); + if (job.equals(pendingJob)) { + iterator.remove(); + break; + } + } + String hostName = job.getProcessingHostName(); hostNamesToLastMsgTime.put(hostName, Instant.now()); - hostNamesToRunningJobs.put(hostName, event.getJob()); + hostNamesToRunningJobs.put(hostName, job); setChanged(); notifyObservers(Event.JOB_STATUS_UPDATED); } @@ -331,7 +338,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang * @param event An job completed event from another auto ingest node. */ private void handleRemoteJobCompletedEvent(AutoIngestJobCompletedEvent event) { - String hostName = event.getJob().getNodeName(); + String hostName = event.getJob().getProcessingHostName(); hostNamesToLastMsgTime.put(hostName, Instant.now()); hostNamesToRunningJobs.remove(hostName); if (event.shouldRetry() == false) { @@ -339,7 +346,6 @@ public final class AutoIngestManager extends Observable implements PropertyChang completedJobs.add(event.getJob()); } } - //scanInputDirsNow(); setChanged(); notifyObservers(Event.JOB_COMPLETED); } @@ -462,7 +468,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang } for (AutoIngestJob job : hostNamesToRunningJobs.values()) { runningJobs.add(job); - runningJobs.sort(new AutoIngestJob.AlphabeticalComparator()); + runningJobs.sort(new AutoIngestJob.CaseNameAndProcessingHostComparator()); } } if (null != completedJobs) { @@ -516,12 +522,17 @@ public final class AutoIngestManager extends Observable implements PropertyChang jobProcessingTask.requestResume(); } + /** + */ /** * Bumps the priority of all pending ingest jobs for a specified case. * * @param caseName The name of the case to be prioritized. 
+ * + * @throws AutoIngestManagerException If there is an error bumping the + * priority of the jobs for the case. */ - void prioritizeCase(final String caseName) { + void prioritizeCase(final String caseName) throws AutoIngestManagerException { if (state != State.RUNNING) { return; @@ -531,29 +542,22 @@ public final class AutoIngestManager extends Observable implements PropertyChang int maxPriority = 0; synchronized (jobsLock) { for (AutoIngestJob job : pendingJobs) { - if (job.getNodeData().getPriority() > maxPriority) { - maxPriority = job.getNodeData().getPriority(); + if (job.getPriority() > maxPriority) { + maxPriority = job.getPriority(); } - if (job.getNodeData().getCaseName().equals(caseName)) { + if (job.getManifest().getCaseName().equals(caseName)) { prioritizedJobs.add(job); } } if (!prioritizedJobs.isEmpty()) { ++maxPriority; for (AutoIngestJob job : prioritizedJobs) { - String manifestNodePath = job.getNodeData().getManifestFilePath().toString(); try { - AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath)); - nodeData.setPriority(maxPriority); - coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath, nodeData.toArray()); - } catch (AutoIngestJobNodeDataException ex) { - SYS_LOGGER.log(Level.WARNING, String.format("Unable to use node data for %s", manifestNodePath), ex); - } catch (CoordinationServiceException ex) { - SYS_LOGGER.log(Level.SEVERE, String.format("Coordination service error while prioritizing %s", manifestNodePath), ex); - } catch (InterruptedException ex) { - SYS_LOGGER.log(Level.SEVERE, "Unexpected interrupt while updating coordination service node data for {0}", manifestNodePath); + this.updateCoordinationServiceNode(job); + job.setPriority(maxPriority); + } catch (CoordinationServiceException | InterruptedException ex) { + throw new AutoIngestManagerException("Error updating case priority", ex); } 
- job.getNodeData().setPriority(maxPriority); } } @@ -571,8 +575,11 @@ public final class AutoIngestManager extends Observable implements PropertyChang * Bumps the priority of an auto ingest job. * * @param manifestPath The manifest file path for the job to be prioritized. + * + * @throws AutoIngestManagerException If there is an error bumping the + * priority of the job. */ - void prioritizeJob(Path manifestPath) { + void prioritizeJob(Path manifestPath) throws AutoIngestManagerException { if (state != State.RUNNING) { return; } @@ -580,36 +587,38 @@ public final class AutoIngestManager extends Observable implements PropertyChang int maxPriority = 0; AutoIngestJob prioritizedJob = null; synchronized (jobsLock) { + /* + * Find the job in the pending jobs list and record the highest + * existing priority. + */ for (AutoIngestJob job : pendingJobs) { - if (job.getNodeData().getPriority() > maxPriority) { - maxPriority = job.getNodeData().getPriority(); + if (job.getPriority() > maxPriority) { + maxPriority = job.getPriority(); } - if (job.getNodeData().getManifestFilePath().equals(manifestPath)) { + if (job.getManifest().getFilePath().equals(manifestPath)) { prioritizedJob = job; } } + + /* + * Bump the priority by one and update the coordination service node + * data for the job. 
+ */ if (null != prioritizedJob) { ++maxPriority; - String manifestNodePath = prioritizedJob.getNodeData().getManifestFilePath().toString(); try { - AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath)); - nodeData.setPriority(maxPriority); - coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath, nodeData.toArray()); - } catch (AutoIngestJobNodeDataException ex) { - SYS_LOGGER.log(Level.WARNING, String.format("Unable to use node data for %s", manifestPath), ex); - } catch (CoordinationServiceException ex) { - SYS_LOGGER.log(Level.SEVERE, String.format("Coordination service error while prioritizing %s", manifestNodePath), ex); - } catch (InterruptedException ex) { - SYS_LOGGER.log(Level.SEVERE, "Unexpected interrupt while updating coordination service node data for {0}", manifestNodePath); + this.updateCoordinationServiceNode(prioritizedJob); + } catch (CoordinationServiceException | InterruptedException ex) { + throw new AutoIngestManagerException("Error updating job priority", ex); } - prioritizedJob.getNodeData().setPriority(maxPriority); + prioritizedJob.setPriority(maxPriority); } Collections.sort(pendingJobs, new AutoIngestJob.PriorityComparator()); } if (null != prioritizedJob) { - final String caseName = prioritizedJob.getNodeData().getCaseName(); + final String caseName = prioritizedJob.getManifest().getCaseName(); new Thread(() -> { eventPublisher.publishRemotely(new AutoIngestCasePrioritizedEvent(LOCAL_HOST_NAME, caseName)); }).start(); @@ -625,25 +634,29 @@ public final class AutoIngestManager extends Observable implements PropertyChang void reprocessJob(Path manifestPath) { AutoIngestJob completedJob = null; synchronized (jobsLock) { + /* + * Find the job in the completed jobs list. 
+ */ for (Iterator iterator = completedJobs.iterator(); iterator.hasNext();) { AutoIngestJob job = iterator.next(); - if (job.getNodeData().getManifestFilePath().equals(manifestPath)) { + if (job.getManifest().getFilePath().equals(manifestPath)) { completedJob = job; iterator.remove(); break; } } + /* + * Add the job to the pending jobs queue and update the coordination + * service node data for the job. + */ if (null != completedJob && null != completedJob.getCaseDirectoryPath()) { try { - AutoIngestJobNodeData nodeData = completedJob.getNodeData(); - nodeData.setStatus(PENDING); - nodeData.setPriority(DEFAULT_JOB_PRIORITY); - nodeData.setNumberOfCrashes(0); - nodeData.setCompletedDate(new Date(0)); - nodeData.setErrorsOccurred(true); - coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString(), nodeData.toArray()); - pendingJobs.add(new AutoIngestJob(nodeData, completedJob.getCaseDirectoryPath(), LOCAL_HOST_NAME, AutoIngestJob.Stage.PENDING)); + completedJob.setErrorsOccurred(false); + completedJob.setCompletedDate(new Date(0)); + completedJob.setPriority(DEFAULT_JOB_PRIORITY); + updateCoordinationServiceNode(completedJob); + pendingJobs.add(completedJob); } catch (CoordinationServiceException ex) { SYS_LOGGER.log(Level.SEVERE, String.format("Coordination service error while reprocessing %s", manifestPath), ex); completedJobs.add(completedJob); @@ -734,10 +747,12 @@ public final class AutoIngestManager extends Observable implements PropertyChang for (Path manifestPath : manifestPaths) { try { AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString())); - nodeData.setStatus(AutoIngestJobNodeData.ProcessingStatus.DELETED); - coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString(), nodeData.toArray()); - } catch (AutoIngestJobNodeDataException ex) { - SYS_LOGGER.log(Level.WARNING, 
String.format("Unable to use node data for %s", manifestPath), ex); + AutoIngestJob deletedJob = new AutoIngestJob(nodeData); + deletedJob.setProcessingStatus(AutoIngestJob.ProcessingStatus.DELETED); + this.updateCoordinationServiceNode(deletedJob); + } catch (AutoIngestJobNodeData.InvalidDataException ex) { + SYS_LOGGER.log(Level.WARNING, String.format("Invalid auto ingest job node data for %s", manifestPath), ex); + return CaseDeletionResult.PARTIALLY_DELETED; } catch (InterruptedException | CoordinationServiceException ex) { SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set delete flag on manifest data for %s for case %s at %s", manifestPath, caseName, caseDirectoryPath), ex); return CaseDeletionResult.PARTIALLY_DELETED; @@ -794,7 +809,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang private void removeJobs(Set manifestPaths, List jobs) { for (Iterator iterator = jobs.iterator(); iterator.hasNext();) { AutoIngestJob job = iterator.next(); - Path manifestPath = job.getNodeData().getManifestFilePath(); + Path manifestPath = job.getManifest().getFilePath(); if (manifestPaths.contains(manifestPath)) { iterator.remove(); } @@ -815,7 +830,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang synchronized (jobsLock) { if (null != currentJob) { currentJob.cancel(); - SYS_LOGGER.log(Level.INFO, "Cancelling automated ingest for manifest {0}", currentJob.getNodeData().getManifestFilePath()); + SYS_LOGGER.log(Level.INFO, "Cancelling automated ingest for manifest {0}", currentJob.getManifest().getFilePath()); } } } @@ -834,15 +849,31 @@ public final class AutoIngestManager extends Observable implements PropertyChang if (null != ingestJob) { IngestJob.DataSourceIngestModuleHandle moduleHandle = ingestJob.getSnapshot().runningDataSourceIngestModule(); if (null != moduleHandle) { - currentJob.setStage(AutoIngestJob.Stage.CANCELING_MODULE); + 
currentJob.setStage(AutoIngestJob.Stage.CANCELLING_MODULE); moduleHandle.cancel(); - SYS_LOGGER.log(Level.INFO, "Cancelling {0} module for manifest {1}", new Object[]{moduleHandle.displayName(), currentJob.getNodeData().getManifestFilePath()}); + SYS_LOGGER.log(Level.INFO, "Cancelling {0} module for manifest {1}", new Object[]{moduleHandle.displayName(), currentJob.getManifest().getFilePath()}); } } } } } + /** + * Sets the coordination service node data for an auto ingest job. + * + * Note that a new auto ingest node data object will be created from the job + * passed in. Thus, if the data version of the node has changed, the node + * will be "upgraded" as well as updated. + * + * @param job The auto ingest job. + */ + void updateCoordinationServiceNode(AutoIngestJob job) throws CoordinationServiceException, InterruptedException { + AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(job); + String manifestNodePath = job.getManifest().getFilePath().toString(); + byte[] rawData = nodeData.toArray(); + coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath, rawData); + } + /** * A task that submits an input directory scan task to the input directory * scan task executor. @@ -1025,55 +1056,37 @@ public final class AutoIngestManager extends Observable implements PropertyChang */ try { byte[] rawData = coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString()); - if (null != rawData) { + if (null != rawData && rawData.length > 0) { try { AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(rawData); - if(nodeData.getVersion() < 1) { - /* - * A version '0' node doesn't have a sufficient - * amount of data to populate the jobs tables, - * so we will it here. 
- */ - nodeData.setDeviceId(manifest.getDeviceId()); - nodeData.setCaseName(manifest.getCaseName()); - nodeData.setManifestFileDate(manifest.getDateFileCreated()); - nodeData.setManifestFilePath(manifest.getFilePath()); - nodeData.setDataSourcePath(manifest.getDataSourcePath()); - nodeData.setVersion(1); - rawData = nodeData.toArray(); - coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString(), rawData); + AutoIngestJob.ProcessingStatus processingStatus = nodeData.getProcessingStatus(); + switch (processingStatus) { + case PENDING: + addPendingJob(manifest, nodeData); + break; + case PROCESSING: + doRecoveryIfCrashed(manifest, nodeData); + break; + case COMPLETED: + addCompletedJob(manifest, nodeData); + break; + case DELETED: + /* + * Ignore jobs marked as "deleted." + */ + break; + default: + SYS_LOGGER.log(Level.SEVERE, "Unknown ManifestNodeData.ProcessingStatus"); + break; } - - if (nodeData.coordSvcNodeDataWasSet()) { - ProcessingStatus processingStatus = nodeData.getStatus(); - switch (processingStatus) { - case PENDING: - addPendingJob(nodeData); - break; - case PROCESSING: - doRecoveryIfCrashed(nodeData); - break; - case COMPLETED: - addCompletedJob(nodeData); - break; - case DELETED: - // Do nothing - we dont'want to add it to any job list or do recovery - break; - default: - SYS_LOGGER.log(Level.SEVERE, "Unknown ManifestNodeData.ProcessingStatus"); - break; - } - } else { - addNewPendingJob(manifest); - } - } catch(AutoIngestJobNodeDataException ex) { - SYS_LOGGER.log(Level.WARNING, String.format("Unable to use node data for %s", manifestPath), ex); + } catch (AutoIngestJobNodeData.InvalidDataException ex) { + SYS_LOGGER.log(Level.WARNING, String.format("Invalid auto ingest job node data for %s", manifestPath), ex); } } else { addNewPendingJob(manifest); } } catch (CoordinationServiceException ex) { - SYS_LOGGER.log(Level.SEVERE, String.format("Error getting node data for %s", manifestPath), ex); + 
SYS_LOGGER.log(Level.SEVERE, String.format("Error transmitting node data for %s", manifestPath), ex); return CONTINUE; } catch (InterruptedException ex) { Thread.currentThread().interrupt(); @@ -1089,22 +1102,49 @@ public final class AutoIngestManager extends Observable implements PropertyChang } /** - * Adds a job to process a manifest to the pending jobs queue. + * Adds an existing job to the pending jobs queue. * + * @param manifest The manifest for the job. * @param nodeData The data stored in the coordination service node for - * the manifest. + * the job. + * + * @throws InterruptedException if the thread running the input + * directory scan task is interrupted while + * blocked, i.e., if auto ingest is + * shutting down. */ - private void addPendingJob(AutoIngestJobNodeData nodeData) { - Path caseDirectory = PathUtils.findCaseDirectory(rootOutputDirectory, nodeData.getCaseName()); - nodeData.setCompletedDate(new Date(0)); - nodeData.setErrorsOccurred(false); - newPendingJobsList.add(new AutoIngestJob(nodeData, caseDirectory, LOCAL_HOST_NAME, AutoIngestJob.Stage.PENDING)); + private void addPendingJob(Manifest manifest, AutoIngestJobNodeData nodeData) throws InterruptedException { + AutoIngestJob job = new AutoIngestJob(manifest); + job.setPriority(nodeData.getPriority()); + Path caseDirectory = PathUtils.findCaseDirectory(rootOutputDirectory, manifest.getCaseName()); + job.setCaseDirectoryPath(caseDirectory); + newPendingJobsList.add(job); + + /* + * Try to upgrade/update the coordination service node data for the + * job. + * + * An exclusive lock is obtained before doing so because another + * host may have already found the job, obtained an exclusive lock, + * and started processing it. However, this locking does make it + * possible that two hosts will both try to obtain the lock to do + * the upgrade/update operation at the same time. 
If this happens, + * the host that is holding the lock will complete the + * update/upgrade operation, so there is nothing more to do. + */ + try (Lock manifestLock = coordinationService.tryGetExclusiveLock(CoordinationService.CategoryNode.MANIFESTS, manifest.getFilePath().toString())) { + if (null != manifestLock) { + AutoIngestManager.this.updateCoordinationServiceNode(job); + } + } catch (CoordinationServiceException ex) { + SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set node data for %s", manifest.getFilePath()), ex); + } } /** - * Adds a job to process a manifest to the pending jobs queue. + * Adds a new job to the pending jobs queue. * - * @param manifest The manifest. + * @param manifest The manifest for the job. * * @throws InterruptedException if the thread running the input * directory scan task is interrupted while @@ -1112,13 +1152,25 @@ public final class AutoIngestManager extends Observable implements PropertyChang * shutting down. */ private void addNewPendingJob(Manifest manifest) throws InterruptedException { - // TODO (JIRA-1960): This is something of a hack, grabbing the lock to create the node. - // Is use of Curator.create().forPath() possible instead? + /* + * Create the coordination service node data for the job. Note that + * getting the lock will create the node for the job (with no data) + * if it does not already exist. + * + * An exclusive lock is obtained before creating the node data + * because another host may have already found the job, obtained an + * exclusive lock, and started processing it. However, this locking + * does make it possible that two hosts will both try to obtain the + * lock to do the create operation at the same time. If this + * happens, the host that is locked out will not add the job to its + * pending queue for this scan of the input directory, but it will + * be picked up on the next scan. 
+ */ try (Lock manifestLock = coordinationService.tryGetExclusiveLock(CoordinationService.CategoryNode.MANIFESTS, manifest.getFilePath().toString())) { if (null != manifestLock) { - AutoIngestJobNodeData newNodeData = new AutoIngestJobNodeData(manifest, PENDING, DEFAULT_JOB_PRIORITY, 0, new Date(0), false); - coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifest.getFilePath().toString(), newNodeData.toArray()); - newPendingJobsList.add(new AutoIngestJob(newNodeData, null, LOCAL_HOST_NAME, AutoIngestJob.Stage.PENDING)); + AutoIngestJob job = new AutoIngestJob(manifest); + AutoIngestManager.this.updateCoordinationServiceNode(job); + newPendingJobsList.add(job); } } catch (CoordinationServiceException ex) { SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set node data for %s", manifest.getFilePath()), ex); @@ -1133,65 +1185,87 @@ public final class AutoIngestManager extends Observable implements PropertyChang * the node that was processing the job crashed and the processing * status was not updated. * - * @param nodeData + * @param manifest The manifest for upgrading the node. + * @param nodeData The node data. * * @throws InterruptedException if the thread running the input * directory scan task is interrupted while * blocked, i.e., if auto ingest is * shutting down. */ - private void doRecoveryIfCrashed(AutoIngestJobNodeData nodeData) throws InterruptedException { - String manifestPath = nodeData.getManifestFilePath().toString(); - try { - Lock manifestLock = coordinationService.tryGetExclusiveLock(CoordinationService.CategoryNode.MANIFESTS, manifestPath); + private void doRecoveryIfCrashed(Manifest manifest, AutoIngestJobNodeData nodeData) throws InterruptedException { + /* + * Try to get an exclusive lock on the coordination service node for + * the job. If the lock cannot be obtained, another host in the auto + * ingest cluster is already doing the recovery. 
+ */ + String manifestPath = manifest.getFilePath().toString(); + try (Lock manifestLock = coordinationService.tryGetExclusiveLock(CoordinationService.CategoryNode.MANIFESTS, manifestPath)) { if (null != manifestLock) { + SYS_LOGGER.log(Level.SEVERE, "Attempting crash recovery for {0}", manifestPath); try { - if (nodeData.coordSvcNodeDataWasSet() && ProcessingStatus.PROCESSING == nodeData.getStatus()) { - SYS_LOGGER.log(Level.SEVERE, "Attempting crash recovery for {0}", manifestPath); - int numberOfCrashes = nodeData.getNumberOfCrashes(); - ++numberOfCrashes; - nodeData.setNumberOfCrashes(numberOfCrashes); - nodeData.setCompletedDate(new Date(0)); - nodeData.setErrorsOccurred(true); + /* + * Create the recovery job. + */ + AutoIngestJob job = new AutoIngestJob(nodeData); + int numberOfCrashes = job.getNumberOfCrashes(); + ++numberOfCrashes; + job.setNumberOfCrashes(numberOfCrashes); + job.setCompletedDate(new Date(0)); + Path caseDirectoryPath = PathUtils.findCaseDirectory(rootOutputDirectory, manifest.getCaseName()); + if (null != caseDirectoryPath) { + job.setCaseDirectoryPath(caseDirectoryPath); + job.setErrorsOccurred(true); + } else { + job.setErrorsOccurred(false); + } + + /* + * Update the coordination service node for the job. If + * this fails, leave the recovery to another host. 
+ */ + try { + updateCoordinationServiceNode(job); if (numberOfCrashes <= AutoIngestUserPreferences.getMaxNumTimesToProcessImage()) { - nodeData.setStatus(PENDING); - Path caseDirectoryPath = PathUtils.findCaseDirectory(rootOutputDirectory, nodeData.getCaseName()); - newPendingJobsList.add(new AutoIngestJob(nodeData, caseDirectoryPath, LOCAL_HOST_NAME, AutoIngestJob.Stage.PENDING)); - if (null != caseDirectoryPath) { - try { - AutoIngestAlertFile.create(caseDirectoryPath); - } catch (AutoIngestAlertFileException ex) { - SYS_LOGGER.log(Level.SEVERE, String.format("Error creating alert file for crashed job for %s", manifestPath), ex); - } - try { - new AutoIngestJobLogger(nodeData.getManifestFilePath(), nodeData.getDataSourceFileName(), caseDirectoryPath).logCrashRecoveryWithRetry(); - } catch (AutoIngestJobLoggerException ex) { - SYS_LOGGER.log(Level.SEVERE, String.format("Error creating case auto ingest log entry for crashed job for %s", manifestPath), ex); - } - } + newPendingJobsList.add(job); } else { - nodeData.setStatus(COMPLETED); - Path caseDirectoryPath = PathUtils.findCaseDirectory(rootOutputDirectory, nodeData.getCaseName()); - newCompletedJobsList.add(new AutoIngestJob(nodeData, caseDirectoryPath, LOCAL_HOST_NAME, AutoIngestJob.Stage.COMPLETED)); - if (null != caseDirectoryPath) { - try { - AutoIngestAlertFile.create(caseDirectoryPath); - } catch (AutoIngestAlertFileException ex) { - SYS_LOGGER.log(Level.SEVERE, String.format("Error creating alert file for crashed job for %s", manifestPath), ex); - } - try { - new AutoIngestJobLogger(nodeData.getManifestFilePath(), nodeData.getDataSourceFileName(), caseDirectoryPath).logCrashRecoveryNoRetry(); - } catch (AutoIngestJobLoggerException ex) { - SYS_LOGGER.log(Level.SEVERE, String.format("Error creating case auto ingest log entry for crashed job for %s", manifestPath), ex); - } - } + newCompletedJobsList.add(new AutoIngestJob(nodeData)); } + } catch (CoordinationServiceException ex) { + 
SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set node data for %s", manifestPath), ex); + return; + } + + /* + * Write the alert file and do the logging. + */ + if (null != caseDirectoryPath) { try { - coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath, nodeData.toArray()); - } catch (CoordinationServiceException ex) { - SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set node data for %s", manifestPath), ex); + AutoIngestAlertFile.create(nodeData.getCaseDirectoryPath()); + } catch (AutoIngestAlertFileException ex) { + SYS_LOGGER.log(Level.SEVERE, String.format("Error creating alert file for crashed job for %s", manifestPath), ex); } } + if (numberOfCrashes <= AutoIngestUserPreferences.getMaxNumTimesToProcessImage()) { + job.setProcessingStatus(AutoIngestJob.ProcessingStatus.PENDING); + if (null != caseDirectoryPath) { + try { + new AutoIngestJobLogger(manifest.getFilePath(), manifest.getDataSourceFileName(), caseDirectoryPath).logCrashRecoveryWithRetry(); + } catch (AutoIngestJobLoggerException ex) { + SYS_LOGGER.log(Level.SEVERE, String.format("Error creating case auto ingest log entry for crashed job for %s", manifestPath), ex); + } + } + } else { + job.setProcessingStatus(AutoIngestJob.ProcessingStatus.COMPLETED); + if (null != caseDirectoryPath) { + try { + new AutoIngestJobLogger(manifest.getFilePath(), manifest.getDataSourceFileName(), nodeData.getCaseDirectoryPath()).logCrashRecoveryNoRetry(); + } catch (AutoIngestJobLoggerException ex) { + SYS_LOGGER.log(Level.SEVERE, String.format("Error creating case auto ingest log entry for crashed job for %s", manifestPath), ex); + } + } + } + } finally { try { manifestLock.release(); @@ -1210,11 +1284,42 @@ public final class AutoIngestManager extends Observable implements PropertyChang * * @param nodeData The data stored in the coordination service node for * the manifest. + * @param manifest The manifest for upgrading the node. 
+ * + * @throws CoordinationServiceException + * @throws InterruptedException */ - private void addCompletedJob(AutoIngestJobNodeData nodeData) { - Path caseDirectoryPath = PathUtils.findCaseDirectory(rootOutputDirectory, nodeData.getCaseName()); + private void addCompletedJob(Manifest manifest, AutoIngestJobNodeData nodeData) throws CoordinationServiceException, InterruptedException { + Path caseDirectoryPath = PathUtils.findCaseDirectory(rootOutputDirectory, manifest.getCaseName()); if (null != caseDirectoryPath) { - newCompletedJobsList.add(new AutoIngestJob(nodeData, caseDirectoryPath, LOCAL_HOST_NAME, AutoIngestJob.Stage.COMPLETED)); + AutoIngestJob job = new AutoIngestJob(manifest); + job.setCaseDirectoryPath(caseDirectoryPath); + job.setProcessingStatus(AutoIngestJob.ProcessingStatus.COMPLETED); + job.setStage(AutoIngestJob.Stage.COMPLETED); + job.setCompletedDate(nodeData.getCompletedDate()); + job.setErrorsOccurred(true); + newCompletedJobsList.add(new AutoIngestJob(nodeData)); + + /* + * Try to upgrade/update the coordination service node data for + * the job. + * + * An exclusive lock is obtained before doing so because another + * host may have already found the job, obtained an exclusive + * lock, and started processing it. However, this locking does + * make it possible that two hosts will both try to obtain the + * lock to do the upgrade/update operation at the same time. If + * this happens, the host that is holding the lock will complete + * the update/upgrade operation, so there is nothing more to do. 
+ */ + try (Lock manifestLock = coordinationService.tryGetExclusiveLock(CoordinationService.CategoryNode.MANIFESTS, manifest.getFilePath().toString())) { + if (null != manifestLock) { + updateCoordinationServiceNode(job); + } + } catch (CoordinationServiceException ex) { + SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set node data for %s", manifest.getFilePath()), ex); + } + } else { SYS_LOGGER.log(Level.WARNING, String.format("Job completed for %s, but cannot find case directory, ignoring job", nodeData.getManifestFilePath())); } @@ -1339,7 +1444,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang if (jobProcessingTaskFuture.isCancelled()) { break; } - if (ex instanceof CoordinationServiceException) { + if (ex instanceof CoordinationServiceException || ex instanceof AutoIngestJobNodeData.InvalidDataException) { errorState = ErrorState.COORDINATION_SERVICE_ERROR; } else if (ex instanceof SharedConfigurationException) { errorState = ErrorState.SHARED_CONFIGURATION_DOWNLOAD_ERROR; @@ -1505,42 +1610,60 @@ public final class AutoIngestManager extends Observable implements PropertyChang /** * Processes jobs until the pending jobs queue is empty. * - * @throws CoordinationServiceException if there is an error - * acquiring or releasing - * coordination service locks - * or setting coordination - * service node data. - * @throws SharedConfigurationException if there is an error while - * downloading shared - * configuration. - * @throws ServicesMonitorException if there is an error - * querying the services - * monitor. - * @throws DatabaseServerDownException if the database server is - * down. - * @throws KeywordSearchServerDownException if the Solr server is down. - * @throws CaseManagementException if there is an error - * creating, opening or closing - * the case for the job. 
- * @throws AnalysisStartupException if there is an error - * starting analysis of the - * data source by the data - * source level and file level - * ingest modules. - * @throws FileExportException if there is an error - * exporting files. - * @throws AutoIngestAlertFileException if there is an error - * creating an alert file. - * @throws AutoIngestJobLoggerException if there is an error writing - * to the auto ingest log for - * the case. - * @throws InterruptedException if the thread running the - * job processing task is - * interrupted while blocked, - * i.e., if auto ingest is - * shutting down. + * @throws CoordinationServiceException if there is an + * error acquiring or + * releasing + * coordination + * service locks or + * setting + * coordination + * service node data. + * @throws SharedConfigurationException if there is an + * error while + * downloading shared + * configuration. + * @throws ServicesMonitorException if there is an + * error querying the + * services monitor. + * @throws DatabaseServerDownException if the database + * server is down. + * @throws KeywordSearchServerDownException if the Solr server + * is down. + * @throws CaseManagementException if there is an + * error creating, + * opening or closing + * the case for the + * job. + * @throws AnalysisStartupException if there is an + * error starting + * analysis of the + * data source by the + * data source level + * and file level + * ingest modules. + * @throws FileExportException if there is an + * error exporting + * files. + * @throws AutoIngestAlertFileException if there is an + * error creating an + * alert file. + * @throws AutoIngestJobLoggerException if there is an + * error writing to + * the auto ingest + * log for the case. + * @throws InterruptedException if the thread + * running the job + * processing task is + * interrupted while + * blocked, i.e., if + * auto ingest is + * shutting down. 
+ * @throws AutoIngestJobNodeData.InvalidDataException if there is an + * error constructing + * auto ingest node + * data objects. */ - private void processJobs() throws CoordinationServiceException, SharedConfigurationException, ServicesMonitorException, DatabaseServerDownException, KeywordSearchServerDownException, CaseManagementException, AnalysisStartupException, FileExportException, AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException { + private void processJobs() throws CoordinationServiceException, SharedConfigurationException, ServicesMonitorException, DatabaseServerDownException, KeywordSearchServerDownException, CaseManagementException, AnalysisStartupException, FileExportException, AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException, AutoIngestJobNodeData.InvalidDataException { SYS_LOGGER.log(Level.INFO, "Started processing pending jobs queue"); Lock manifestLock = JobProcessingTask.this.dequeueAndLockNextJob(); while (null != manifestLock) { @@ -1549,8 +1672,6 @@ public final class AutoIngestManager extends Observable implements PropertyChang return; } processJob(); - } catch (AutoIngestJobNodeDataException ex) { - SYS_LOGGER.log(Level.WARNING, String.format("Unable to use node data"), ex); } finally { manifestLock.release(); } @@ -1598,13 +1719,13 @@ public final class AutoIngestManager extends Observable implements PropertyChang synchronized (jobsLock) { manifestLock = dequeueAndLockNextJob(true); if (null != manifestLock) { - SYS_LOGGER.log(Level.INFO, "Dequeued job for {0}", currentJob.getNodeData().getManifestFilePath()); + SYS_LOGGER.log(Level.INFO, "Dequeued job for {0}", currentJob.getManifest().getFilePath()); } else { SYS_LOGGER.log(Level.INFO, "No ready job"); SYS_LOGGER.log(Level.INFO, "Checking pending jobs queue for ready job, not enforcing max 
jobs per case"); manifestLock = dequeueAndLockNextJob(false); if (null != manifestLock) { - SYS_LOGGER.log(Level.INFO, "Dequeued job for {0}", currentJob.getNodeData().getManifestFilePath()); + SYS_LOGGER.log(Level.INFO, "Dequeued job for {0}", currentJob.getManifest().getFilePath()); } else { SYS_LOGGER.log(Level.INFO, "No ready job"); } @@ -1637,7 +1758,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang Iterator iterator = pendingJobs.iterator(); while (iterator.hasNext()) { AutoIngestJob job = iterator.next(); - Path manifestPath = job.getNodeData().getManifestFilePath(); + Path manifestPath = job.getManifest().getFilePath(); manifestLock = coordinationService.tryGetExclusiveLock(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString()); if (null == manifestLock) { /* @@ -1651,11 +1772,11 @@ public final class AutoIngestManager extends Observable implements PropertyChang try { AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString())); - if (!nodeData.getStatus().equals(PENDING)) { + if (!nodeData.getProcessingStatus().equals(PENDING)) { /* * Due to a timing issue or a missed event, a - * non-pending job has ended up on the pending queue. - * Skip the job and remove it from the queue. + * non-pending job has ended up on the pending + * queue. Skip the job and remove it from the queue. 
*/ iterator.remove(); continue; @@ -1664,7 +1785,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang if (enforceMaxJobsPerCase) { int currentJobsForCase = 0; for (AutoIngestJob runningJob : hostNamesToRunningJobs.values()) { - if (0 == job.getNodeData().getCaseName().compareTo(runningJob.getNodeData().getCaseName())) { + if (0 == job.getManifest().getCaseName().compareTo(runningJob.getManifest().getCaseName())) { ++currentJobsForCase; } } @@ -1677,8 +1798,8 @@ public final class AutoIngestManager extends Observable implements PropertyChang iterator.remove(); currentJob = job; break; - } catch (AutoIngestJobNodeDataException ex) { - SYS_LOGGER.log(Level.WARNING, String.format("Unable to use node data for %s", manifestPath), ex); + } catch (AutoIngestJobNodeData.InvalidDataException ex) { + SYS_LOGGER.log(Level.WARNING, String.format("Unable to use node data for %s", manifestPath), ex); // JCTODO: Is this right? } } } @@ -1688,46 +1809,63 @@ public final class AutoIngestManager extends Observable implements PropertyChang /** * Processes and auto ingest job. * - * @throws CoordinationServiceException if there is an error - * acquiring or releasing - * coordination service locks - * or setting coordination - * service node data. - * @throws SharedConfigurationException if there is an error while - * downloading shared - * configuration. - * @throws ServicesMonitorException if there is an error - * querying the services - * monitor. - * @throws DatabaseServerDownException if the database server is - * down. - * @throws KeywordSearchServerDownException if the Solr server is down. - * @throws CaseManagementException if there is an error - * creating, opening or closing - * the case for the job. - * @throws AnalysisStartupException if there is an error - * starting analysis of the - * data source by the data - * source level and file level - * ingest modules. - * @throws FileExportException if there is an error - * exporting files. 
- * @throws AutoIngestAlertFileException if there is an error - * creating an alert file. - * @throws AutoIngestJobLoggerException if there is an error writing - * to the auto ingest log for - * the case. - * @throws InterruptedException if the thread running the - * job processing task is - * interrupted while blocked, - * i.e., if auto ingest is - * shutting down. + * @throws CoordinationServiceException if there is an + * error acquiring or + * releasing + * coordination + * service locks or + * setting + * coordination + * service node data. + * @throws SharedConfigurationException if there is an + * error while + * downloading shared + * configuration. + * @throws ServicesMonitorException if there is an + * error querying the + * services monitor. + * @throws DatabaseServerDownException if the database + * server is down. + * @throws KeywordSearchServerDownException if the Solr server + * is down. + * @throws CaseManagementException if there is an + * error creating, + * opening or closing + * the case for the + * job. + * @throws AnalysisStartupException if there is an + * error starting + * analysis of the + * data source by the + * data source level + * and file level + * ingest modules. + * @throws FileExportException if there is an + * error exporting + * files. + * @throws AutoIngestAlertFileException if there is an + * error creating an + * alert file. + * @throws AutoIngestJobLoggerException if there is an + * error writing to + * the auto ingest + * log for the case. + * @throws InterruptedException if the thread + * running the job + * processing task is + * interrupted while + * blocked, i.e., if + * auto ingest is + * shutting down. + * @throws AutoIngestJobNodeData.InvalidDataException if there is an + * error constructing + * auto ingest node + * data objects. 
*/ - private void processJob() throws CoordinationServiceException, SharedConfigurationException, ServicesMonitorException, DatabaseServerDownException, KeywordSearchServerDownException, CaseManagementException, AnalysisStartupException, FileExportException, AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException, AutoIngestJobNodeDataException { - Path manifestPath = currentJob.getNodeData().getManifestFilePath(); - AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString())); - nodeData.setStatus(PROCESSING); - coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString(), nodeData.toArray()); + private void processJob() throws CoordinationServiceException, SharedConfigurationException, ServicesMonitorException, DatabaseServerDownException, KeywordSearchServerDownException, CaseManagementException, AnalysisStartupException, FileExportException, AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException, AutoIngestJobNodeData.InvalidDataException { + Path manifestPath = currentJob.getManifest().getFilePath(); + currentJob.setProcessingStatus(AutoIngestJob.ProcessingStatus.PROCESSING); + updateCoordinationServiceNode(currentJob); SYS_LOGGER.log(Level.INFO, "Started processing of {0}", manifestPath); currentJob.setStage(AutoIngestJob.Stage.STARTING); setChanged(); @@ -1744,18 +1882,15 @@ public final class AutoIngestManager extends Observable implements PropertyChang currentJob.cancel(); } - nodeData = new AutoIngestJobNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString())); if (currentJob.isCompleted() || currentJob.isCanceled()) { - nodeData.setStatus(COMPLETED); + 
currentJob.setProcessingStatus(AutoIngestJob.ProcessingStatus.COMPLETED); Date completedDate = new Date(); - currentJob.getNodeData().setCompletedDate(completedDate); - nodeData.setCompletedDate(currentJob.getNodeData().getCompletedDate()); - nodeData.setErrorsOccurred(currentJob.getNodeData().getErrorsOccurred()); + currentJob.setCompletedDate(completedDate); } else { // The job may get retried - nodeData.setStatus(PENDING); + currentJob.setProcessingStatus(AutoIngestJob.ProcessingStatus.PENDING); } - coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString(), nodeData.toArray()); + updateCoordinationServiceNode(currentJob); boolean retry = (!currentJob.isCanceled() && !currentJob.isCompleted()); SYS_LOGGER.log(Level.INFO, "Completed processing of {0}, retry = {1}", new Object[]{manifestPath, retry}); @@ -1763,7 +1898,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang Path caseDirectoryPath = currentJob.getCaseDirectoryPath(); if (null != caseDirectoryPath) { AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log - AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(nodeData.getManifestFilePath(), nodeData.getDataSourceFileName(), caseDirectoryPath); + AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(manifestPath, currentJob.getManifest().getDataSourceFileName(), caseDirectoryPath); jobLogger.logJobCancelled(); } } @@ -1830,8 +1965,8 @@ public final class AutoIngestManager extends Observable implements PropertyChang try { Case.closeCurrentCase(); } catch (CaseActionException ex) { - AutoIngestJobNodeData nodeData = currentJob.getNodeData(); - throw new CaseManagementException(String.format("Error closing case %s for %s", nodeData.getCaseName(), nodeData.getManifestFilePath()), ex); + Manifest manifest = currentJob.getManifest(); + throw new CaseManagementException(String.format("Error closing case %s for %s", 
manifest.getCaseName(), manifest.getFilePath()), ex); } } } @@ -1849,7 +1984,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang */ private void updateConfiguration() throws SharedConfigurationException, InterruptedException { if (AutoIngestUserPreferences.getSharedConfigEnabled()) { - Path manifestPath = currentJob.getNodeData().getManifestFilePath(); + Path manifestPath = currentJob.getManifest().getFilePath(); SYS_LOGGER.log(Level.INFO, "Downloading shared configuration for {0}", manifestPath); currentJob.setStage(AutoIngestJob.Stage.UPDATING_SHARED_CONFIG); new SharedConfiguration().downloadConfiguration(); @@ -1867,7 +2002,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang * down. */ private void verifyRequiredSevicesAreRunning() throws ServicesMonitorException, DatabaseServerDownException, KeywordSearchServerDownException { - Path manifestPath = currentJob.getNodeData().getManifestFilePath(); + Path manifestPath = currentJob.getManifest().getFilePath(); SYS_LOGGER.log(Level.INFO, "Checking services availability for {0}", manifestPath); currentJob.setStage(AutoIngestJob.Stage.CHECKING_SERVICES); if (!isServiceUp(ServicesMonitor.Service.REMOTE_CASE_DATABASE.toString())) { @@ -1913,9 +2048,9 @@ public final class AutoIngestManager extends Observable implements PropertyChang * if auto ingest is shutting down. 
*/ private Case openCase() throws CoordinationServiceException, CaseManagementException, InterruptedException { - AutoIngestJobNodeData nodeData = currentJob.getNodeData(); - String caseName = nodeData.getCaseName(); - SYS_LOGGER.log(Level.INFO, "Opening case {0} for {1}", new Object[]{caseName, nodeData.getManifestFilePath()}); + Manifest manifest = currentJob.getManifest(); + String caseName = manifest.getCaseName(); + SYS_LOGGER.log(Level.INFO, "Opening case {0} for {1}", new Object[]{caseName, manifest.getFilePath()}); currentJob.setStage(AutoIngestJob.Stage.OPENING_CASE); /* * Acquire and hold a case name lock so that only one node at as @@ -1941,20 +2076,20 @@ public final class AutoIngestManager extends Observable implements PropertyChang } currentJob.setCaseDirectoryPath(caseDirectoryPath); Case caseForJob = Case.getCurrentCase(); - SYS_LOGGER.log(Level.INFO, "Opened case {0} for {1}", new Object[]{caseForJob.getName(), nodeData.getManifestFilePath()}); + SYS_LOGGER.log(Level.INFO, "Opened case {0} for {1}", new Object[]{caseForJob.getName(), manifest.getFilePath()}); return caseForJob; } catch (CaseActionException ex) { - throw new CaseManagementException(String.format("Error creating or opening case %s for %s", caseName, nodeData.getManifestFilePath()), ex); + throw new CaseManagementException(String.format("Error creating or opening case %s for %s", caseName, manifest.getFilePath()), ex); } catch (IllegalStateException ex) { /* * Deal with the unfortunate fact that * Case.getCurrentCase throws IllegalStateException. 
*/ - throw new CaseManagementException(String.format("Error getting current case %s for %s", caseName, nodeData.getManifestFilePath()), ex); + throw new CaseManagementException(String.format("Error getting current case %s for %s", caseName, manifest.getFilePath()), ex); } } else { - throw new CaseManagementException(String.format("Timed out acquiring case name lock for %s for %s", caseName, nodeData.getManifestFilePath())); + throw new CaseManagementException(String.format("Timed out acquiring case name lock for %s for %s", caseName, manifest.getFilePath())); } } } @@ -1985,7 +2120,6 @@ public final class AutoIngestManager extends Observable implements PropertyChang * ingest is shutting down. */ private void runIngestForJob(Case caseForJob) throws CoordinationServiceException, AnalysisStartupException, FileExportException, AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException { - Path manifestPath = currentJob.getNodeData().getManifestFilePath(); try { if (currentJob.isCanceled() || jobProcessingTaskFuture.isCancelled()) { return; @@ -2082,22 +2216,22 @@ public final class AutoIngestManager extends Observable implements PropertyChang * if auto ingest is shutting down. 
*/ private DataSource identifyDataSource(Case caseForJob) throws AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException { - AutoIngestJobNodeData nodeData = currentJob.getNodeData(); - Path manifestPath = nodeData.getManifestFilePath(); + Manifest manifest = currentJob.getManifest(); + Path manifestPath = manifest.getFilePath(); SYS_LOGGER.log(Level.INFO, "Identifying data source for {0} ", manifestPath); currentJob.setStage(AutoIngestJob.Stage.IDENTIFYING_DATA_SOURCE); Path caseDirectoryPath = currentJob.getCaseDirectoryPath(); - AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(manifestPath, nodeData.getDataSourceFileName(), caseDirectoryPath); - Path dataSourcePath = nodeData.getDataSourcePath(); + AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(manifestPath, manifest.getDataSourceFileName(), caseDirectoryPath); + Path dataSourcePath = manifest.getDataSourcePath(); File dataSource = dataSourcePath.toFile(); if (!dataSource.exists()) { SYS_LOGGER.log(Level.SEVERE, "Missing data source for {0}", manifestPath); - currentJob.getNodeData().setErrorsOccurred(true); + currentJob.setErrorsOccurred(true); AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log jobLogger.logMissingDataSource(); return null; } - String deviceId = nodeData.getDeviceId(); + String deviceId = manifest.getDeviceId(); return new DataSource(deviceId, dataSourcePath); } @@ -2118,15 +2252,15 @@ public final class AutoIngestManager extends Observable implements PropertyChang * ingest is shutting down. 
*/ private void runDataSourceProcessor(Case caseForJob, DataSource dataSource) throws InterruptedException, AutoIngestAlertFileException, AutoIngestJobLoggerException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException { - AutoIngestJobNodeData nodeData = currentJob.getNodeData(); - Path manifestPath = nodeData.getManifestFilePath(); + Manifest manifest = currentJob.getManifest(); + Path manifestPath = manifest.getFilePath(); SYS_LOGGER.log(Level.INFO, "Adding data source for {0} ", manifestPath); currentJob.setStage(AutoIngestJob.Stage.ADDING_DATA_SOURCE); UUID taskId = UUID.randomUUID(); DataSourceProcessorCallback callBack = new AddDataSourceCallback(caseForJob, dataSource, taskId); DataSourceProcessorProgressMonitor progressMonitor = new DoNothingDSPProgressMonitor(); Path caseDirectoryPath = currentJob.getCaseDirectoryPath(); - AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(manifestPath, nodeData.getDataSourceFileName(), caseDirectoryPath); + AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(manifestPath, manifest.getDataSourceFileName(), caseDirectoryPath); try { caseForJob.notifyAddingDataSource(taskId); @@ -2151,7 +2285,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang if (validDataSourceProcessorsMap.isEmpty()) { // This should never happen. We should add all unsupported data sources as logical files. AutoIngestAlertFile.create(caseDirectoryPath); - currentJob.getNodeData().setErrorsOccurred(true); + currentJob.setErrorsOccurred(true); jobLogger.logFailedToIdentifyDataSource(); SYS_LOGGER.log(Level.WARNING, "Unsupported data source {0} for {1}", new Object[]{dataSource.getPath(), manifestPath}); // NON-NLS return; @@ -2177,7 +2311,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang // if a DSP fails even if a later one succeeds since we expected to be able to process // the data source which each DSP on the list. 
AutoIngestAlertFile.create(caseDirectoryPath); - currentJob.getNodeData().setErrorsOccurred(true); + currentJob.setErrorsOccurred(true); jobLogger.logDataSourceProcessorError(selectedProcessor.getDataSourceType()); SYS_LOGGER.log(Level.SEVERE, "Exception while processing {0} with data source processor {1}", new Object[]{dataSource.getPath(), selectedProcessor.getDataSourceType()}); } @@ -2211,17 +2345,17 @@ public final class AutoIngestManager extends Observable implements PropertyChang * ingest is shutting down. */ private void logDataSourceProcessorResult(DataSource dataSource) throws AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException { - AutoIngestJobNodeData nodeData = currentJob.getNodeData(); - Path manifestPath = nodeData.getManifestFilePath(); + Manifest manifest = currentJob.getManifest(); + Path manifestPath = manifest.getFilePath(); Path caseDirectoryPath = currentJob.getCaseDirectoryPath(); - AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(manifestPath, nodeData.getDataSourceFileName(), caseDirectoryPath); + AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(manifestPath, manifest.getDataSourceFileName(), caseDirectoryPath); DataSourceProcessorResult resultCode = dataSource.getResultDataSourceProcessorResultCode(); if (null != resultCode) { switch (resultCode) { case NO_ERRORS: jobLogger.logDataSourceAdded(); if (dataSource.getContent().isEmpty()) { - currentJob.getNodeData().setErrorsOccurred(true); + currentJob.setErrorsOccurred(true); AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log jobLogger.logNoDataSourceContent(); } @@ -2233,7 +2367,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang } jobLogger.logDataSourceAdded(); if (dataSource.getContent().isEmpty()) { - currentJob.getNodeData().setErrorsOccurred(true); + currentJob.setErrorsOccurred(true); AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is 
more important than the case log jobLogger.logNoDataSourceContent(); } @@ -2243,7 +2377,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang for (String errorMessage : dataSource.getDataSourceProcessorErrorMessages()) { SYS_LOGGER.log(Level.SEVERE, "Critical error running data source processor for {0}: {1}", new Object[]{manifestPath, errorMessage}); } - currentJob.getNodeData().setErrorsOccurred(true); + currentJob.setErrorsOccurred(true); AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log jobLogger.logFailedToAddDataSource(); break; @@ -2257,7 +2391,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang * cancelCurrentJob. */ SYS_LOGGER.log(Level.WARNING, "Cancellation while waiting for data source processor for {0}", manifestPath); - currentJob.getNodeData().setErrorsOccurred(true); + currentJob.setErrorsOccurred(true); AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log jobLogger.logDataSourceProcessorCancelled(); } @@ -2283,12 +2417,12 @@ public final class AutoIngestManager extends Observable implements PropertyChang * ingest is shutting down. 
*/ private void analyze(DataSource dataSource) throws AnalysisStartupException, AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException { - AutoIngestJobNodeData nodeData = currentJob.getNodeData(); - Path manifestPath = nodeData.getManifestFilePath(); + Manifest manifest = currentJob.getManifest(); + Path manifestPath = manifest.getFilePath(); SYS_LOGGER.log(Level.INFO, "Starting ingest modules analysis for {0} ", manifestPath); currentJob.setStage(AutoIngestJob.Stage.ANALYZING_DATA_SOURCE); Path caseDirectoryPath = currentJob.getCaseDirectoryPath(); - AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(manifestPath, nodeData.getDataSourceFileName(), caseDirectoryPath); + AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(manifestPath, manifest.getDataSourceFileName(), caseDirectoryPath); IngestJobEventListener ingestJobEventListener = new IngestJobEventListener(); IngestManager.getInstance().addIngestJobEventListener(ingestJobEventListener); try { @@ -2313,7 +2447,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang List cancelledModules = snapshot.getCancelledDataSourceIngestModules(); if (!cancelledModules.isEmpty()) { SYS_LOGGER.log(Level.WARNING, String.format("Ingest module(s) cancelled for %s", manifestPath)); - currentJob.getNodeData().setErrorsOccurred(true); + currentJob.setErrorsOccurred(true); AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log for (String module : snapshot.getCancelledDataSourceIngestModules()) { SYS_LOGGER.log(Level.WARNING, String.format("%s ingest module cancelled for %s", module, manifestPath)); @@ -2322,8 +2456,8 @@ public final class AutoIngestManager extends Observable implements PropertyChang } jobLogger.logAnalysisCompleted(); } else { - currentJob.setStage(AutoIngestJob.Stage.CANCELING); - currentJob.getNodeData().setErrorsOccurred(true); + currentJob.setStage(AutoIngestJob.Stage.CANCELLING); + 
currentJob.setErrorsOccurred(true); AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log jobLogger.logAnalysisCancelled(); CancellationReason cancellationReason = snapshot.getCancellationReason(); @@ -2336,13 +2470,13 @@ public final class AutoIngestManager extends Observable implements PropertyChang for (IngestModuleError error : ingestJobStartResult.getModuleErrors()) { SYS_LOGGER.log(Level.SEVERE, String.format("%s ingest module startup error for %s", error.getModuleDisplayName(), manifestPath), error.getThrowable()); } - currentJob.getNodeData().setErrorsOccurred(true); + currentJob.setErrorsOccurred(true); AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log jobLogger.logIngestModuleStartupErrors(); throw new AnalysisStartupException(String.format("Error(s) during ingest module startup for %s", manifestPath)); } else { SYS_LOGGER.log(Level.SEVERE, String.format("Ingest manager ingest job start error for %s", manifestPath), ingestJobStartResult.getStartupException()); - currentJob.getNodeData().setErrorsOccurred(true); + currentJob.setErrorsOccurred(true); AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log jobLogger.logAnalysisStartupError(); throw new AnalysisStartupException("Ingest manager error starting job", ingestJobStartResult.getStartupException()); @@ -2351,7 +2485,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang for (String warning : settingsWarnings) { SYS_LOGGER.log(Level.SEVERE, "Ingest job settings error for {0}: {1}", new Object[]{manifestPath, warning}); } - currentJob.getNodeData().setErrorsOccurred(true); + currentJob.setErrorsOccurred(true); AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log jobLogger.logIngestJobSettingsErrors(); throw new AnalysisStartupException("Error(s) in ingest job settings"); 
@@ -2382,16 +2516,16 @@ public final class AutoIngestManager extends Observable implements PropertyChang * ingest is shutting down. */ private void exportFiles(DataSource dataSource) throws FileExportException, AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException { - AutoIngestJobNodeData nodeData = currentJob.getNodeData(); - Path manifestPath = nodeData.getManifestFilePath(); + Manifest manifest = currentJob.getManifest(); + Path manifestPath = manifest.getFilePath(); SYS_LOGGER.log(Level.INFO, "Exporting files for {0}", manifestPath); currentJob.setStage(AutoIngestJob.Stage.EXPORTING_FILES); Path caseDirectoryPath = currentJob.getCaseDirectoryPath(); - AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(manifestPath, nodeData.getDataSourceFileName(), caseDirectoryPath); + AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(manifestPath, manifest.getDataSourceFileName(), caseDirectoryPath); try { FileExporter fileExporter = new FileExporter(); if (fileExporter.isEnabled()) { - fileExporter.process(nodeData.getDeviceId(), dataSource.getContent(), currentJob::isCanceled); + fileExporter.process(manifest.getDeviceId(), dataSource.getContent(), currentJob::isCanceled); jobLogger.logFileExportCompleted(); } else { SYS_LOGGER.log(Level.WARNING, "Exporting files not enabled for {0}", manifestPath); @@ -2399,7 +2533,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang } } catch (FileExportException ex) { SYS_LOGGER.log(Level.SEVERE, String.format("Error doing file export for %s", manifestPath), ex); - currentJob.getNodeData().setErrorsOccurred(true); + currentJob.setErrorsOccurred(true); AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log jobLogger.logFileExportError(); } @@ -2636,6 +2770,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang try { synchronized (jobsLock) { if (currentJob != null) { + 
currentJob.getStageDetails(); setChanged(); notifyObservers(Event.JOB_STATUS_UPDATED); eventPublisher.publishRemotely(new AutoIngestJobStatusEvent(currentJob)); @@ -2646,8 +2781,8 @@ public final class AutoIngestManager extends Observable implements PropertyChang boolean isError = false; if (getErrorState().equals(ErrorState.NONE)) { if (currentJob != null) { - message = "Processing " + currentJob.getNodeData().getDataSourceFileName() - + " for case " + currentJob.getNodeData().getCaseName(); + message = "Processing " + currentJob.getManifest().getDataSourceFileName() + + " for case " + currentJob.getManifest().getCaseName(); } else { message = "Paused or waiting for next case"; } @@ -2665,7 +2800,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang // check whether any remote nodes have timed out for (AutoIngestJob job : hostNamesToRunningJobs.values()) { - if (isStale(hostNamesToLastMsgTime.get(job.getNodeName()))) { + if (isStale(hostNamesToLastMsgTime.get(job.getProcessingHostName()))) { // remove the job from remote job running map. /* * NOTE: there is theoretically a check-then-act race @@ -2677,7 +2812,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang * back into hostNamesToRunningJobs as a result of * processing the job status update. 
*/ - hostNamesToRunningJobs.remove(job.getNodeName()); + hostNamesToRunningJobs.remove(job.getProcessingHostName()); setChanged(); notifyObservers(Event.JOB_COMPLETED); } @@ -2858,15 +2993,15 @@ public final class AutoIngestManager extends Observable implements PropertyChang } - static final class AutoIngestManagerStartupException extends Exception { + static final class AutoIngestManagerException extends Exception { private static final long serialVersionUID = 1L; - private AutoIngestManagerStartupException(String message) { + private AutoIngestManagerException(String message) { super(message); } - private AutoIngestManagerStartupException(String message, Throwable cause) { + private AutoIngestManagerException(String message, Throwable cause) { super(message, cause); } diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMonitor.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMonitor.java index 20478a1935..2000bba6f0 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMonitor.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMonitor.java @@ -19,6 +19,7 @@ package org.sleuthkit.autopsy.experimental.autoingest; import com.google.common.util.concurrent.ThreadFactoryBuilder; +import java.awt.Cursor; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.nio.file.Path; @@ -33,12 +34,14 @@ import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import javax.annotation.concurrent.GuardedBy; +import org.openide.util.Exceptions; import org.sleuthkit.autopsy.coordinationservice.CoordinationService; import org.sleuthkit.autopsy.coordinationservice.CoordinationService.CoordinationServiceException; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.NetworkUtils; import 
org.sleuthkit.autopsy.events.AutopsyEventException; import org.sleuthkit.autopsy.events.AutopsyEventPublisher; +import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJob.ProcessingStatus; /** * An auto ingest monitor responsible for monitoring and reporting the @@ -140,8 +143,10 @@ public final class AutoIngestMonitor extends Observable implements PropertyChang */ private void handleJobStartedEvent(AutoIngestJobStartedEvent event) { synchronized (jobsLock) { - // DLG: Remove job from pending queue, if present - // DLG: Add job to running jobs list + // DLG: TEST! Remove job from pending queue, if present + // DLG: TEST! Add job to running jobs list + jobsSnapshot.removePendingJob(event.getJob()); + jobsSnapshot.addOrReplaceRunningJob(event.getJob()); setChanged(); notifyObservers(jobsSnapshot); } @@ -154,7 +159,8 @@ public final class AutoIngestMonitor extends Observable implements PropertyChang */ private void handleJobStatusEvent(AutoIngestJobStatusEvent event) { synchronized (jobsLock) { - // DLG: Replace job in running list with job from event + // DLG: TEST! Replace job in running list with job from event + jobsSnapshot.addOrReplaceRunningJob(event.getJob()); setChanged(); notifyObservers(jobsSnapshot); } @@ -167,8 +173,10 @@ public final class AutoIngestMonitor extends Observable implements PropertyChang */ private void handleJobCompletedEvent(AutoIngestJobCompletedEvent event) { synchronized (jobsLock) { - // DLG: Remove job from event from running list, if present - // DLG: Add job to completed list + // DLG: TEST! Remove job from event from running list, if present + // DLG: TEST! Add job to completed list + jobsSnapshot.removeRunningJob(event.getJob()); + jobsSnapshot.addOrReplaceCompletedJob(event.getJob()); setChanged(); notifyObservers(jobsSnapshot); } @@ -180,12 +188,7 @@ public final class AutoIngestMonitor extends Observable implements PropertyChang * @param event A job/case prioritization event. 
*/ private void handleCasePrioritizationEvent(AutoIngestCasePrioritizedEvent event) { - synchronized (jobsLock) { - // DLG: Replace job in pending queue with job from event - // DLG: See 'bnPrioritizeCaseActionPerformed(ActionEvent)' in the AutoIngestControlPanel class!!! - setChanged(); - notifyObservers(jobsSnapshot); - } + coordSvcQueryExecutor.submit(new CoordinationServiceQueryTask()); } /** @@ -236,9 +239,31 @@ public final class AutoIngestMonitor extends Observable implements PropertyChang JobsSnapshot newJobsSnapshot = new JobsSnapshot(); List nodeList = coordinationService.getNodeList(CoordinationService.CategoryNode.MANIFESTS); for (String node : nodeList) { - // DLG: Do not need a lock here - // DLG: Get the node data and construct a AutoIngestJobNodeData object (rename AutoIngestJobNodeData => AutoIngestJobData) - // DLG: Construct an AutoIngestJob object from the AutoIngestJobNodeData object, need new AutoIngestJob constructor + try { + AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, node)); + AutoIngestJob job = new AutoIngestJob(nodeData); + ProcessingStatus processingStatus = nodeData.getProcessingStatus(); + switch (processingStatus) { + case PENDING: + newJobsSnapshot.addOrReplacePendingJob(job); + break; + case PROCESSING: + newJobsSnapshot.addOrReplaceRunningJob(job); + break; + case COMPLETED: + newJobsSnapshot.addOrReplaceCompletedJob(job); + break; + case DELETED: + break; + default: + LOGGER.log(Level.SEVERE, "Unknown AutoIngestJobData.ProcessingStatus"); + break; + } + } catch (InterruptedException ex) { + LOGGER.log(Level.SEVERE, String.format("Unexpected interrupt while retrieving coordination service node data for '%s'", node), ex); + } catch (AutoIngestJobNodeData.InvalidDataException ex) { + LOGGER.log(Level.SEVERE, String.format("Unable to use node data for '%s'", node), ex); + } } return newJobsSnapshot; } catch (CoordinationServiceException ex) { 
@@ -261,10 +286,10 @@ public final class AutoIngestMonitor extends Observable implements PropertyChang * the pending jobs queue. */ for (AutoIngestJob job : jobsSnapshot.getPendingJobs()) { - if (job.getNodeData().getPriority() > highestPriority) { - highestPriority = job.getNodeData().getPriority(); + if (job.getPriority() > highestPriority) { + highestPriority = job.getPriority(); } - if (job.getNodeData().getManifestFilePath().equals(manifestFilePath)) { + if (job.getManifest().getFilePath().equals(manifestFilePath)) { prioritizedJob = job; } } @@ -275,22 +300,22 @@ public final class AutoIngestMonitor extends Observable implements PropertyChang */ if (null != prioritizedJob) { ++highestPriority; - String manifestNodePath = prioritizedJob.getNodeData().getManifestFilePath().toString(); + String manifestNodePath = prioritizedJob.getManifest().getFilePath().toString(); try { AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath)); nodeData.setPriority(highestPriority); coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath, nodeData.toArray()); - } catch (AutoIngestJobNodeDataException | CoordinationServiceException | InterruptedException ex) { + } catch (AutoIngestJobNodeData.InvalidDataException | CoordinationServiceException | InterruptedException ex) { throw new AutoIngestMonitorException("Error bumping priority for job " + prioritizedJob.toString(), ex); } - prioritizedJob.getNodeData().setPriority(highestPriority); + prioritizedJob.setPriority(highestPriority); } /* * Publish a prioritization event. 
*/ if (null != prioritizedJob) { - final String caseName = prioritizedJob.getNodeData().getCaseName(); + final String caseName = prioritizedJob.getManifest().getCaseName(); new Thread(() -> { eventPublisher.publishRemotely(new AutoIngestCasePrioritizedEvent(LOCAL_HOST_NAME, caseName)); }).start(); diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutopsyManifestFileParser.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutopsyManifestFileParser.java index 4a5a3c86c5..2edc31ab71 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutopsyManifestFileParser.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutopsyManifestFileParser.java @@ -18,11 +18,11 @@ */ package org.sleuthkit.autopsy.experimental.autoingest; -import java.io.File; import java.io.IOException; +import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Arrays; +import java.nio.file.attribute.BasicFileAttributes; +import java.util.Date; import java.util.HashMap; import javax.annotation.concurrent.Immutable; import javax.xml.parsers.DocumentBuilder; @@ -42,24 +42,11 @@ import org.xml.sax.SAXException; public final class AutopsyManifestFileParser implements ManifestFileParser { private static final String MANIFEST_FILE_NAME_SIGNATURE = "_MANIFEST.XML"; - private static final String NMEC_MANIFEST_ELEM_TAG_NAME = "NMEC_Manifest"; - private static final String MANIFEST_ELEM_TAG_NAME = "Manifest"; - private static final String CASE_NAME_XPATH = "/Collection/Name/text()"; - private static final String DEVICE_ID_XPATH = "/Collection/Image/ID/text()"; - private static final String IMAGE_NAME_XPATH = "/Collection/Image/Name/text()"; - private static final String IMAGE_FULL_NAME_XPATH = "/Collection/Image/FullName/text()"; - private static final String IMAGE_RELATIVE_PATH_XPATH = "/Collection/Image/RelativePath/text()"; - - private String actualRootElementTag = ""; - + 
private static final String ROOT_ELEM_TAG_NAME = "AutopsyManifest"; + private static final String CASE_NAME_XPATH = "/AutopsyManifest/CaseName/text()"; + private static final String DEVICE_ID_XPATH = "/AutopsyManifest/DeviceId/text()"; + private static final String DATA_SOURCE_NAME_XPATH = "/AutopsyManifest/DataSource/text()"; - /** - * Determine whether the given file is a supported manifest file. - * - * @param filePath - * - * @return true if this is a supported manifest file, otherwise false - */ @Override public boolean fileIsManifest(Path filePath) { boolean fileIsManifest = false; @@ -68,9 +55,7 @@ public final class AutopsyManifestFileParser implements ManifestFileParser { if (fileName.toString().toUpperCase().endsWith(MANIFEST_FILE_NAME_SIGNATURE)) { Document doc = this.createManifestDOM(filePath); Element docElement = doc.getDocumentElement(); - actualRootElementTag = docElement.getTagName(); - fileIsManifest = actualRootElementTag.equals(MANIFEST_ELEM_TAG_NAME) || - actualRootElementTag.equals(NMEC_MANIFEST_ELEM_TAG_NAME); + fileIsManifest = docElement.getTagName().equals(ROOT_ELEM_TAG_NAME); } } catch (Exception unused) { fileIsManifest = false; @@ -78,95 +63,30 @@ public final class AutopsyManifestFileParser implements ManifestFileParser { return fileIsManifest; } - /** - * Parse the given manifest file and create a Manifest object. - * - * @param filePath Fully qualified path to manifest file - * - * @return A Manifest object representing the parsed manifest file. 
- * - * @throws ManifestFileParserException - */ @Override public Manifest parse(Path filePath) throws ManifestFileParserException { try { + BasicFileAttributes attrs = Files.readAttributes(filePath, BasicFileAttributes.class); + Date dateFileCreated = new Date(attrs.creationTime().toMillis()); Document doc = this.createManifestDOM(filePath); XPath xpath = XPathFactory.newInstance().newXPath(); - XPathExpression expr = xpath.compile(constructXPathExpression(CASE_NAME_XPATH)); - String caseName = (String) expr.evaluate(doc, XPathConstants.STRING); - expr = xpath.compile(constructXPathExpression(DEVICE_ID_XPATH)); + XPathExpression expr = xpath.compile(CASE_NAME_XPATH); + String caseName = (String) expr.evaluate(doc, XPathConstants.STRING); + expr = xpath.compile(DEVICE_ID_XPATH); String deviceId = (String) expr.evaluate(doc, XPathConstants.STRING); - Path dataSourcePath = determineDataSourcePath(filePath, doc); - return new Manifest(filePath, caseName, deviceId, dataSourcePath, new HashMap<>()); + expr = xpath.compile(DATA_SOURCE_NAME_XPATH); + String dataSourceName = (String) expr.evaluate(doc, XPathConstants.STRING); + Path dataSourcePath = filePath.getParent().resolve(dataSourceName); + return new Manifest(filePath, dateFileCreated, caseName, deviceId, dataSourcePath, new HashMap<>()); } catch (Exception ex) { throw new ManifestFileParserException(String.format("Error parsing manifest %s", filePath), ex); } } - /** - * Create a new DOM document object for the given manifest file. - * - * @param manifestFilePath Fully qualified path to manifest file. 
- * - * @return DOM document object - * - * @throws ParserConfigurationException - * @throws SAXException - * @throws IOException - */ private Document createManifestDOM(Path manifestFilePath) throws ParserConfigurationException, SAXException, IOException { DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder(); return docBuilder.parse(manifestFilePath.toFile()); } - /** - * Creates an XPath expression string relative to the actual root - * element of the manifest for the given path. - * - * @param path - * @return XPath expression string. - */ - private String constructXPathExpression(String path) { - return "/" + actualRootElementTag + path; - } - - /** - * Attempt to find a valid (existing) data source for the manifest file. - * - * @param manifestFilePath Fully qualified path to manifest file. - * @param doc DOM document object for the manifest file. - * @return Path to an existing data source. - * @throws ManifestFileParserException if an error occurred while parsing manifest file. - */ - private Path determineDataSourcePath(Path manifestFilePath, Document doc) throws ManifestFileParserException { - String dataSourcePath = ""; - try { - for (String element : Arrays.asList(IMAGE_NAME_XPATH, IMAGE_FULL_NAME_XPATH, IMAGE_RELATIVE_PATH_XPATH)) { - XPath xpath = XPathFactory.newInstance().newXPath(); - XPathExpression expr = xpath.compile(constructXPathExpression(element)); - String fileName = (String) expr.evaluate(doc, XPathConstants.STRING); - if (fileName.contains("\\")) { - fileName = fileName.substring(fileName.lastIndexOf("\\") + 1); - } - try { - dataSourcePath = manifestFilePath.getParent().resolve(fileName).toString(); - } catch (Exception ignore) { - // NOTE: exceptions can be thrown by resolve() method based on contents of the manifest file. - // For example if file name is "test .txt" and in one of the path fields they only enter "test " - // i.e. 
the file name without extension. - // We should continue on to the next XML path field - continue; - } - if (new File(dataSourcePath).exists()) { - // found the data source - return Paths.get(dataSourcePath); - } - // keep trying other XML fields - } - return Paths.get(dataSourcePath); - } catch (Exception ex) { - throw new ManifestFileParserException(String.format("Error parsing manifest %s", manifestFilePath), ex); - } - } } diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Bundle.properties b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Bundle.properties index 31fa2c11f0..a22410a70e 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Bundle.properties +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Bundle.properties @@ -27,7 +27,6 @@ AutoIngestDashboard.DeletionFailed=Deletion failed for job AutoIngestDashboard.ShowLogFailed.Title=Unable to display case log AutoIngestDashboard.ShowLogFailed.Message=Case log file does not exist AutoIngestDashboard.bnPrioritizeCase.toolTipText=Move all images associated with a case to top of Pending queue. -AutoIngestDashboard.bnPrioritizeCase.text=Prioritize Case AutoIngestDashboard.ExitConsequences=This will cancel any currently running job on this host. Exiting while a job is running potentially leaves the case in an inconsistent or corrupted state. AutoIngestDashboard.ExitingStatus=Exiting... 
AutoIngestDashboard.OK=OK @@ -275,3 +274,4 @@ AutoIngestDashboard.prioritizeButton.toolTipText=Prioritizes the selected job AutoIngestDashboard.prioritizeButton.text=&Prioritize AutoIngestDashboard.refreshButton.toolTipText=Refresh displayed tables AutoIngestDashboard.refreshButton.text=&Refresh +AutoIngestDashboard.jButton1.text=jButton1 diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Manifest.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Manifest.java index da195239f9..e3c4cb6bb1 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Manifest.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Manifest.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2015 Basis Technology Corp. + * Copyright 2011-2017 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,12 +18,9 @@ */ package org.sleuthkit.autopsy.experimental.autoingest; -import java.io.IOException; import java.io.Serializable; -import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; -import java.nio.file.attribute.BasicFileAttributes; import java.util.Date; import java.util.HashMap; import java.util.Map; @@ -40,10 +37,9 @@ public final class Manifest implements Serializable { private final String dataSourcePath; private final Map manifestProperties; - public Manifest(Path manifestFilePath, String caseName, String deviceId, Path dataSourcePath, Map manifestProperties) throws IOException { + public Manifest(Path manifestFilePath, Date dateFileCreated, String caseName, String deviceId, Path dataSourcePath, Map manifestProperties) { this.filePath = manifestFilePath.toString(); - BasicFileAttributes attrs = Files.readAttributes(manifestFilePath, BasicFileAttributes.class); - this.dateFileCreated = new Date(attrs.creationTime().toMillis()); + this.dateFileCreated = dateFileCreated; this.caseName = 
caseName; this.deviceId = deviceId; if (dataSourcePath != null) { @@ -52,8 +48,8 @@ public final class Manifest implements Serializable { this.dataSourcePath = ""; } this.manifestProperties = new HashMap<>(manifestProperties); - } - + } + public Path getFilePath() { return Paths.get(this.filePath); } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ExtractedContentPanel.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ExtractedContentPanel.java index d9bcd47332..d63163ff61 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ExtractedContentPanel.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ExtractedContentPanel.java @@ -637,7 +637,7 @@ class ExtractedContentPanel extends javax.swing.JPanel { } @Override - @NbBundle.Messages({"# 0 - Content name", + @NbBundle.Messages({"# {0} - Content name", "ExtractedContentPanel.SetMarkup.progress.loading=Loading text for {0}"}) protected String doInBackground() throws Exception { progress = ProgressHandle.createHandle(Bundle.ExtractedContentPanel_SetMarkup_progress_loading(contentName)); diff --git a/docs/doxygen-user/central_repo.dox b/docs/doxygen-user/central_repo.dox index c59732c9bd..66b446023a 100644 --- a/docs/doxygen-user/central_repo.dox +++ b/docs/doxygen-user/central_repo.dox @@ -68,6 +68,9 @@ and added to the list of Interesting Items. \image html central_repo_manage_tags.png +If a case is open, checking the Implies Known Bad checkbox will give you the option to add the known bad status to anything in the current case +that has already been tagged. For example, if you create a tag named "Alpha", tag a few items and then go into Manage Tags and check the box for the Alpha tag, you can optionally choose to have the status for those tagged items changed in the Central Repository. The effect is the same as if you had checked the box in Manage Tags before tagging the items. Note that data from any previous cases will not be changed. 
+ By default there is a tag called "Evidence" as the only tag associated with this module. To associate one or more tag(s) with this module, check the Correlate box next to the tag name(s) and click OK. To create additional tags, use the Tags options panel.