From fdbbc0520e69c1104772fa74c39c78a9ae47fe5d Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 9 Jun 2014 12:03:40 -0400 Subject: [PATCH 1/4] Add code to ensure that every ingest job that starts get at least one task --- .../ingest/DataSourceIngestTaskScheduler.java | 72 ------------------- ...cheduler.java => IngestTaskScheduler.java} | 0 2 files changed, 72 deletions(-) delete mode 100755 Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestTaskScheduler.java rename Core/src/org/sleuthkit/autopsy/ingest/{FileIngestTaskScheduler.java => IngestTaskScheduler.java} (100%) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestTaskScheduler.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestTaskScheduler.java deleted file mode 100755 index 2acc58a5a1..0000000000 --- a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestTaskScheduler.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2012-2014 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.ingest; - -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.logging.Level; -import java.util.logging.Logger; -import org.sleuthkit.datamodel.Content; - -final class DataSourceIngestTaskScheduler implements IngestTaskQueue { - - private static final DataSourceIngestTaskScheduler instance = new DataSourceIngestTaskScheduler(); - private final List tasks = new ArrayList<>(); // Guarded by this - private final LinkedBlockingQueue tasksQueue = new LinkedBlockingQueue<>(); - - static DataSourceIngestTaskScheduler getInstance() { - return instance; - } - - private DataSourceIngestTaskScheduler() { - } - - synchronized void scheduleTask(IngestJob job, Content dataSource) throws InterruptedException { - DataSourceIngestTask task = new DataSourceIngestTask(job, dataSource); - tasks.add(task); - try { - // Should not block, queue is (theoretically) unbounded. 
- tasksQueue.put(task); - } catch (InterruptedException ex) { - tasks.remove(task); - Logger.getLogger(DataSourceIngestTaskScheduler.class.getName()).log(Level.FINE, "Interruption of unexpected block on tasks queue", ex); //NON-NLS - throw ex; - } - } - - @Override - public IngestTask getNextTask() throws InterruptedException { - return tasksQueue.take(); - } - - synchronized void notifyTaskCompleted(DataSourceIngestTask task) { - tasks.remove(task); - } - - synchronized boolean hasIncompleteTasksForIngestJob(IngestJob job) { - long jobId = job.getId(); - for (DataSourceIngestTask task : tasks) { - if (task.getIngestJob().getId() == jobId) { - return true; - } - } - return false; - } -} diff --git a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestTaskScheduler.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestTaskScheduler.java similarity index 100% rename from Core/src/org/sleuthkit/autopsy/ingest/FileIngestTaskScheduler.java rename to Core/src/org/sleuthkit/autopsy/ingest/IngestTaskScheduler.java From 42102ee327d823c7ed650be47d912fa1b345aac1 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 9 Jun 2014 12:05:17 -0400 Subject: [PATCH 2/4] Add code to ensure that every ingest job that starts get at least one task --- .../sleuthkit/autopsy/ingest/IngestJob.java | 106 +++++----- .../autopsy/ingest/IngestJobContext.java | 2 +- .../autopsy/ingest/IngestManager.java | 4 +- .../autopsy/ingest/IngestTaskScheduler.java | 181 ++++++++++++------ 4 files changed, 174 insertions(+), 119 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java index 5187da7f7b..91756959dd 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java @@ -33,16 +33,16 @@ import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; /** - * InjestJobs store all settings and data associated with the user selecting a - * datasource and running a set of ingest modules on it. + * IngestJobs encapsulates the settings, ingest module pipelines, and progress + * bars that are used to process a data source when a user chooses to run a set + * of ingest modules on the data source. */ final class IngestJob { private static final Logger logger = Logger.getLogger(IngestManager.class.getName()); private static final AtomicLong nextIngestJobId = new AtomicLong(0L); private static final ConcurrentHashMap ingestJobsById = new ConcurrentHashMap<>(); - private static final DataSourceIngestTaskScheduler dataSourceTaskScheduler = DataSourceIngestTaskScheduler.getInstance(); - private static final FileIngestTaskScheduler fileTaskScheduler = FileIngestTaskScheduler.getInstance(); + private static final IngestTaskScheduler ingestTaskScheduler = IngestTaskScheduler.getInstance(); private final long id; private final Content dataSource; private final boolean processUnallocatedSpace; @@ -108,8 +108,8 @@ final class IngestJob { /** * Create the file and data source pipelines. - * @param ingestModuleTemplates - * @throws InterruptedException + * + * @throws InterruptedException */ private void createIngestPipelines(List ingestModuleTemplates) throws InterruptedException { IngestJobContext context = new IngestJobContext(this); @@ -120,6 +120,12 @@ final class IngestJob { } } + /** + * Check the data source and file ingest pipeline queues to see if at least + * one pipeline exists. + * + * @return True or false. 
+ */ private boolean hasNonEmptyPipeline() { if (dataSourceIngestPipeline.isEmpty() && fileIngestPipelines.peek().isEmpty()) { return false; @@ -128,41 +134,35 @@ final class IngestJob { } /** - * Start both the data source and file ingest pipelines - * @return - * @throws InterruptedException + * Start both the data source and file ingest pipelines. + * + * @return A collection of ingest module start up errors, empty on success. + * @throws InterruptedException */ private List start() throws InterruptedException { List errors = startUpIngestPipelines(); if (errors.isEmpty()) { + // Start the progress bars before scheduling the tasks to make sure + // the progress bar will be available as soon as the task begin to + // be processed. if (!dataSourceIngestPipeline.isEmpty()) { - // Start the data source ingest progress bar before scheduling the - // data source task to make sure the progress bar will be available - // as soon as the task begins to be processed. startDataSourceIngestProgressBar(); - dataSourceTaskScheduler.scheduleTask(this, dataSource); + ingestTaskScheduler.scheduleDataSourceIngestTask(this, dataSource); } - if (!fileIngestPipelines.peek().isEmpty()) { - // Start the file ingest progress bar before scheduling the file - // ingest tasks to make sure the progress bar will be available - // as soon as the tasks begin to be processed. startFileIngestProgressBar(); - if (!fileTaskScheduler.tryScheduleTasks(this, dataSource)) { - fileIngestProgress.finish(); - } + ingestTaskScheduler.scheduleFileIngestTasks(this, dataSource); } } return errors; } - /** * Startup each of the file and data source ingest modules to collect * possible errors. - * + * * @return - * @throws InterruptedException + * @throws InterruptedException */ private List startUpIngestPipelines() throws InterruptedException { List errors = new ArrayList<>(); @@ -228,10 +228,9 @@ final class IngestJob { logIngestModuleErrors(errors); } } - dataSourceTaskScheduler.notifyTaskCompleted(task); + ingestTaskScheduler.notifyTaskCompleted(task); dataSourceIngestProgress.finish(); - - if (!fileTaskScheduler.hasIncompleteTasksForIngestJob(this)) { + if (!ingestTaskScheduler.hasIncompleteTasksForIngestJob(this)) { finish(); } } @@ -239,41 +238,40 @@ final class IngestJob { void process(FileIngestTask task) throws InterruptedException { if (!isCancelled()) { AbstractFile file = task.getFile(); - synchronized (this) { - ++processedFiles; - if (processedFiles <= estimatedFilesToProcess) { - fileIngestProgress.progress(file.getName(), (int) processedFiles); - } else { - fileIngestProgress.progress(file.getName(), (int) estimatedFilesToProcess); + if (file != null) { + synchronized (this) { + ++processedFiles; + if (processedFiles <= estimatedFilesToProcess) { + fileIngestProgress.progress(file.getName(), (int) processedFiles); + } else { + fileIngestProgress.progress(file.getName(), (int) estimatedFilesToProcess); + } + } + FileIngestPipeline pipeline = fileIngestPipelines.take(); + List errors = new ArrayList<>(); + errors.addAll(pipeline.process(file)); + fileIngestPipelines.put(pipeline); + if (!errors.isEmpty()) { + logIngestModuleErrors(errors); } } - FileIngestPipeline pipeline = fileIngestPipelines.take(); - List errors = new ArrayList<>(); - errors.addAll(pipeline.process(file)); - fileIngestPipelines.put(pipeline); - if (!errors.isEmpty()) { - logIngestModuleErrors(errors); - } } - fileTaskScheduler.notifyTaskCompleted(task); - - if (!fileTaskScheduler.hasIncompleteTasksForIngestJob(this)) { - List errors = new 
ArrayList<>(); - while (!fileIngestPipelines.isEmpty()) { - FileIngestPipeline pipeline = fileIngestPipelines.poll(); - errors.addAll(pipeline.shutDown()); - } - if (!errors.isEmpty()) { - logIngestModuleErrors(errors); - } - fileIngestProgress.finish(); - if (!dataSourceTaskScheduler.hasIncompleteTasksForIngestJob(this)) { - finish(); - } + ingestTaskScheduler.notifyTaskCompleted(task); + if (!ingestTaskScheduler.hasIncompleteTasksForIngestJob(this)) { + finish(); } } private void finish() { + List errors = new ArrayList<>(); + while (!fileIngestPipelines.isEmpty()) { + FileIngestPipeline pipeline = fileIngestPipelines.poll(); + errors.addAll(pipeline.shutDown()); + } + if (!errors.isEmpty()) { + logIngestModuleErrors(errors); + } + fileIngestProgress.finish(); ingestJobsById.remove(id); if (!isCancelled()) { IngestManager.getInstance().fireIngestJobCompleted(id); diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobContext.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobContext.java index eb30235c62..dd23b03058 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobContext.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobContext.java @@ -63,7 +63,7 @@ public final class IngestJobContext { public void scheduleFiles(List files) { for (AbstractFile file : files) { try { - FileIngestTaskScheduler.getInstance().scheduleTask(ingestJob, file); + IngestTaskScheduler.getInstance().scheduleFileIngestTask(ingestJob, file); } catch (InterruptedException ex) { // Ultimately, this method is called by ingest task execution // threads running ingest module code. Handle the unexpected diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java index b1bfa68af4..b4ba4023fe 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java @@ -124,7 +124,7 @@ public class IngestManager { */ private void startDataSourceIngestThread() { long threadId = nextThreadId.incrementAndGet(); - Future handle = dataSourceIngestThreadPool.submit(new ExecuteIngestTasksThread(DataSourceIngestTaskScheduler.getInstance())); + Future handle = dataSourceIngestThreadPool.submit(new ExecuteIngestTasksThread(IngestTaskScheduler.getInstance().getDataSourceIngestTaskQueue())); dataSourceIngestThreads.put(threadId, handle); } @@ -134,7 +134,7 @@ public class IngestManager { */ private void startFileIngestThread() { long threadId = nextThreadId.incrementAndGet(); - Future handle = fileIngestThreadPool.submit(new ExecuteIngestTasksThread(FileIngestTaskScheduler.getInstance())); + Future handle = fileIngestThreadPool.submit(new ExecuteIngestTasksThread(IngestTaskScheduler.getInstance().getFileIngestTaskQueue())); fileIngestThreads.put(threadId, handle); } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestTaskScheduler.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestTaskScheduler.java index 4727f30ebc..402796b6ed 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestTaskScheduler.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestTaskScheduler.java @@ -35,24 +35,40 @@ import org.sleuthkit.datamodel.FileSystem; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; -final class FileIngestTaskScheduler implements IngestTaskQueue { +final class IngestTaskScheduler { - private static final FileIngestTaskScheduler instance = new FileIngestTaskScheduler(); - private static final Logger logger = 
Logger.getLogger(FileIngestTaskScheduler.class.getName()); + private static final IngestTaskScheduler instance = new IngestTaskScheduler(); + private static final Logger logger = Logger.getLogger(IngestTaskScheduler.class.getName()); private static final int FAT_NTFS_FLAGS = TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_FAT12.getValue() | TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_FAT16.getValue() | TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_FAT32.getValue() | TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_NTFS.getValue(); - private final List fileTasks = new ArrayList<>(); // Guarded by this - private final TreeSet rootDirectoryTasksQueue = new TreeSet<>(new RootDirectoryTaskComparator()); // Guarded by this - private final List directoryTasksQueue = new ArrayList<>(); // Guarded by this - private final LinkedBlockingQueue fileTasksQueue = new LinkedBlockingQueue<>(); + private final LinkedBlockingQueue dataSourceTasks = new LinkedBlockingQueue<>(); + private final TreeSet rootDirectoryTasks = new TreeSet<>(new RootDirectoryTaskComparator()); // Guarded by this + private final List directoryTasks = new ArrayList<>(); // Guarded by this + private final LinkedBlockingQueue fileTasks = new LinkedBlockingQueue<>(); + private final List tasksInProgress = new ArrayList<>(); // Guarded by this + private final DataSourceIngestTaskQueue dataSourceTaskDispenser = new DataSourceIngestTaskQueue(); + private final FileIngestTaskQueue fileTaskDispenser = new FileIngestTaskQueue(); - static FileIngestTaskScheduler getInstance() { + static IngestTaskScheduler getInstance() { return instance; } - private FileIngestTaskScheduler() { + private IngestTaskScheduler() { } - boolean tryScheduleTasks(IngestJob job, Content dataSource) throws InterruptedException { + synchronized void scheduleDataSourceIngestTask(IngestJob job, Content dataSource) throws InterruptedException { + DataSourceIngestTask task = new DataSourceIngestTask(job, dataSource); + tasksInProgress.add(task); + try { + // Should not block, queue is (theoretically) unbounded. + dataSourceTasks.put(task); + } catch (InterruptedException ex) { + tasksInProgress.remove(task); + Logger.getLogger(IngestTaskScheduler.class.getName()).log(Level.FINE, "Interruption of unexpected block on tasks queue", ex); //NON-NLS + throw ex; + } + } + + void scheduleFileIngestTasks(IngestJob job, Content dataSource) throws InterruptedException { // Get the top level files of the data source. Collection rootObjects = dataSource.accept(new GetRootDirectoryVisitor()); List topLevelFiles = new ArrayList<>(); @@ -86,79 +102,84 @@ final class FileIngestTaskScheduler implements IngestTaskQueue { // Try to enqueue file ingest tasks for the top level files. for (AbstractFile firstLevelFile : topLevelFiles) { FileIngestTask fileTask = new FileIngestTask(job, firstLevelFile); - if (shouldEnqueueTask(fileTask)) { + if (shouldEnqueueFileTask(fileTask)) { synchronized (this) { - rootDirectoryTasksQueue.add(fileTask); + rootDirectoryTasks.add(fileTask); } } } - updateTaskQueues(); - - return !fileTasks.isEmpty(); + updateFileTaskQueues(); } - void scheduleTask(IngestJob job, AbstractFile file) throws InterruptedException { + void scheduleFileIngestTask(IngestJob job, AbstractFile file) throws InterruptedException { FileIngestTask task = new FileIngestTask(job, file); - if (shouldEnqueueTask(task)) { + if (shouldEnqueueFileTask(task)) { // Direct to file tasks queue, no need to update root directory or // directory tasks queues. 
enqueueFileTask(task); } } - @Override - public IngestTask getNextTask() throws InterruptedException { - FileIngestTask task = fileTasksQueue.take(); - updateTaskQueues(); - return task; - } - - private synchronized void updateTaskQueues() throws InterruptedException { - // we loop because we could have a directory that has all files - // that do not get enqueued + private synchronized void updateFileTaskQueues() throws InterruptedException { + // Loop until at least one task is added to the file tasks queue or the + // directory task queues are empty. while (true) { - // There are files in the queue, we're done - if (fileTasksQueue.isEmpty() == false) { + // First check for tasks in the file queue. If this queue is not + // empty, the update is done. + if (fileTasks.isEmpty() == false) { return; } - // fill in the directory queue if it is empty. - if (this.directoryTasksQueue.isEmpty()) { - // bail out if root is also empty -- we are done - if (rootDirectoryTasksQueue.isEmpty()) { + + // If the directory tasks queue is empty, move the next root + // directory task, if any, into it. If both directory task queues + // are empty and the file tasks queue is empty, the update is done. + if (directoryTasks.isEmpty()) { + if (rootDirectoryTasks.isEmpty()) { return; } - FileIngestTask rootTask = rootDirectoryTasksQueue.pollFirst(); - directoryTasksQueue.add(rootTask); + directoryTasks.add(rootDirectoryTasks.pollFirst()); } - //pop and push AbstractFile directory children if any - //add the popped and its leaf children onto cur file list - FileIngestTask parentTask = directoryTasksQueue.remove(directoryTasksQueue.size() - 1); - final AbstractFile parentFile = parentTask.getFile(); - // add itself to the file list - if (shouldEnqueueTask(parentTask)) { - enqueueFileTask(parentTask); + + // Try to move a task from the directory queue to the file tasks + // queue. If the directory contains directories or files, try to + // enqueue them as well. Note that it is absolutely necesssary to + // add at least one task to the file queue for every root directory + // that was enqueued, since scheduleFileIngestTasks() returned + // true for the associated job and the job is expecting to execute + // at least one task before it calls itself done. 
+ boolean fileTaskEnqueued = false; + FileIngestTask directoryTask = directoryTasks.remove(directoryTasks.size() - 1); + if (shouldEnqueueFileTask(directoryTask)) { + enqueueFileTask(directoryTask); + fileTaskEnqueued = true; } - // add its children to the file and directory lists + final AbstractFile directory = directoryTask.getFile(); try { - List children = parentFile.getChildren(); - for (Content c : children) { - if (c instanceof AbstractFile) { - AbstractFile childFile = (AbstractFile) c; - FileIngestTask childTask = new FileIngestTask(parentTask.getIngestJob(), childFile); - if (childFile.hasChildren()) { - directoryTasksQueue.add(childTask); - } else if (shouldEnqueueTask(childTask)) { - enqueueFileTask(childTask); + List children = directory.getChildren(); + for (Content child : children) { + if (child instanceof AbstractFile) { + AbstractFile file = (AbstractFile) child; + FileIngestTask fileTask = new FileIngestTask(directoryTask.getIngestJob(), file); + if (file.hasChildren()) { + directoryTasks.add(fileTask); + fileTaskEnqueued = true; + } else if (shouldEnqueueFileTask(fileTask)) { + enqueueFileTask(fileTask); + fileTaskEnqueued = true; } } } } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Could not get children of file and update file queues: " + parentFile.getName(), ex); //NON-NLS + String errorMessage = String.format("An error occurred getting the children of %s", directory.getName()); //NON-NLS + logger.log(Level.SEVERE, errorMessage, ex); + } + if (!fileTaskEnqueued) { + enqueueFileTask(new FileIngestTask(directoryTask.getIngestJob(), null)); } } } - private static boolean shouldEnqueueTask(final FileIngestTask processTask) { + private static boolean shouldEnqueueFileTask(final FileIngestTask processTask) { final AbstractFile aFile = processTask.getFile(); //if it's unalloc file, skip if so scheduled if (processTask.getIngestJob().shouldProcessUnallocatedSpace() == false && aFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) { @@ -204,34 +225,44 @@ final class FileIngestTaskScheduler implements IngestTaskQueue { } private synchronized void enqueueFileTask(FileIngestTask task) throws InterruptedException { - fileTasks.add(task); + tasksInProgress.add(task); try { // Should not block, queue is (theoretically) unbounded. 
- fileTasksQueue.put(task); + fileTasks.put(task); } catch (InterruptedException ex) { - fileTasks.remove(task); - Logger.getLogger(DataSourceIngestTaskScheduler.class.getName()).log(Level.FINE, "Interruption of unexpected block on tasks queue", ex); //NON-NLS + tasksInProgress.remove(task); + Logger.getLogger(IngestTaskScheduler.class.getName()).log(Level.FINE, "Interruption of unexpected block on tasks queue", ex); //NON-NLS throw ex; } } - synchronized void notifyTaskCompleted(FileIngestTask task) { - fileTasks.remove(task); + synchronized void notifyTaskCompleted(IngestTask task) { + tasksInProgress.remove(task); } synchronized boolean hasIncompleteTasksForIngestJob(IngestJob job) { long jobId = job.getId(); + for (IngestTask task : tasksInProgress) { + if (task.getIngestJob().getId() == jobId) { + return true; + } + } for (IngestTask task : fileTasks) { if (task.getIngestJob().getId() == jobId) { return true; } } - for (FileIngestTask task : directoryTasksQueue) { + for (IngestTask task : directoryTasks) { if (task.getIngestJob().getId() == jobId) { return true; } } - for (FileIngestTask task : rootDirectoryTasksQueue) { + for (IngestTask task : rootDirectoryTasks) { + if (task.getIngestJob().getId() == jobId) { + return true; + } + } + for (IngestTask task : dataSourceTasks) { if (task.getIngestJob().getId() == jobId) { return true; } @@ -334,4 +365,30 @@ final class FileIngestTaskScheduler implements IngestTaskQueue { } } } + + IngestTaskQueue getDataSourceIngestTaskQueue() { + return this.dataSourceTaskDispenser; + } + + IngestTaskQueue getFileIngestTaskQueue() { + return this.fileTaskDispenser; + } + + private final class DataSourceIngestTaskQueue implements IngestTaskQueue { + + @Override + public IngestTask getNextTask() throws InterruptedException { + return dataSourceTasks.take(); + } + } + + private final class FileIngestTaskQueue implements IngestTaskQueue { + + @Override + public IngestTask getNextTask() throws InterruptedException { + FileIngestTask task = fileTasks.take(); + updateFileTaskQueues(); + return task; + } + } } From a6ad04bbd4e408abd976c1ec9553f2c79f53e25f Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 9 Jun 2014 12:06:52 -0400 Subject: [PATCH 3/4] Fix comment typo in IngestJob.java --- Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java index 91756959dd..6aa63e9b2d 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java @@ -33,7 +33,7 @@ import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; /** - * IngestJobs encapsulates the settings, ingest module pipelines, and progress + * IngestJob encapsulates the settings, ingest module pipelines, and progress * bars that are used to process a data source when a user chooses to run a set * of ingest modules on the data source. 
*/ From 4ad27fd2535ca374ad3e14334aec5b0260e2557e Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 9 Jun 2014 12:19:29 -0400 Subject: [PATCH 4/4] Fix comment in IngestJob.java --- Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java index 6aa63e9b2d..b21240a5db 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java @@ -109,6 +109,8 @@ final class IngestJob { /** * Create the file and data source pipelines. * + * @param ingestModuleTemplates Ingest module templates to use to populate + * the pipelines. * @throws InterruptedException */ private void createIngestPipelines(List ingestModuleTemplates) throws InterruptedException {
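The guarantee named in the patch subjects, that every ingest job that starts gets at least one task, rests on two cooperating changes visible above: updateFileTaskQueues() enqueues a placeholder FileIngestTask with a null file whenever a root directory produces nothing that passes shouldEnqueueFileTask(), and IngestJob.process(FileIngestTask) now checks for a null file, runs no modules on it, but still calls notifyTaskCompleted() so the job can reach finish(). What follows is a minimal, self-contained sketch of that pattern using simplified, hypothetical class shapes (Task, Scheduler, Job); it illustrates the idea only and is not the actual Autopsy classes or their signatures.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;

// Hypothetical, simplified stand-ins for FileIngestTask, IngestTaskScheduler,
// and IngestJob; for illustration only.
final class Task {
    final long jobId;
    final String file; // null marks a placeholder task

    Task(long jobId, String file) {
        this.jobId = jobId;
        this.file = file;
    }
}

final class Scheduler {
    private final LinkedBlockingQueue<Task> fileTasks = new LinkedBlockingQueue<>();
    private final List<Task> tasksInProgress = new ArrayList<>(); // Guarded by this

    // Mirrors the tail of updateFileTaskQueues(): if filtering leaves nothing to
    // schedule for a directory, enqueue a placeholder so the job still gets one task.
    synchronized void scheduleDirectory(long jobId, List<String> files) throws InterruptedException {
        boolean enqueued = false;
        for (String file : files) {
            if (shouldEnqueue(file)) {
                enqueue(new Task(jobId, file));
                enqueued = true;
            }
        }
        if (!enqueued) {
            enqueue(new Task(jobId, null)); // placeholder task with a null file
        }
    }

    private void enqueue(Task task) throws InterruptedException {
        tasksInProgress.add(task);
        fileTasks.put(task); // should not block, queue is unbounded
    }

    private static boolean shouldEnqueue(String file) {
        return !file.endsWith(".unalloc"); // stand-in for the real filtering rules
    }

    Task getNextTask() throws InterruptedException {
        return fileTasks.take();
    }

    synchronized void notifyTaskCompleted(Task task) {
        tasksInProgress.remove(task);
    }

    synchronized boolean hasIncompleteTasksForJob(long jobId) {
        for (Task task : tasksInProgress) {
            if (task.jobId == jobId) {
                return true;
            }
        }
        return false;
    }
}

final class Job {
    private final long id;

    Job(long id) {
        this.id = id;
    }

    // Mirrors IngestJob.process(FileIngestTask): a placeholder task runs no
    // modules, but its completion is still reported, so the job always finishes.
    void process(Scheduler scheduler, Task task) {
        if (task.file != null) {
            runFileIngestPipeline(task.file);
        }
        scheduler.notifyTaskCompleted(task);
        if (!scheduler.hasIncompleteTasksForJob(id)) {
            finish();
        }
    }

    private void runFileIngestPipeline(String file) {
        // Placeholder for running the file ingest module pipeline on the file.
    }

    private void finish() {
        // Shut down pipelines, finish progress bars, fire the job-completed event.
    }
}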