First cut at moving latest autoingest code to Autopsy

This commit is contained in:
Eugene Livis 2016-08-12 11:13:45 -04:00
parent 6d0dd29754
commit 406f82024c
19 changed files with 2017 additions and 757 deletions

View File

@ -0,0 +1,97 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.autoingest;
import java.io.IOException;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.nio.file.Path;
import org.sleuthkit.autopsy.coreutils.Logger;
/**
 * Utility for creating and checking for the existence of an automated ingest
 * alert file. The purpose of the file is to put a marker in the case directory
 * when an error or warning occurs in connection with an automated ingest job.
 */
final class AutoIngestAlertFile {

    private static final String ERROR_FILE_NAME = "autoingest.alert";

    /**
     * Checks whether an automated ingest alert file exists in a case
     * directory.
     *
     * @param caseDirectoryPath The case directory path.
     *
     * @return True if the alert file exists, false otherwise.
     */
    static boolean exists(Path caseDirectoryPath) {
        return caseDirectoryPath.resolve(ERROR_FILE_NAME).toFile().exists();
    }

    /**
     * Creates an automated ingest alert file in a case directory if such a
     * file does not already exist.
     *
     * @param caseDirectoryPath The case directory path.
     *
     * @throws AutoIngestAlertFileException If there is a problem creating the
     *                                      file.
     */
    static void create(Path caseDirectoryPath) throws AutoIngestAlertFileException {
        try {
            Files.createFile(caseDirectoryPath.resolve(ERROR_FILE_NAME));
        } catch (FileAlreadyExistsException ignored) {
            /*
             * The file already exists; this is not exceptional, the desired
             * end state (alert file present) has been reached.
             */
        } catch (IOException ex) {
            /*
             * Throwing FileAlreadyExistsException is optional for a file
             * system provider, so check for pre-existence of the file before
             * treating the IOException as a failure.
             */
            if (!exists(caseDirectoryPath)) {
                throw new AutoIngestAlertFileException(String.format("Error creating automated ingest alert file in %s", caseDirectoryPath), ex);
            }
        }
    }

    /**
     * Private, do-nothing constructor to suppress creation of instances of
     * this class.
     */
    private AutoIngestAlertFile() {
    }

    /**
     * Exception thrown when there is a problem creating an automated ingest
     * alert file.
     */
    static final class AutoIngestAlertFileException extends Exception {

        private static final long serialVersionUID = 1L;

        /**
         * Constructs an exception with a message.
         *
         * @param message The exception message.
         */
        private AutoIngestAlertFileException(String message) {
            super(message);
        }

        /**
         * Constructs an exception with a message and a cause.
         *
         * @param message The exception message.
         * @param cause   The cause of the exception.
         */
        private AutoIngestAlertFileException(String message, Throwable cause) {
            super(message, cause);
        }
    }
}

View File

@ -30,44 +30,49 @@ import org.sleuthkit.autopsy.casemodule.CaseMetadata;
import org.sleuthkit.autopsy.coreutils.Logger;
/**
* A representation of case created by automated ingest.
* A representation of a case created by automated ingest.
*/
class AutoIngestCase implements Comparable<AutoIngestCase> {
private static final Logger logger = Logger.getLogger(AutoIngestCase.class.getName());
private final Path caseFolderPath;
private final Path caseDirectoryPath;
private final String caseName;
private final Path metadataFilePath;
private Date createDate;
private final Date createDate;
private Date lastModfiedDate;
/**
* Constructs s representation of case created by automated ingest.
* Constructs a representation of case created by automated ingest.
*
* @param caseFolderPath The case folder path.
* @param caseDirectoryPath The case directory path.
*/
AutoIngestCase(Path caseFolderPath) {
this.caseFolderPath = caseFolderPath;
caseName = PathUtils.caseNameFromCaseFolderPath(caseFolderPath);
metadataFilePath = caseFolderPath.resolve(caseName + CaseMetadata.getFileExtension());
// RJCTODO: Throw instead of reporting error, let client decide what to do.
AutoIngestCase(Path caseDirectoryPath) {
this.caseDirectoryPath = caseDirectoryPath;
caseName = PathUtils.caseNameFromCaseDirectoryPath(caseDirectoryPath);
metadataFilePath = caseDirectoryPath.resolve(caseName + CaseMetadata.getFileExtension());
BasicFileAttributes fileAttrs = null;
try {
BasicFileAttributes fileAttrs = Files.readAttributes(metadataFilePath, BasicFileAttributes.class);
fileAttrs = Files.readAttributes(metadataFilePath, BasicFileAttributes.class);
} catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Error reading file attributes of case metadata file in %s, will use current time for case createDate/lastModfiedDate", caseDirectoryPath), ex);
}
if (null != fileAttrs) {
createDate = new Date(fileAttrs.creationTime().toMillis());
lastModfiedDate = new Date(fileAttrs.lastModifiedTime().toMillis());
} catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Error reading file attributes of case metadata file in %s, will use current time for case createDate/lastModfiedDate", caseFolderPath), ex);
} else {
createDate = new Date();
lastModfiedDate = new Date();
}
}
/**
* Gets the case folder path.
* Gets the case directory path.
*
* @return The case folder path.
* @return The case directory path.
*/
Path getCaseFolderPath() {
return this.caseFolderPath;
Path getCaseDirectoryPath() {
return this.caseDirectoryPath;
}
/**
@ -83,7 +88,7 @@ class AutoIngestCase implements Comparable<AutoIngestCase> {
* Gets the creation date for the case, defined as the create time of the
* case metadata file.
*
* @return The creation date.
* @return The case creation date.
*/
Date getCreationDate() {
return this.createDate;
@ -95,98 +100,98 @@ class AutoIngestCase implements Comparable<AutoIngestCase> {
*
* @return The last accessed date.
*/
// RJCTODO: Throw instead of reporting error, let client decide what to do.
Date getLastAccessedDate() {
try {
BasicFileAttributes fileAttrs = Files.readAttributes(metadataFilePath, BasicFileAttributes.class);
createDate = new Date(fileAttrs.creationTime().toMillis());
lastModfiedDate = new Date(fileAttrs.lastModifiedTime().toMillis());
} catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Error reading file attributes of case metadata file in %s, lastModfiedDate time not updated", caseFolderPath), ex);
logger.log(Level.SEVERE, String.format("Error reading file attributes of case metadata file in %s, lastModfiedDate time not updated", caseDirectoryPath), ex);
}
return lastModfiedDate;
}
/**
* Gets the status of this case based on state files in the case folder.
* Gets the status of this case based on the auto ingest result file in the
* case directory.
*
* @return See CaseStatus enum definition.
*/
CaseStatus getStatus() {
try {
if (StateFile.exists(caseFolderPath, StateFile.Type.CANCELLED)) {
return CaseStatus.CANCELLATIONS;
} else if (StateFile.exists(caseFolderPath, StateFile.Type.ERROR)) {
return CaseStatus.ERRORS;
} else if (StateFile.exists(caseFolderPath, StateFile.Type.INTERRUPTED)) {
return CaseStatus.INTERRUPTS;
} else {
return CaseStatus.OK;
}
} catch (IOException | SecurityException ex) {
logger.log(Level.SEVERE, String.format("Failed to determine status of case at %s", caseFolderPath), ex);
return CaseStatus.ERRORS;
if (AutoIngestAlertFile.exists(caseDirectoryPath)) {
return CaseStatus.ALERT;
} else {
return CaseStatus.OK;
}
}
/**
* @inheritDoc
* Indicates whether or not some other object is "equal to" this
* AutoIngestCase object.
*
* @param other The other object.
*
* @return True or false.
*/
@Override
public boolean equals(Object obj) {
if (!(obj instanceof AutoIngestCase)) {
public boolean equals(Object other) {
if (!(other instanceof AutoIngestCase)) {
return false;
}
if (obj == this) {
if (other == this) {
return true;
}
AutoIngestCase rhs = (AutoIngestCase) obj;
return this.caseFolderPath.toString().equals(rhs.caseFolderPath.toString());
return this.caseDirectoryPath.toString().equals(((AutoIngestCase) other).caseDirectoryPath.toString());
}
/**
* @inheritDoc
* Returns a hash code value for this AutoIngestCase object.
*
* @return The hash code.
*/
@Override
public int hashCode() {
int hash = 7;
hash = 71 * hash + Objects.hashCode(this.caseFolderPath);
hash = 71 * hash + Objects.hashCode(this.caseDirectoryPath);
hash = 71 * hash + Objects.hashCode(this.createDate);
hash = 71 * hash + Objects.hashCode(this.caseName);
return hash;
}
/**
* Default sorting is by last accessed date, descending.
* Compares this AutoIngestCase object with another AutoIngestCase object
* for order.
*/
@Override
public int compareTo(AutoIngestCase o) {
return -this.lastModfiedDate.compareTo(o.getLastAccessedDate());
public int compareTo(AutoIngestCase other) {
return -this.lastModfiedDate.compareTo(other.getLastAccessedDate());
}
/**
* Custom comparator that allows us to sort List<AutoIngestCase> on reverse
* chronological date created (descending)
*
* Comparator for a descending order sort on date created.
*/
static class ReverseDateLastAccessedComparator implements Comparator<AutoIngestCase> {
static class LastAccessedDateDescendingComparator implements Comparator<AutoIngestCase> {
/**
* Compares two AutoIngestCase objects for order based on last accessed
* date (descending).
*
* @param object The first AutoIngestCase object
* @param otherObject The second AutoIngestCase object.
*
* @return A negative integer, zero, or a positive integer as the first
* argument is less than, equal to, or greater than the second.
*/
@Override
public int compare(AutoIngestCase o1, AutoIngestCase o2) {
return -o1.getLastAccessedDate().compareTo(o2.getLastAccessedDate());
public int compare(AutoIngestCase object, AutoIngestCase otherObject) {
return -object.getLastAccessedDate().compareTo(otherObject.getLastAccessedDate());
}
}
/**
* Custom comparator that allows us to sort List<AutoIngestCase> on reverse
* chronological date created (descending)
*
*/
static class ReverseDateCreatedComparator implements Comparator<AutoIngestCase> {
enum CaseStatus {
@Override
public int compare(AutoIngestCase o1, AutoIngestCase o2) {
return -o1.getCreationDate().compareTo(o2.getCreationDate());
}
OK,
ALERT
}
}

View File

@ -0,0 +1,65 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.autoingest;
import java.io.Serializable;
import org.sleuthkit.autopsy.events.AutopsyEvent;
/**
 * Event published when a case is deleted by the automated ingest manager.
 */
public final class AutoIngestCaseDeletedEvent extends AutopsyEvent implements Serializable {

    private static final long serialVersionUID = 1L;
    private final AutoIngestManager.CaseDeletionResult result;
    private final String nodeName;

    /**
     * Constructs an event that is published when a case is deleted by the
     * automated ingest manager.
     *
     * @param result   The result of the case deletion attempt.
     * @param nodeName The host name of the node that deleted the case.
     */
    public AutoIngestCaseDeletedEvent(AutoIngestManager.CaseDeletionResult result, String nodeName) {
        super(AutoIngestManager.Event.CASE_DELETED.toString(), null, null);
        this.result = result;
        this.nodeName = nodeName;
    }

    /**
     * Gets the host name of the node that deleted the case.
     *
     * @return The host name of the node.
     */
    public String getNodeName() {
        return nodeName;
    }

    /**
     * Gets the result of the case deletion attempt.
     *
     * @return The case deletion result.
     */
    public AutoIngestManager.CaseDeletionResult getResult() {
        return result;
    }
}

View File

@ -0,0 +1,65 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.autoingest;
import java.io.Serializable;
import org.sleuthkit.autopsy.events.AutopsyEvent;
/**
 * Event published when an automated ingest manager prioritizes all or part of
 * a case.
 */
public final class AutoIngestCasePrioritizedEvent extends AutopsyEvent implements Serializable {

    private static final long serialVersionUID = 1L;
    private final String caseName;
    private final String nodeName;

    /**
     * Constructs an event published when an automated ingest manager
     * prioritizes all or part of a case.
     *
     * @param nodeName The host name of the node that prioritized the case.
     * @param caseName The name of the case.
     */
    public AutoIngestCasePrioritizedEvent(String nodeName, String caseName) {
        super(AutoIngestManager.Event.CASE_PRIORITIZED.toString(), null, null);
        this.caseName = caseName;
        this.nodeName = nodeName;
    }

    /**
     * Gets the name of the prioritized case.
     *
     * @return The case name.
     */
    public String getCaseName() {
        return caseName;
    }

    /**
     * Gets the host name of the node that prioritized the case.
     *
     * @return The host name of the node.
     */
    public String getNodeName() {
        return nodeName;
    }
}

View File

@ -18,447 +18,236 @@
*/
package org.sleuthkit.autopsy.autoingest;
import java.io.IOException;
import java.nio.file.Files;
import java.io.Serializable;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.time.Instant;
import java.util.Comparator;
import java.util.Date;
import java.util.Objects;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.Immutable;
import javax.annotation.concurrent.ThreadSafe;
import org.sleuthkit.autopsy.coreutils.NetworkUtils;
import org.sleuthkit.autopsy.core.UserPreferences;
import org.sleuthkit.autopsy.ingest.IngestJob;
import org.joda.time.DateTime;
/**
* An automated ingest job completed by, or to be completed by, the automated
* ingest manager.
* An automated ingest job associated with a manifest file. A manifest file
* specifies a co-located data source and a case to which the data source is to
* be added.
*/
class AutoIngestJob implements Comparable<AutoIngestJob> {
@ThreadSafe
public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializable {
// ELTODO: move JobIngestStatus back into AIM
/**
* Represents the state of an auto ingest job at any given moment during its
* lifecycle as it moves from waiting to be processed, through the various
* stages of processing, to its final completed state.
*/
static final class JobIngestStatus {
private enum IngestStatus {
PENDING("Pending"),
STARTING("Starting"),
UPDATING_SHARED_CONFIG("Updating shared configuration"),
CHECKING_SERVICES("Checking services"),
OPENING_CASE("Opening case"),
IDENTIFYING_IMAGES("Identifying images"),
ADDING_IMAGES("Adding images"),
ANALYZING_IMAGES("Analyzing images"),
ANALYZING_FILES("Analyzing files"),
EXPORTING_FILES("Exporting files"),
CANCELLING_MODULE("Cancelling module"),
CANCELLING("Cancelling"),
COMPLETED("Completed");
private final String displayText;
private IngestStatus(String displayText) {
this.displayText = displayText;
}
String getDisplayText() {
return displayText;
}
}
private IngestStatus ingestStatus;
private String statusDisplayName;
private Date startDate;
private IngestJob ingestJob;
private boolean cancelled;
private Date dateCompleted;
private JobIngestStatus(Date dateCompleted) {
ingestStatus = IngestStatus.PENDING;
statusDisplayName = ingestStatus.getDisplayText();
startDate = DateTime.now().toDate();
this.dateCompleted = dateCompleted;
}
/**
* Updates displayed status and start fileTime of auto ingest job. Used
* primarily to display status of remote running jobs.
*
* @param newDisplayName Displayed status of the auto ingest job.
* @param startTime Start fileTime of the current activity.
*/
synchronized private void setStatus(String newDisplayName, Date startTime) {
statusDisplayName = newDisplayName;
startDate = startTime;
}
/**
* Updates status of auto ingest job. Sets current fileTime as activity
* start fileTime. Used to update status of local running job.
*
* @param newStatus Status of the auto ingest job.
*/
synchronized private void setStatus(IngestStatus newStatus) {
if (ingestStatus == IngestStatus.CANCELLING && newStatus != IngestStatus.COMPLETED) {
/**
* Do not overwrite canceling status with anything other than
* completed status.
*/
return;
}
ingestStatus = newStatus;
statusDisplayName = ingestStatus.getDisplayText();
startDate = Date.from(Instant.now());
if (ingestStatus == IngestStatus.COMPLETED) {
/**
* Release the reference for garbage collection since this
* object may live for a long time within a completed job.
*/
ingestJob = null;
}
if (ingestStatus == IngestStatus.COMPLETED) {
dateCompleted = startDate;
}
}
synchronized private void setIngestJob(IngestJob ingestJob) {
/**
* Once this field is set, the ingest job should be used to
* determine the current activity up until the the job is completed.
*/
this.ingestJob = ingestJob;
}
synchronized AutoIngestJob.Status getStatus() {
if (null != ingestJob && ingestStatus != IngestStatus.CANCELLING && ingestStatus != IngestStatus.EXPORTING_FILES) {
String activityDisplayText;
IngestJob.ProgressSnapshot progress = ingestJob.getSnapshot();
IngestJob.DataSourceIngestModuleHandle ingestModuleHandle = progress.runningDataSourceIngestModule();
if (null != ingestModuleHandle) {
/**
* A first or second stage data source level ingest module
* is running. Reporting this takes precedence over
* reporting generic file analysis.
*/
startDate = ingestModuleHandle.startTime();
if (!ingestModuleHandle.isCancelled()) {
activityDisplayText = ingestModuleHandle.displayName();
} else {
activityDisplayText = String.format(IngestStatus.CANCELLING_MODULE.getDisplayText(), ingestModuleHandle.displayName());
}
} else {
/**
* If no data source level ingest module is running, then
* either it is still the first stage of analysis and file
* level ingest modules are running or another ingest job is
* still running. Note that there can be multiple ingest
* jobs running in parallel. For example, there is an ingest
* job created to ingest each extracted virtual machine.
*/
activityDisplayText = IngestStatus.ANALYZING_FILES.getDisplayText();
startDate = progress.fileIngestStartTime();
}
return new AutoIngestJob.Status(activityDisplayText, startDate);
} else {
return new AutoIngestJob.Status(statusDisplayName, startDate);
}
}
synchronized private IngestJob setStatusCancelled() {
cancelled = true;
setStatus(JobIngestStatus.IngestStatus.CANCELLING);
return ingestJob;
}
synchronized private IngestJob cancelModule() {
setStatus(JobIngestStatus.IngestStatus.CANCELLING_MODULE);
return ingestJob;
}
synchronized private boolean isCancelled() {
return cancelled;
}
synchronized Date getDateCompleted() {
return dateCompleted;
}
synchronized Date getDateStarted() {
return startDate;
}
}
private static final Logger logger = Logger.getLogger(AutoIngestJob.class.getName());
private final String caseName;
private final Path imageFolderPath;
private final Date imageFolderCreateDate;
private Path caseFolderName;
private final String jobDisplayName;
private String nodeName;
//ELTODO private final AutoIngestManager.JobIngestStatus ingestStatus;
private final JobIngestStatus ingestStatus;
private static final String localHostName = NetworkUtils.getLocalHostName();
private static final long serialVersionUID = 1L;
private static final String LOCAL_HOST_NAME = NetworkUtils.getLocalHostName();
private final Manifest manifest;
private final String nodeName;
@GuardedBy("this")
private String caseDirectoryPath;
@GuardedBy("this")
private Integer priority;
@GuardedBy("this")
private Stage stage;
@GuardedBy("this")
private Date stageStartDate;
@GuardedBy("this")
transient private IngestJob ingestJob;
/**
* This variable is being accessed by AID as well as JMS thread
*/
private volatile boolean isLocalJob;
private Date readyFileTimeStamp;
private Date prioritizedFileTimeStamp;
/**
* Constructs an automated ingest job completed by, or to be completed by,
* the automated ingest manager.
* RJCTODO
*
* @param imageFolderPath The fully qualified path to the case input folder
* for the job.
* @param caseName The case to which this job belongs. Note that this
* is the original case name (and not the timestamped
* case name).
* @param caseFolderName The fully qualified path to the case output folder
* for the job, if known.
* @param ingestStatus Ingest status details provided by the automated
* ingest manager.
* @param nodeName Name of the node that is processing the job
* @param manifest
*/
AutoIngestJob(Path imageFolderPath, String caseName, Path caseFolderName, /* //ELTODO AutoIngestManager.*/JobIngestStatus ingestStatus, String nodeName) {
this.caseName = caseName;
this.imageFolderPath = imageFolderPath;
this.caseFolderName = caseFolderName;
this.ingestStatus = ingestStatus;
this.jobDisplayName = resolveJobDisplayName(imageFolderPath);
this.isLocalJob = true; // jobs are set to "local" by default
AutoIngestJob(Manifest manifest, Path caseDirectoryPath, int priority, String nodeName) {
this.manifest = manifest;
if (null != caseDirectoryPath) {
this.caseDirectoryPath = caseDirectoryPath.toString();
} else {
this.caseDirectoryPath = "";
}
this.priority = priority;
this.nodeName = nodeName;
/**
* Either initialize to the folder creation date or the current date.
* Note that the way this is coded allows the folder creation date field
* to be final.
*/
BasicFileAttributes attrs = null;
try {
attrs = Files.readAttributes(imageFolderPath, BasicFileAttributes.class);
} catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Failed to read attributes of input folder %s", imageFolderPath), ex);
}
this.imageFolderCreateDate = attrs == null ? new Date() : new Date(attrs.creationTime().toMillis());
try {
attrs = Files.readAttributes(Paths.get(imageFolderPath.toString(), StateFile.Type.READY.fileName()), BasicFileAttributes.class);
this.readyFileTimeStamp = new Date(attrs.creationTime().toMillis());
} catch (IOException ex) {
// Auto ingest job may be created for a remotely running job so no need to log exception if we don't find READY file
this.readyFileTimeStamp = new Date();
}
try {
attrs = Files.readAttributes(Paths.get(imageFolderPath.toString(), StateFile.Type.PRIORITIZED.fileName()), BasicFileAttributes.class);
this.prioritizedFileTimeStamp = new Date(attrs.creationTime().toMillis());
} catch (IOException ex) {
this.prioritizedFileTimeStamp = null;
}
}
static final class Status {
private final String activity;
private final Date activityStartDate;
Status(String activity, Date activityStartTime) {
this.activity = activity;
this.activityStartDate = activityStartTime;
}
String getActivity() {
return this.activity;
}
Date getActivityStartDate() {
return this.activityStartDate;
}
}
Status getStatus() {
return ingestStatus.getStatus();
}
// ELTODO AutoIngestManager.JobIngestStatus getIngestStatus() {
JobIngestStatus getIngestStatus() {
return ingestStatus;
this.stage = Stage.PENDING;
this.stageStartDate = manifest.getDateFileCreated();
}
/**
* Determine auto ingest job's display name. Display name is a relative path
* from case folder down to auto ingest job's folder.
*
* @param jobFolderPath Full path to auto ingest job's directory
*
* @return Auto ingest job's display name
*/
private String resolveJobDisplayName(Path jobFolderPath) {
Path pathRelative;
try {
Path rootInputFolderPath = Paths.get(UserPreferences.getAutoModeImageFolder());
Path casePath = PathUtils.caseImagesPathFromImageFolderPath(rootInputFolderPath, jobFolderPath);
pathRelative = casePath.relativize(jobFolderPath);
} catch (Exception ignore) {
// job folder is not a subpath of case folder, return entire job folder path
return jobFolderPath.toString();
}
return pathRelative.toString();
}
/**
* Returns the fully qualified path to the input folder.
*/
Path getImageFolderPath() {
return imageFolderPath;
}
/**
* Returns the name of the case to which the ingest job belongs. Note that
* this is the original case name (not the timestamped Autopsy case name).
*/
String getCaseName() {
return this.caseName;
}
/**
* Returns the display name for current auto ingest job.
*/
String getJobDisplayName() {
return this.jobDisplayName;
}
/**
* Returns the fully qualified path to the case results folder.
*/
Path getCaseFolderPath() {
return this.caseFolderName;
}
/**
* Set the fully qualifies path to the case results folder.
*
* @param resultsPath
*/
void setCaseFolderPath(Path resultsPath) {
this.caseFolderName = resultsPath;
}
/**
* Get the date processing completed on the job.
*/
Date getDateCompleted() {
return ingestStatus.getDateCompleted();
}
/**
* Get the ready file created date
*/
Date getReadyFileTimeStamp() {
return this.readyFileTimeStamp;
}
/**
* Get the prioritized file created date.
*/
Date getPrioritizedFileTimeStamp() {
return this.prioritizedFileTimeStamp;
}
/**
* Sets the created date of the prirotized state file for this job.
*/
void setPrioritizedFileTimeStamp(Date timeStamp) {
/*
* RJC: This method is a bit of a hack to support a quick and dirty way
* of giving user feedback when an input folder or a case is
* prioritized. It can be removed when a better solution is found, or
* replaced with a method that looks up the state file time stamp.
*/
this.prioritizedFileTimeStamp = timeStamp;
}
/**
* Gets case status based on the state files that exist in the job folder.
*
* @return See CaseStatus enum definition.
*/
CaseStatus getCaseStatus() {
try {
if (StateFile.exists(imageFolderPath, StateFile.Type.CANCELLED)) {
return CaseStatus.CANCELLATIONS;
} else if (StateFile.exists(imageFolderPath, StateFile.Type.ERROR)) {
return CaseStatus.ERRORS;
} else if (StateFile.exists(imageFolderPath, StateFile.Type.INTERRUPTED)) {
return CaseStatus.INTERRUPTS;
} else {
return CaseStatus.OK;
}
} catch (IOException | SecurityException ex) {
logger.log(Level.SEVERE, String.format("Failed to determine status of case at %s", imageFolderPath), ex);
return CaseStatus.ERRORS;
}
}
/**
* Returns the date the input folder was created.
* RJCTODO
*
* @return
*/
Date getDateCreated() {
return this.imageFolderCreateDate;
Manifest getManifest() {
return this.manifest;
}
/**
* Updates flag whether the auto ingest job is running on local AIM node or
* remote one.
* Queries whether or not a case directory path has been set for this auto
* ingest job.
*
* @param isLocal true if job is local, false otherwise.
* @return True or false
*/
void setIsLocalJob(boolean isLocal) {
this.isLocalJob = isLocal;
synchronized boolean hasCaseDirectoryPath() {
return (false == this.caseDirectoryPath.isEmpty());
}
/**
* Gets flag whether the auto ingest job is running on local AIM node or
* remote one.
* Sets the path to the case directory of the case associated with this job.
*
* @return True if job is local, false otherwise.
* @param caseDirectoryPath The path to the case directory.
*/
boolean getIsLocalJob() {
return this.isLocalJob;
synchronized void setCaseDirectoryPath(Path caseDirectoryPath) {
this.caseDirectoryPath = caseDirectoryPath.toString();
}
/**
* Gets name of AIN that is processing the job.
* Gets the path to the case directory of the case associated with this job,
* may be null.
*
* @return Name of the node that is processing the job.
* @return The case directory path or null if the case directory has not
* been created yet.
*/
public String getNodeName() {
synchronized Path getCaseDirectoryPath() {
return Paths.get(caseDirectoryPath); // RJCTODO: This may not be such a good idea, perhaps a null is better if the path is empty string
}
synchronized void setPriority(Integer priority) {
this.priority = priority;
}
/**
* RJCTODO
*
* @return
*/
synchronized Integer getPriority() {
return this.priority;
}
/**
* RJCTODO
*
* @param newState
*/
synchronized void setStage(Stage newState) {
setStage(newState, Date.from(Instant.now()));
}
/**
* RJCTODO
*
* @param state
* @param stateStartedDate
*/
synchronized void setStage(Stage newState, Date stateStartedDate) {
if (Stage.CANCELLED == this.stage && Stage.COMPLETED != newState) {
/**
* Do not overwrite canceling status with anything other than
* completed status.
*/
return;
}
this.stage = newState;
this.stageStartDate = stateStartedDate;
}
/**
* RJCTODO:
*
* @return
*/
synchronized Stage getStage() {
return this.stage;
}
/**
* RJCTODO
*
* @return
*/
synchronized Date getStageStartDate() {
return this.stageStartDate;
}
/**
* RJCTODO
*
* @return
*/
synchronized StageDetails getStageDetails() {
String description;
Date startDate;
if (null != this.ingestJob) {
IngestJob.ProgressSnapshot progress = this.ingestJob.getSnapshot();
IngestJob.DataSourceIngestModuleHandle ingestModuleHandle = progress.runningDataSourceIngestModule();
if (null != ingestModuleHandle) {
/**
* A first or second stage data source level ingest module is
* running. Reporting this takes precedence over reporting
* generic file analysis.
*/
startDate = ingestModuleHandle.startTime();
if (!ingestModuleHandle.isCancelled()) {
description = ingestModuleHandle.displayName();
} else {
description = String.format(Stage.CANCELLING_MODULE.getDisplayText(), ingestModuleHandle.displayName()); // RJCTODO: FIx this
}
} else {
/**
* If no data source level ingest module is running, then either
* it is still the first stage of analysis and file level ingest
* modules are running or another ingest job is still running.
* Note that there can be multiple ingest jobs running in
* parallel. For example, there is an ingest job created to
* ingest each extracted virtual machine.
*/
description = Stage.ANALYZING_FILES.getDisplayText();
startDate = progress.fileIngestStartTime();
}
} else {
description = this.stage.getDisplayText();
startDate = this.stageStartDate;
}
return new StageDetails(description, startDate);
}
/**
* RJCTODO
*
* @param ingestStatus
*/
// RJCTODO: Consider moving this class into AIM and making this private
synchronized void setIngestJob(IngestJob ingestJob) {
this.ingestJob = ingestJob;
}
/**
* RJCTODO
*
* @return
*/
// RJCTODO: Consider moving this class into AIM and making this private.
// Or move the AID into a separate package. Or do not worry about it.
synchronized IngestJob getIngestJob() {
return this.ingestJob;
}
/**
* RJCTODO Gets name of the node associated with the job, possibly a remote
* host if the job is in progress.
*
* @return The node name.
*/
String getNodeName() {
return nodeName;
}
/**
* Sets name of AIN that is processing the job.
* RJCTODO
*
* @param nodeName Name of the node that is processing the job.
* @param obj
*
* @return
*/
public void setNodeName(String nodeName) {
this.nodeName = nodeName;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof AutoIngestJob)) {
@ -467,43 +256,32 @@ class AutoIngestJob implements Comparable<AutoIngestJob> {
if (obj == this) {
return true;
}
AutoIngestJob rhs = (AutoIngestJob) obj;
return this.imageFolderPath.toString().equals(rhs.imageFolderPath.toString());
return this.getManifest().getFilePath().equals(((AutoIngestJob) obj).getManifest().getFilePath());
}
/**
 * Computes a hash code for this job. Consistent with equals, which
 * compares jobs solely by manifest file path, so only that value is
 * hashed; the old fields (image folder path/date, case name) no longer
 * participate.
 *
 * @return The hash code.
 */
@Override
public int hashCode() {
    int hash = 7;
    hash = 71 * hash + Objects.hashCode(this.getManifest().getFilePath());
    return hash;
}
/**
 * Default sorting is by manifest file creation date, descending (newest
 * first).
 *
 * @param o The job to compare this job with.
 *
 * @return Negative if this job sorts before o, zero if equal, positive
 *         otherwise.
 */
@Override
public int compareTo(AutoIngestJob o) {
    return -this.getManifest().getDateFileCreated().compareTo(o.getManifest().getDateFileCreated());
}
/**
@ -512,33 +290,17 @@ class AutoIngestJob implements Comparable<AutoIngestJob> {
*/
/**
 * Comparator that orders auto ingest jobs by completion date, most
 * recently completed first.
 */
static class ReverseDateCompletedComparator implements Comparator<AutoIngestJob> {

    /**
     * Compares two jobs by completion date in descending order.
     *
     * @param firstJob  The first job.
     * @param secondJob The second job.
     *
     * @return Negative, zero, or positive per the Comparator contract.
     */
    @Override
    public int compare(AutoIngestJob firstJob, AutoIngestJob secondJob) {
        return secondJob.getDateCompleted().compareTo(firstJob.getDateCompleted());
    }
}
/**
 * Comparator that orders auto ingest jobs by creation date, most recently
 * created first (reverse chronological).
 */
static class ReverseDateCreatedComparator implements Comparator<AutoIngestJob> {

    /**
     * Compares two jobs by creation date in descending order.
     */
    @Override
    public int compare(AutoIngestJob firstJob, AutoIngestJob secondJob) {
        return secondJob.getDateCreated().compareTo(firstJob.getDateCreated());
    }
}
/**
 * Comparator that orders auto ingest jobs by stage start date, most
 * recently started first (reverse chronological).
 */
static class ReverseDateStartedComparator implements Comparator<AutoIngestJob> {

    /**
     * Compares two jobs by stage start date in descending order.
     */
    @Override
    public int compare(AutoIngestJob o1, AutoIngestJob o2) {
        return -o1.getStageStartDate().compareTo(o2.getStageStartDate());
    }
}
@ -546,32 +308,29 @@ class AutoIngestJob implements Comparable<AutoIngestJob> {
* Custom comparator that sorts the pending list with prioritized cases
* first, then nonprioritized cases. Prioritized cases are last in, first
* out. Nonprioritized cases are first in, first out. Prioritized times are
* from the creation time of the ".prioritized" state file. Non prioritized
* from the creation time of the "prioritized" state file. Non prioritized
* are from the folder creation time.
*/
public static class PrioritizedPendingListComparator implements Comparator<AutoIngestJob> {
public static class PriorityComparator implements Comparator<AutoIngestJob> {
/**
* RJCTODO
*
* @param o1
* @param o2
*
* @return
*/
@Override
public int compare(AutoIngestJob o1, AutoIngestJob o2) {
Date dateCreated1 = o1.getDateCreated();
Date dateCreated2 = o2.getDateCreated();
Date datePrioritized1 = o1.getPrioritizedFileTimeStamp();
Date datePrioritized2 = o2.getPrioritizedFileTimeStamp();
if (datePrioritized1 != null && datePrioritized2 != null) {
// both are prioritized, sort on prioritized file date, last in first out
return datePrioritized2.compareTo(datePrioritized1);
} else if (datePrioritized1 == null && datePrioritized2 == null) {
// both are not prioritized, sort on folder creation date, first in first out
return dateCreated1.compareTo(dateCreated2);
} else if (datePrioritized1 != null) {
// left hand side is prioritized
return -1;
Integer result = o1.getPriority().compareTo(o2.getPriority());
if (0 != result) {
return result;
} else {
// datePrioritized2 != null, so right hand side is prioritized
return 1;
return o1.getManifest().getDateFileCreated().compareTo(o2.getManifest().getDateFileCreated());
}
}
}
/**
@ -581,15 +340,92 @@ class AutoIngestJob implements Comparable<AutoIngestJob> {
*/
static class AlphabeticalComparator implements Comparator<AutoIngestJob> {
/**
* RJCTODO
*
* @param o1
* @param o2
*
* @return
*/
@Override
public int compare(AutoIngestJob o1, AutoIngestJob o2) {
if (o1.getNodeName().equalsIgnoreCase(localHostName)) {
if (o1.getNodeName().equalsIgnoreCase(LOCAL_HOST_NAME)) {
return -1; // o1 is for current case, float to top
} else if (o2.getNodeName().equalsIgnoreCase(localHostName)) {
} else if (o2.getNodeName().equalsIgnoreCase(LOCAL_HOST_NAME)) {
return 1; // o2 is for current case, float to top
} else {
return o1.getCaseName().compareToIgnoreCase(o2.getCaseName());
return o1.getManifest().getCaseName().compareToIgnoreCase(o2.getManifest().getCaseName());
}
}
}
/**
 * The processing stages of an auto ingest job, each paired with the text
 * used to display it to the user.
 */
enum Stage {

    PENDING("Pending"),
    STARTING("Starting"),
    UPDATING_SHARED_CONFIG("Updating shared configuration"),
    CHECKING_SERVICES("Checking services"),
    OPENING_CASE("Opening case"),
    IDENTIFYING_DATA_SOURCE("Identifying data source type"),
    ADDING_DATA_SOURCE("Adding data source"),
    ANALYZING_DATA_SOURCE("Analyzing data source"),
    ANALYZING_FILES("Analyzing files"),
    EXPORTING_FILES("Exporting files"),
    CANCELLING_MODULE("Cancelling module"),
    // NOTE(review): display text is "Cancelling", not "Cancelled" — confirm intended.
    CANCELLED("Cancelling"),
    COMPLETED("Completed");

    private final String displayText;

    Stage(String displayText) {
        this.displayText = displayText;
    }

    /**
     * Gets the user-facing display string for this stage.
     *
     * @return The display text.
     */
    String getDisplayText() {
        return displayText;
    }
}
/**
 * A snapshot of the details of the current processing stage of an auto
 * ingest job: a description and the date the stage started.
 */
@Immutable
static final class StageDetails {

    private final String description;
    private final Date startDate;

    /**
     * Constructs a stage details snapshot.
     *
     * @param description The stage description.
     * @param startDate   The date the stage started.
     */
    private StageDetails(String description, Date startDate) {
        this.description = description;
        /*
         * Defensive copy: java.util.Date is mutable and this class is
         * annotated @Immutable, so it must not share the caller's Date.
         */
        this.startDate = new Date(startDate.getTime());
    }

    /**
     * Gets the stage description.
     *
     * @return The description.
     */
    String getDescription() {
        return this.description;
    }

    /**
     * Gets the date the stage started.
     *
     * @return A defensive copy of the start date.
     */
    Date getStartDate() {
        return new Date(this.startDate.getTime());
    }
}
}

View File

@ -0,0 +1,48 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.autoingest;
import java.io.Serializable;
/**
* Event published when an automated ingest manager completes processing an
* automated ingest job.
*/
public final class AutoIngestJobCompletedEvent extends AutoIngestJobEvent implements Serializable {

// Field name is part of the serialized form; do not rename.
private static final long serialVersionUID = 1L;
private final boolean retry;

/**
 * Constructs an event published when an automated ingest manager completes
 * processing an automated ingest job.
 *
 * @param job         The job that was completed.
 * @param shouldRetry Whether processing of the job should be retried.
 */
public AutoIngestJobCompletedEvent(AutoIngestJob job, boolean shouldRetry) {
super(AutoIngestManager.Event.JOB_COMPLETED, job);
this.retry = shouldRetry;
}

/**
 * Indicates whether processing of the job should be retried.
 *
 * @return True if the job should be retried, false otherwise.
 */
public boolean shouldRetry() {
return this.retry;
}
}

View File

@ -0,0 +1,51 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.autoingest;
import java.io.Serializable;
import javax.annotation.concurrent.Immutable;
import org.sleuthkit.autopsy.events.AutopsyEvent;
/**
 * Base class for events published about an auto ingest job. Carries the
 * job with the event.
 */
@Immutable
abstract class AutoIngestJobEvent extends AutopsyEvent implements Serializable {

private static final long serialVersionUID = 1L;
// NOTE(review): this field is serialized with the event, so AutoIngestJob
// must itself be Serializable — confirm.
private final AutoIngestJob job;

/**
 * Constructs the base event for an auto ingest job.
 *
 * @param eventSubType The auto ingest manager event subtype; its string
 *                     form becomes the AutopsyEvent property name.
 * @param job          The job the event concerns.
 */
AutoIngestJobEvent(AutoIngestManager.Event eventSubType, AutoIngestJob job) {
super(eventSubType.toString(), null, null);
this.job = job;
}

/**
 * Gets the auto ingest job associated with this event.
 *
 * @return The job.
 */
AutoIngestJob getJob() {
return this.job;
}
}

View File

@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.autoingest;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.file.Path;
import java.nio.file.Paths;
@ -29,7 +28,6 @@ import java.text.SimpleDateFormat;
import java.time.Instant;
import java.util.Date;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.NetworkUtils;
import org.sleuthkit.autopsy.coordinationservice.CoordinationService;
import org.sleuthkit.autopsy.coordinationservice.CoordinationService.Lock;
@ -41,114 +39,99 @@ import org.sleuthkit.autopsy.ingest.IngestManager.IngestManagerException;
/**
* A logger for the processing of an auto ingest job by an auto ingest node. The
* log messages are written to the case auto ingest log, a user-friendly log of
* of the automated processing for a case.
*
* The auto ingest log for a case is not intended to be a comprehensive.
* Advanced users doing troubleshooting of an automated ingest cluster should
* also consult the Autopsy and system logs as needed.
* log messages are written to the case auto ingest log in the case directory.
* When an error message is logged, an alert file is also written to the case
* directory.
*/
final class AutoIngestJobLogger {
static final String ROOT_NAMESPACE = "autopsy"; //ELTODO - remove this after AIM is moved into Autopsy. It belongs there.
private static final Logger autopsyLogger = Logger.getLogger(AutoIngestJobLogger.class.getName());
private static final String LOG_FILE_NAME = "auto_ingest_log.txt";
private static final int LOCK_TIME_OUT = 15;
private static final TimeUnit LOCK_TIME_OUT_UNIT = TimeUnit.MINUTES;
private static final String DATE_FORMAT_STRING = "yyyy/MM/dd HH:mm:ss";
private static final SimpleDateFormat simpleDateFormat = new SimpleDateFormat(DATE_FORMAT_STRING);
private final Path imageFolderPath;
private final Path caseFolderPath;
private static final SimpleDateFormat logDateFormat = new SimpleDateFormat(DATE_FORMAT_STRING);
private final Path manifestPath;
private final String dataSourceFileName;
private final Path caseDirectoryPath;
private final String hostName;
private enum MessageLevel {
INFO, WARNING, ERROR
/**
* Message category added to log messages to make searching for various
* classes of messages easier, e.g., to make error messages stand out.
*/
private enum MessageCategory {
/**
* Qualifies a log message about normal automated ingest processing.
*/
INFO,
/**
* Qualifies a log message about an unexpected event or condtion during
* automated ingest processing.
*/
WARNING,
/**
* Qualifies a log message about an error event or condition during
* automated ingest processing.
*/
ERROR
}
/**
* Gets the path to the auto ingest log for a case.
* Gets the path to the automated ingest log for a case.
*
* @param caseFolderPath The path to the case folder for the case
* @param caseDirectoryPath The path to the case directory where the log
* resides.
*
* @return The path to the auto ingest log for the case.
* @return The path to the automated ingest case log for the case.
*/
static Path getLogPath(Path caseFolderPath) {
return Paths.get(caseFolderPath.toString(), LOG_FILE_NAME);
static Path getLogPath(Path caseDirectoryPath) {
return Paths.get(caseDirectoryPath.toString(), LOG_FILE_NAME);
}
/**
 * Constructs a logger for the processing of an auto ingest job by an auto
 * ingest node. The log messages are written to the case auto ingest log, a
 * user-friendly log of the automated processing for a case that resides in
 * the case directory.
 *
 * The auto ingest log for a case is not intended to be comprehensive.
 * Advanced users doing troubleshooting of an automated ingest cluster
 * should also consult the Autopsy and system logs as needed.
 *
 * @param manifestPath       The manifest for the auto ingest job.
 * @param dataSourceFileName The file name of the data source for the job.
 * @param caseDirectoryPath  The case directory.
 */
AutoIngestJobLogger(Path manifestPath, String dataSourceFileName, Path caseDirectoryPath) {
    this.manifestPath = manifestPath;
    this.dataSourceFileName = dataSourceFileName;
    this.caseDirectoryPath = caseDirectoryPath;
    hostName = NetworkUtils.getLocalHostName();
}
/**
* Logs the cancellation of an auto ingest job during processing.
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
* @throws InterruptedException if interrupted while blocked waiting
* to acquire an exclusive lock on the
* log file.
* @throws AutoIngestJobLoggerException if there is a problem writing to the
* log file.
*/
void logJobCancelled() throws InterruptedException {
log(MessageLevel.WARNING, "", "Auto ingest job cancelled during processing");
}
/**
* Logs an error opening or creating a case.
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
*/
void logUnableToOpenCase() throws InterruptedException {
log(MessageLevel.ERROR, "", "Unable to create or open case");
}
/**
* Logs the lack of at least one manifest file in the image folder.
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
*/
void logMissingManifest() throws InterruptedException {
log(MessageLevel.ERROR, "", "Missing manifest file");
}
/**
* Logs the presence of a manifest file that matches more than one data
* source.
*
* @param manifestFileName The file name of the ambiguous manifest.
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
*/
void logAmbiguousManifest(String manifestFileName) throws InterruptedException {
log(MessageLevel.ERROR, "", String.format("Manifest file %s matches multiple data sources", manifestFileName));
void logJobCancelled() throws InterruptedException, AutoIngestJobLoggerException {
log(MessageCategory.WARNING, "Auto ingest job cancelled during processing");
}
/**
* Logs the presence of a manifest file without a matching data source.
*
* @param manifestFileName
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
* @throws InterruptedException if interrupted while blocked waiting
* to acquire an exclusive lock on the
* log file.
* @throws AutoIngestJobLoggerException if there is a problem writing to the
* log file.
*/
void logMissingDataSource(String manifestFileName) throws InterruptedException {
log(MessageLevel.ERROR, "", String.format("Data source for manifest file %s is either missing or is not a supported type", manifestFileName));
void logMissingDataSource() throws InterruptedException, AutoIngestJobLoggerException {
log(MessageCategory.ERROR, "Data source file not found"); // RJCTODO: Check for this
}
/**
@ -161,257 +144,420 @@ final class AutoIngestJobLogger {
* acquire an exclusive lock on the log file
* path.
*/
void logDataSourceTypeIdError(String dataSource, Exception ex) throws InterruptedException {
log(MessageLevel.ERROR, dataSource, String.format("Unable to identify data source type: %s", ex.getLocalizedMessage()));
void logDataSourceTypeIdError(Exception ex) throws InterruptedException, AutoIngestJobLoggerException {
log(MessageCategory.ERROR, String.format("Error identifying data source type: %s", ex.getLocalizedMessage()));
}
/**
* RJCTODO
* @throws InterruptedException
* @throws viking.autoingest.AutoIngestJobLogger.AutoIngestJobLoggerException
*/
void logFailedToIdentifyDataSource() throws InterruptedException, AutoIngestJobLoggerException {
log(MessageCategory.ERROR, String.format("Failed to identifying data source type, cannot ingest"));
}
/**
* RJCTODO
* @param dataSourceType
* @throws InterruptedException
* @throws viking.autoingest.AutoIngestJobLogger.AutoIngestJobLoggerException
*/
void logDataSourceTypeId(String dataSourceType) throws InterruptedException, AutoIngestJobLoggerException {
log(MessageCategory.INFO, String.format("Identified data source as %s", dataSourceType));
}
/**
* Logs cancellation of the addition of a data source to the case database.
*
* @param dataSource The data source.
* @param dataSourceType The data source type.
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
* @throws InterruptedException if interrupted while blocked waiting
* to acquire an exclusive lock on the
* log file.
* @throws CoordinationServiceException if a problem with the coordination
* service prevents acquisition of a
* lock on the log file.
* @throws AutoIngestJobLoggerException if there is a problem writing to the
* log file.
*/
void logDataSourceProcessorCancelled(String dataSource, String dataSourceType) throws InterruptedException {
log(MessageLevel.WARNING, dataSource, String.format("Cancelled adding data source to case as %s", dataSourceType));
void logDataSourceProcessorCancelled(String dataSourceType) throws InterruptedException, AutoIngestJobLoggerException { // RJCTODO: Is this used now?
log(MessageCategory.WARNING, String.format("Cancelled adding data source to case as %s", dataSourceType));
}
/**
* Logs the addition of a data source to the case database.
*
* @param dataSource The data source.
* @param dataSourceType The data source type.
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
* @throws InterruptedException if interrupted while blocked waiting
* to acquire an exclusive lock on the
* log file.
* @throws CoordinationServiceException if a problem with the coordination
* service prevents acquisition of a
* lock on the log file.
* @throws AutoIngestJobLoggerException if there is a problem writing to the
* log file.
*/
void logDataSourceAdded(String dataSource, String dataSourceType) throws InterruptedException {
log(MessageLevel.INFO, dataSource, String.format("Added data source to case as %s", dataSourceType));
void logDataSourceAdded(String dataSourceType) throws InterruptedException, AutoIngestJobLoggerException {
log(MessageCategory.INFO, String.format("Added data source to case as %s", dataSourceType));
}
/**
* Logs an error reported by a data source processor when adding a data
* source to the case database.
* Logs a critical error reported by a data source processor when adding a
* data source to the case database.
*
* @param dataSource The data source.
* @param dataSourceType The data source type.
* @param errorMessage The error message.
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
* @throws InterruptedException if interrupted while blocked waiting
* to acquire an exclusive lock on the
* log file.
* @throws CoordinationServiceException if a problem with the coordination
* service prevents acquisition of a
* lock on the log file.
* @throws AutoIngestJobLoggerException if there is a problem writing to the
* log file.
*/
void logDataSourceProcessorError(String dataSource, String errorMessage) throws InterruptedException {
log(MessageLevel.ERROR, dataSource, String.format("Critical error adding data source to case: %s", errorMessage));
void logDataSourceProcessorError(String dataSourceType, String errorMessage) throws InterruptedException, AutoIngestJobLoggerException {
log(MessageCategory.ERROR, String.format("Critical error adding data source to case as %s: %s", dataSourceType, errorMessage));
}
/**
* Logs a non-critical error reported by a data source processor when adding
* a data source to the case database.
*
* @param dataSourceType The data source type.
* @param errorMessage The error message.
*
* @throws InterruptedException if interrupted while blocked waiting
* to acquire an exclusive lock on the
* log file.
* @throws CoordinationServiceException if a problem with the coordination
* service prevents acquisition of a
* lock on the log file.
* @throws AutoIngestJobLoggerException if there is a problem writing to the
* log file.
*/
void logDataSourceProcessorWarning(String dataSourceType, String errorMessage) throws InterruptedException, AutoIngestJobLoggerException {
log(MessageCategory.WARNING, String.format("Critical error adding data source to case as %s: %s", dataSourceType, errorMessage));
}
/**
* Logs an error adding a data source to the case database.
*
* @param dataSource The data source.
* @param dataSourceType The data source type.
* @param dataSource The data source.
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
* @throws InterruptedException if interrupted while blocked waiting
* to acquire an exclusive lock on the
* log file.
* @throws CoordinationServiceException if a problem with the coordination
* service prevents acquisition of a
* lock on the log file.
* @throws AutoIngestJobLoggerException if there is a problem writing to the
* log file.
*/
void logFailedToAddDataSource(String dataSource, String dataSourceType) throws InterruptedException {
log(MessageLevel.ERROR, dataSource, String.format("Failed to add data source to case as %s", dataSourceType));
void logFailedToAddDataSource(String dataSourceType) throws InterruptedException, AutoIngestJobLoggerException { // RJCTODO: Why this and logDataSourceProcessorError? Bd handling of critical vs. non-critical?
log(MessageCategory.ERROR, String.format("Failed to add data source to case as %s", dataSourceType));
}
/**
* Logs failure to analyze a data source because the analysis could not be
* started due to an ingest manager exception.
*
* @param dataSource The data source.
* @param ex The ingest manager exception.
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
* RJCTODO: Document and homogenize messages
* @param errors
* @throws InterruptedException
* @throws viking.autoingest.AutoIngestJobLogger.AutoIngestJobLoggerException
*/
void logAnalysisStartupError(String dataSource, IngestManagerException ex) throws InterruptedException {
log(MessageLevel.ERROR, dataSource, String.format("Analysis of data source by ingest modules not started: %s", ex.getLocalizedMessage()));
void logIngestJobSettingsErrors(List<String> errors) throws InterruptedException, AutoIngestJobLoggerException {
for (String error : errors) {
log(MessageCategory.ERROR, String.format("Settings error, analysis of data source by ingest modules not started: %s", error));
}
}
/**
* Logs failure to analyze a data source due to ingest module startup
* errors.
*
* @param dataSource The data source.
* @param errors The ingest module errors.
* @param errors The ingest module errors.
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
* @throws InterruptedException if interrupted while blocked waiting
* to acquire an exclusive lock on the
* log file.
* @throws CoordinationServiceException if a problem with the coordination
* service prevents acquisition of a
* lock on the log file.
* @throws AutoIngestJobLoggerException if there is a problem writing to the
* log file.
*/
void logIngestModuleStartupErrors(String dataSource, List<IngestModuleError> errors) throws InterruptedException {
void logIngestModuleStartupErrors(List<IngestModuleError> errors) throws InterruptedException, AutoIngestJobLoggerException {
for (IngestModuleError error : errors) {
log(MessageLevel.ERROR, dataSource, String.format("Analysis of data source by ingest modules not started, %s startup error: %s", error.getModuleDisplayName(), error.getThrowable().getLocalizedMessage()));
log(MessageCategory.ERROR, String.format("Analysis of data source by ingest modules not started, %s startup error: %s", error.getModuleDisplayName(), error.getThrowable().getLocalizedMessage()));
}
}
/**
* Logs failure to analyze a data source because the analysis could not be
* started due to an ingest manager exception.
*
* @param ex The ingest manager exception.
*
* @throws InterruptedException if interrupted while blocked waiting
* to acquire an exclusive lock on the
* log file.
* @throws CoordinationServiceException if a problem with the coordination
* service prevents acquisition of a
* lock on the log file.
* @throws AutoIngestJobLoggerException if there is a problem writing to the
* log file.
*/
void logAnalysisStartupError(IngestManagerException ex) throws InterruptedException, AutoIngestJobLoggerException {
log(MessageCategory.ERROR, String.format("Analysis of data source by ingest modules not started: %s", ex.getLocalizedMessage()));
}
/**
* Logs the completion of analysis of a data source by the ingest modules.
*
* @param dataSource The data source
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
* @throws InterruptedException if interrupted while blocked waiting
* to acquire an exclusive lock on the
* log file.
* @throws CoordinationServiceException if a problem with the coordination
* service prevents acquisition of a
* lock on the log file.
* @throws AutoIngestJobLoggerException if there is a problem writing to the
* log file.
*/
void logAnalysisCompleted(String dataSource) throws InterruptedException {
log(MessageLevel.INFO, dataSource, "Analysis of data source by ingest modules completed");
void logAnalysisCompleted() throws InterruptedException, AutoIngestJobLoggerException {
log(MessageCategory.INFO, "Analysis of data source by ingest modules completed");
}
/**
* Logs the cancellation of analysis of a data source by an individual
* ingest module.
*
* @param dataSource The data source.
* @param cancelledModuleName The display name of the cancelled ingest
* module.
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
* @throws InterruptedException if interrupted while blocked waiting
* to acquire an exclusive lock on the
* log file.
* @throws CoordinationServiceException if a problem with the coordination
* service prevents acquisition of a
* lock on the log file.
* @throws AutoIngestJobLoggerException if there is a problem writing to the
* log file.
*/
void logIngestModuleCancelled(String dataSource, String cancelledModuleName) throws InterruptedException {
log(MessageLevel.WARNING, dataSource, String.format("%s analysis of data source cancelled", cancelledModuleName));
void logIngestModuleCancelled(String cancelledModuleName) throws InterruptedException, AutoIngestJobLoggerException {
log(MessageCategory.WARNING, String.format("%s analysis of data source cancelled", cancelledModuleName));
}
/**
* Logs the cancellation of analysis of a data source by the ingest modules.
*
* @param dataSource The data source.
* @param reason The reason for cancellation.
* @param reason The reason for cancellation.
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
* @throws InterruptedException if interrupted while blocked waiting
* to acquire an exclusive lock on the
* log file.
* @throws CoordinationServiceException if a problem with the coordination
* service prevents acquisition of a
* lock on the log file.
* @throws AutoIngestJobLoggerException if there is a problem writing to the
* log file.
*/
void logAnalysisCancelled(String dataSource, String reason) throws InterruptedException {
log(MessageLevel.WARNING, dataSource, String.format("Analysis of data source by ingest modules cancelled: %s", reason));
}
/**
* Logs an automated file export initialization error.
*
* @param dataSource The data source.
* @param ex The error
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
*/
void logfileExportStartupError(FileExporter.FileExportException ex) throws InterruptedException {
log(MessageLevel.ERROR, "", String.format("Automated file export could not be initialized: %s", ex.getLocalizedMessage()));
void logAnalysisCancelled(String reason) throws InterruptedException, AutoIngestJobLoggerException {
log(MessageCategory.WARNING, String.format("Analysis of data source by ingest modules cancelled: %s", reason));
}
/**
* Logs that automated file export is not enabled.
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
* @throws InterruptedException if interrupted while blocked waiting
* to acquire an exclusive lock on the
* log file.
* @throws CoordinationServiceException if a problem with the coordination
* service prevents acquisition of a
* lock on the log file.
* @throws AutoIngestJobLoggerException if there is a problem writing to the
* log file.
*/
void logFileExportDisabled() throws InterruptedException {
/*
* TODO (VIK-1714): Should this be a WARNING with corresponding error
* state files instead?
*/
log(MessageLevel.INFO, "", "Automated file export is not enabled");
void logFileExportDisabled() throws InterruptedException, AutoIngestJobLoggerException {
log(MessageCategory.INFO, "Automated file export is not enabled");
}
/**
* Logs an automated file export error for a data source.
*
* @param dataSource The data source.
* @param ex The error
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
* RJCTODO
* @throws InterruptedException
* @throws viking.autoingest.AutoIngestJobLogger.AutoIngestJobLoggerException
*/
void logFileExportError(String dataSource, FileExporter.FileExportException ex) throws InterruptedException {
log(MessageLevel.ERROR, dataSource, String.format("Automated file export error for data source: %s", ex.getLocalizedMessage()));
void logFileExportCompleted() throws InterruptedException, AutoIngestJobLoggerException {
log(MessageCategory.INFO, "Automated file export completed");
}
/**
* RJCTODO
* @param ex
* @throws InterruptedException
* @throws viking.autoingest.AutoIngestJobLogger.AutoIngestJobLoggerException
*/
void logFileExportError(Exception ex) throws InterruptedException, AutoIngestJobLoggerException {
log(MessageCategory.ERROR, String.format("Error exporting files: %s", ex.getMessage()));
}
/**
* Logs discovery of a crashed auto ingest job for which recovery will be
* attempted.
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
* @throws InterruptedException if interrupted while blocked waiting
* to acquire an exclusive lock on the
* log file.
* @throws CoordinationServiceException if a problem with the coordination
* service prevents acquisition of a
* lock on the log file.
* @throws AutoIngestJobLoggerException if there is a problem writing to the
* log file.
*/
void logCrashRecoveryWithRetry() throws InterruptedException {
log(MessageLevel.ERROR, "", "Detected crash while processing, adding data sources again and reprocessing");
void logCrashRecoveryWithRetry() throws InterruptedException, AutoIngestJobLoggerException {
log(MessageCategory.ERROR, "Detected crash while processing, reprocessing");
}
/**
* Logs discovery of a crashed auto ingest job for which recovery will not
* be attempted because the retry limit for the job has been reached.
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
* @throws InterruptedException if interrupted while blocked waiting
* to acquire an exclusive lock on the
* log file.
* @throws CoordinationServiceException if a problem with the coordination
* service prevents acquisition of a
* lock on the log file.
* @throws AutoIngestJobLoggerException if there is a problem writing to the
* log file.
*/
void logCrashRecoveryNoRetry() throws InterruptedException {
log(MessageLevel.ERROR, "", "Detected crash while processing, reached retry limit for processing of image folder");
void logCrashRecoveryNoRetry() throws InterruptedException, AutoIngestJobLoggerException {
log(MessageCategory.ERROR, "Detected crash while processing, reached retry limit for processing");
}
/**
* Logs an unexpected runtime exception, e.g., an exception caught by the
* auto ingest job processing exception firewall.
* automated ingest job processing exception firewall.
*
* @throws InterruptedException if interrupted while blocked waiting to
* acquire an exclusive lock on the log file
* path.
* @throws InterruptedException if interrupted while blocked waiting
* to acquire an exclusive lock on the
* log file.
* @throws CoordinationServiceException if a problem with the coordination
* service prevents acquisition of a
* lock on the log file.
* @throws AutoIngestJobLoggerException if there is a problem writing to the
* log file.
*/
void logRuntimeException(Exception ex) throws InterruptedException {
log(MessageLevel.ERROR, "", ex.getLocalizedMessage());
void logErrorCondition(String message) throws InterruptedException, AutoIngestJobLoggerException {
log(MessageCategory.ERROR, message);
}
/**
* Logs a message for an ingest job.
* Writes a message to the case auto ingest log. If the message is an error
* message, also creates an alert file in the case directory for the job. If
* either or both of these operations fail, the details are written to the
* auto ingest log before a more generic exception is thrown to ensure that
* no information is lost.
*
* @param dataSource The data source the message is concerned with, may be
* the empty string.
* @param level A qualifier, e.g., a message level
* @param message
* @param category The message category.
*
* @throws InterruptedException if interrupted while blocked waiting
* to acquire an exclusive lock on the
* log file.
* @throws AutoIngestJobLoggerException if there is a problem writing to the
* log file and/or creating an alert
* file.
*/
/**
* Writes a message to the case auto ingest loga nd optionally creates an
* alert file in the case directory for the job. If either or both of these
* operations fail, the details are written to the system auto ingest log
* before a more generic exception is thrown to ensure that no information
* is lost.
*
* @param category The message category.
* @param message The message.
* @param createAlertFile Whether or not to create an alert file.
*
* @throws
* viking.autoingest.AutoIngestJobLogger.AutoIngestJobLoggerException
* @throws InterruptedException
*/
private void log(MessageLevel level, String dataSource, String message) throws InterruptedException {
private void log(MessageCategory category, String message) throws AutoIngestJobLoggerException, InterruptedException {
/*
* An exclusive lock on the log file path is used to serialize access to
* the log file by each auto ingest node so that log entries do not
* become garbled.
*/
String logLockPath = getLogPath(caseFolderPath).toString();
//ELTODO try (Lock lock = CoordinationService.getInstance(AutoIngestManager.ROOT_NAMESPACE).tryGetExclusiveLock(CoordinationService.CategoryNode.CASES, logLockPath, LOCK_TIME_OUT, LOCK_TIME_OUT_UNIT)) {
try (Lock lock = CoordinationService.getInstance(ROOT_NAMESPACE).tryGetExclusiveLock(CoordinationService.CategoryNode.CASES, logLockPath, LOCK_TIME_OUT, LOCK_TIME_OUT_UNIT)) {
String genericExceptionMessage = String.format("Failed to write to case auto ingest log and/or failed to write alert file for %s", manifestPath);
String autoIngestLogPrefix = String.format("Failed to write case auto ingest message (\"%s\") for %s", message, manifestPath);
String logLockPath = getLogPath(caseDirectoryPath).toString();
try (Lock lock = CoordinationService.getInstance(CoordinationServiceNamespace.getRoot()).tryGetExclusiveLock(CoordinationService.CategoryNode.CASES, logLockPath, LOCK_TIME_OUT, LOCK_TIME_OUT_UNIT)) {
if (null != lock) {
File logFile = getLogPath(caseFolderPath).toFile();
File logFile = getLogPath(caseDirectoryPath).toFile();
try (PrintWriter writer = new PrintWriter(new BufferedWriter(new FileWriter(logFile, logFile.exists())), true)) {
writer.println(String.format("%s %s: %s\\%s: %-8s: %s", simpleDateFormat.format((Date.from(Instant.now()).getTime())), hostName, imageFolderPath, dataSource, level.toString(), message));
writer.println(String.format("%s %s: %s\\%s: %-8s: %s", logDateFormat.format((Date.from(Instant.now()).getTime())), hostName, manifestPath, dataSourceFileName, category.toString(), message));
} catch (Exception ex) {
AutoIngestSystemLogger.getLogger().log(Level.SEVERE, String.format("%s due to I/O error", autoIngestLogPrefix), ex);
throw new AutoIngestJobLoggerException(genericExceptionMessage, ex);
}
} else {
autopsyLogger.log(Level.SEVERE, String.format("Failed to write message (\"%s\") for processing of %s for %s due to lock timeout", message, imageFolderPath, caseFolderPath));
AutoIngestSystemLogger.getLogger().log(Level.SEVERE, String.format("%s due to lock timeout", autoIngestLogPrefix));
throw new AutoIngestJobLoggerException(genericExceptionMessage);
}
} catch (CoordinationServiceException | IOException ex) {
/*
* Write to the Autopsy Log here and do not rethrow. Our current
* policy is to not treat logging issues as show stoppers for auto
* ingest.
*
* TODO (VIK-1707): Is this the right thing to do?
*/
autopsyLogger.log(Level.SEVERE, String.format("Failed to write case log message (\"%s\") for processing of %s for %s", message, imageFolderPath, caseFolderPath), ex);
} catch (InterruptedException ex) {
AutoIngestSystemLogger.getLogger().log(Level.SEVERE, String.format("%s due to interrupt", autoIngestLogPrefix), ex);
throw ex;
} catch (CoordinationServiceException ex) {
AutoIngestSystemLogger.getLogger().log(Level.SEVERE, String.format("%s due to coordination service exception", autoIngestLogPrefix), ex);
throw new AutoIngestJobLoggerException(genericExceptionMessage);
} finally {
if (MessageCategory.INFO != category) {
try {
AutoIngestAlertFile.create(caseDirectoryPath);
} catch (AutoIngestAlertFile.AutoIngestAlertFileException alertex) {
AutoIngestSystemLogger.getLogger().log(Level.SEVERE, String.format("Error creating alert file for %s", manifestPath), alertex);
/*
* Note that this instance of the generic exception replaces
* any instance thrown in the the try bloc, but it does not
* matter since the instances are identical.
*/
throw new AutoIngestJobLoggerException(genericExceptionMessage, alertex);
}
}
}
}
/**
* Exception thrown if an automated ingest log message cannot be written.
*/
static final class AutoIngestJobLoggerException extends Exception {
private static final long serialVersionUID = 1L; // Fixed id; fields never change, so serialized form is stable.
/**
 * Constructs an exception to throw if an automated ingest log message
 * cannot be written.
 *
 * @param message The exception message.
 */
public AutoIngestJobLoggerException(String message) {
super(message);
}
/**
 * Constructs an exception to throw if an automated ingest log message
 * cannot be written.
 *
 * @param message The exception message.
 * @param cause The exception cause, if it was a Throwable.
 */
public AutoIngestJobLoggerException(String message, Throwable cause) {
super(message, cause);
}
}

View File

@ -0,0 +1,38 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.autoingest;
import java.io.Serializable;
/**
* Event published when auto ingest manager (AIM) starts processing an auto
* ingest job.
*/
public final class AutoIngestJobStartedEvent extends AutoIngestJobEvent implements Serializable {
private static final long serialVersionUID = 1L;
/**
 * Constructs an event to publish when an auto ingest manager (AIM) starts
 * processing an auto ingest job. The event type is
 * AutoIngestManager.Event.JOB_STARTED.
 *
 * @param job The auto ingest job that processing has started on.
 */
public AutoIngestJobStartedEvent(AutoIngestJob job) {
super(AutoIngestManager.Event.JOB_STARTED, job);
}
}

View File

@ -0,0 +1,38 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.autoingest;
import java.io.Serializable;
/**
* Event published periodically when an automated ingest manager (AIM) is processing
* an automated ingest job.
*/
public final class AutoIngestJobStatusEvent extends AutoIngestJobEvent implements Serializable {
private static final long serialVersionUID = 1L;
/**
 * Constructs an event to publish periodically while an auto ingest manager
 * (AIM) is processing an auto ingest job. The event type is
 * AutoIngestManager.Event.JOB_STATUS_UPDATED.
 *
 * @param job The auto ingest job being processed.
 */
public AutoIngestJobStatusEvent(AutoIngestJob job) {
super(AutoIngestManager.Event.JOB_STATUS_UPDATED, job);
}
}

View File

@ -0,0 +1,78 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.autoingest;
import java.io.Serializable;
/**
*
* @author elivis
*/
public class AutoIngestManager {
/*
 * Events published by an auto ingest manager. The events are published
 * locally to auto ingest manager clients that register as observers.
 * NOTE(review): the original comment also claimed the events are broadcast
 * to other auto ingest nodes; that behavior is not visible in this class,
 * so confirm against the event-publishing code before relying on it.
 */
enum Event {
INPUT_SCAN_COMPLETED,
JOB_STARTED,
JOB_STATUS_UPDATED,
JOB_COMPLETED,
CASE_PRIORITIZED,
CASE_DELETED,
PAUSED_BY_REQUEST,
PAUSED_FOR_SYSTEM_ERROR,
RESUMED
}
/**
 * The outcome of a case deletion operation. Serializable so the result can
 * be published to remote observers.
 */
public static final class CaseDeletionResult implements Serializable {
private static final long serialVersionUID = 1L;
/*
 * A case may be completely deleted, partially deleted, or not deleted
 * at all.
 */
enum Status {
/**
 * The case folder could not be either physically or logically
 * (DELETED state file written) deleted.
 */
FAILED,
/**
 * The case folder was deleted, but one or more of the image folders
 * for the case could not be either physically or logically (DELETED
 * state file written) deleted.
 */
PARTIALLY_COMPLETED,
/**
 * The case folder and all of its image folders were either
 * physically or logically (DELETED state file written) deleted.
 */
COMPLETED;
}
}
}

View File

@ -0,0 +1,92 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.autoingest;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Timestamp;
import java.util.logging.FileHandler;
import java.util.logging.Formatter;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import javax.annotation.concurrent.GuardedBy;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
/**
* A logger for the auto ingest system log, separate from both the case auto
* ingest log and the application log.
*/
final class AutoIngestSystemLogger {

    private static final int LOG_SIZE = 0; // In bytes; zero means no limit on the log file size.
    private static final int LOG_FILE_COUNT = 10; // Number of rotated log files to retain.
    private static final Logger LOGGER = Logger.getLogger("AutoIngest"); //NON-NLS
    @GuardedBy("AutoIngestSystemLogger")
    private static boolean configured;

    /**
     * Gets a logger for the auto ingest system log, separate from both the
     * case auto ingest log and the application log. On first call, attaches a
     * rotating file handler writing to var/log/auto_ingest.log under the user
     * directory; later calls return the already-configured logger.
     *
     * @return The logger.
     */
    synchronized final static Logger getLogger() {
        if (!configured) {
            Path logFilePath = Paths.get(PlatformUtil.getUserDirectory().getAbsolutePath(), "var", "log", "auto_ingest.log");
            try {
                FileHandler fileHandler = new FileHandler(logFilePath.toString(), LOG_SIZE, LOG_FILE_COUNT);
                fileHandler.setEncoding(PlatformUtil.getLogFileEncoding());
                fileHandler.setFormatter(new Formatter() {
                    @Override
                    public String format(LogRecord record) {
                        /*
                         * Walk the cause chain, appending each throwable's own
                         * stack trace. BUG FIX: the original called
                         * record.getThrown().getStackTrace() inside the loop,
                         * so every nested cause reported the outermost
                         * throwable's frames instead of its own.
                         */
                        StringBuilder stackTrace = new StringBuilder();
                        Throwable thrown = record.getThrown();
                        while (thrown != null) {
                            stackTrace.append(thrown.toString()).append('\n');
                            for (StackTraceElement traceElem : thrown.getStackTrace()) {
                                stackTrace.append('\t').append(traceElem.toString()).append('\n'); //NON-NLS
                            }
                            thrown = thrown.getCause();
                        }
                        return (new Timestamp(record.getMillis())).toString() + " " //NON-NLS
                                + record.getSourceClassName() + " " //NON-NLS
                                + record.getSourceMethodName() + "\n" //NON-NLS
                                + record.getLevel() + ": " //NON-NLS
                                + this.formatMessage(record) + "\n" //NON-NLS
                                + stackTrace.toString();
                    }
                });
                LOGGER.addHandler(fileHandler);
                LOGGER.setUseParentHandlers(false);
            } catch (SecurityException | IOException ex) {
                throw new RuntimeException(String.format("Error initializing file handler for %s", logFilePath), ex); //NON-NLS
            }
            configured = true;
        }
        return LOGGER;
    }

    /**
     * Prevents instantiation of this utility class.
     */
    private AutoIngestSystemLogger() {
    }
}

View File

@ -0,0 +1,33 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.autoingest;
/**
* RJCTODO
*/
final class CoordinationServiceNamespace {

    static final String ROOT_COORD_SCV_NAMESPACE = "autopsy"; // RJCTODO: Move this elsewhere

    /**
     * Private constructor: this class is a namespace holder and is never
     * instantiated.
     */
    private CoordinationServiceNamespace() {
    }

    /**
     * Gets the root namespace under which this application's coordination
     * service nodes are stored.
     *
     * @return The root namespace string.
     */
    static String getRoot() {
        return ROOT_COORD_SCV_NAMESPACE;
    }
}

View File

@ -0,0 +1,130 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.autoingest;
import java.io.IOException;
import java.io.Serializable;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.concurrent.Immutable;
/**
* RJCTODO
*/
@Immutable
public final class Manifest implements Serializable {
private static final long serialVersionUID = 1L;
/*
 * Paths are stored as strings because java.nio.file.Path is not
 * Serializable and this class is.
 */
private final String filePath;
private final Date dateFileCreated;
private final String caseName;
private final String deviceId;
private final String dataSourcePath;
private final Map<String, String> manifestProperties;
/**
 * Constructs a representation of the contents of a manifest file for an
 * automated ingest job. Reads the creation time of the manifest file from
 * the file system at construction time.
 *
 * @param manifestFilePath   The path of the manifest file.
 * @param caseName           The name of the case.
 * @param deviceId           The identifier of the device the data source
 *                           came from.
 * @param dataSourcePath     The path of the data source listed in the
 *                           manifest.
 * @param manifestProperties Additional name/value pairs from the manifest;
 *                           copied defensively.
 *
 * @throws IOException if the attributes of the manifest file cannot be
 *                     read.
 */
public Manifest(Path manifestFilePath, String caseName, String deviceId, Path dataSourcePath, Map<String, String> manifestProperties) throws IOException {
this.filePath = manifestFilePath.toString();
BasicFileAttributes attrs = Files.readAttributes(manifestFilePath, BasicFileAttributes.class);
this.dateFileCreated = new Date(attrs.creationTime().toMillis());
this.caseName = caseName;
this.deviceId = deviceId;
this.dataSourcePath = dataSourcePath.toString();
this.manifestProperties = new HashMap<>(manifestProperties);
}
/**
 * Gets the path of the manifest file.
 *
 * @return The manifest file path.
 */
public Path getFilePath() {
return Paths.get(this.filePath);
}
/**
 * Gets the creation time of the manifest file, as read from the file
 * system when this object was constructed.
 *
 * @return The file creation date.
 */
public Date getDateFileCreated() {
return this.dateFileCreated;
}
/**
 * Gets the name of the case.
 *
 * @return The case name.
 */
public String getCaseName() {
return caseName;
}
/**
 * Gets the identifier of the device the data source came from.
 *
 * @return The device id.
 */
public String getDeviceId() {
return deviceId;
}
/**
 * Gets the path of the data source listed in the manifest.
 *
 * @return The data source path.
 */
public Path getDataSourcePath() {
return Paths.get(dataSourcePath);
}
/**
 * Gets the file name component of the data source path.
 *
 * @return The data source file name.
 */
public String getDataSourceFileName() {
return Paths.get(dataSourcePath).getFileName().toString();
}
/**
 * Gets the additional name/value pairs from the manifest.
 *
 * @return A defensive copy of the properties map.
 */
public Map<String, String> getManifestProperties() {
return new HashMap<>(manifestProperties);
}
}

View File

@ -0,0 +1,58 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.autoingest;
import java.nio.file.Path;
/**
* RJCTODO:
*/
public interface ManifestFileParser {
/**
 * Determines whether a given file is a manifest file that this parser
 * recognizes.
 *
 * @param filePath The path of the file.
 *
 * @return True if this parser can parse the file, false otherwise.
 */
boolean fileIsManifest(Path filePath);
/**
 * Parses a manifest file.
 *
 * @param filePath The path of the manifest file.
 *
 * @return The parsed manifest.
 *
 * @throws ManifestFileParserException if the file cannot be parsed.
 */
Manifest parse(Path filePath) throws ManifestFileParserException;
/**
 * Exception thrown if a manifest file cannot be parsed.
 */
public final static class ManifestFileParserException extends Exception {
private static final long serialVersionUID = 1L;
/**
 * Constructs an exception to throw if a manifest file cannot be parsed.
 *
 * @param message The exception message.
 */
public ManifestFileParserException(String message) {
super(message);
}
/**
 * Constructs an exception to throw if a manifest file cannot be parsed.
 *
 * @param message The exception message.
 * @param cause The exception cause, if it was a Throwable.
 */
public ManifestFileParserException(String message, Throwable cause) {
super(message, cause);
}
}
}

View File

@ -0,0 +1,150 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.autoingest;
import java.nio.ByteBuffer;
/**
* RJCTODO
*/
// RJCTODO: Consider making this encapsulate the locking as well, and to set the data as well
final class ManifestNodeData {
enum ProcessingStatus {
PENDING,
PROCESSING,
COMPLETED,
}
private static final int DEFAULT_PRIORITY = 0;
private final boolean nodeDataIsSet;
private ProcessingStatus status;
private int priority;
private int numberOfCrashes;
/**
* RJCTODO
*
* @param nodeData
*/
ManifestNodeData(byte[] nodeData) {
ByteBuffer buffer = ByteBuffer.wrap(nodeData);
this.nodeDataIsSet = buffer.hasRemaining();
if (this.nodeDataIsSet) {
int rawStatus = buffer.getInt();
if (ProcessingStatus.PENDING.ordinal() == rawStatus) {
this.status = ProcessingStatus.PENDING;
} else if (ProcessingStatus.PROCESSING.ordinal() == rawStatus) {
this.status = ProcessingStatus.PROCESSING;
} else if (ProcessingStatus.COMPLETED.ordinal() == rawStatus) {
this.status = ProcessingStatus.COMPLETED;
}
this.priority = buffer.getInt();
this.numberOfCrashes = buffer.getInt();
} else {
this.status = ProcessingStatus.PENDING;
this.priority = DEFAULT_PRIORITY;
this.numberOfCrashes = 0;
}
}
/**
* RJCTODO
*/
ManifestNodeData(ProcessingStatus status, int priority, int numberOfCrashes) {
this.nodeDataIsSet = false;
this.status = status;
this.priority = priority;
this.numberOfCrashes = numberOfCrashes;
}
/**
* RJCTODO
*
* @return
*/
boolean isSet() {
return this.nodeDataIsSet;
}
/**
* RJCTODO
*
* @return
*/
ProcessingStatus getStatus() {
return this.status;
}
/**
*
* @param status
*/
void setStatus(ProcessingStatus status) {
this.status = status;
}
/**
*
* @return
*/
int getPriority() {
return this.priority;
}
/**
*
* @param priority
*/
void setPriority(int priority) {
this.priority = priority;
}
/**
* RJCTODO
*
* @return
*/
int getNumberOfCrashes() {
return this.numberOfCrashes;
}
/**
* RJCTODO
*
* @param attempts
*/
void setNumberOfCrashes(int attempts) {
this.numberOfCrashes = attempts;
}
/**
* RJCTODO
*
* @return
*/
byte[] toArray() {
ByteBuffer buffer = ByteBuffer.allocate(Integer.BYTES * 3);
buffer.putInt(this.status.ordinal());
buffer.putInt(this.priority);
buffer.putInt(this.numberOfCrashes);
return buffer.array();
}
}

View File

@ -0,0 +1,122 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.autoingest;
import java.io.IOException;
import java.nio.file.Path;
import java.util.HashMap;
import javax.annotation.concurrent.Immutable;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathFactory;
import org.openide.util.lookup.ServiceProvider;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.SAXException;
/**
* RJCTODO
*/
@Immutable
@ServiceProvider(service = ManifestFileParser.class)
public final class NMECManifestFileParser implements ManifestFileParser {

    private static final String MANIFEST_FILE_NAME_SIGNATURE = "_Manifest.xml";
    private static final String ROOT_ELEM_TAG_NAME = "NMEC_Manifest";
    private static final String CASE_NAME_XPATH = "/NMEC_Manifest/Collection/Name/text()";
    private static final String DEVICE_ID_XPATH = "/NMEC_Manifest/Collection/Image/ID/text()";
    private static final String DATA_SOURCE_NAME_XPATH = "/NMEC_Manifest/Collection/Image/Name/text()";

    /**
     * Determines whether a file is an NMEC manifest: the file name must end
     * with "_Manifest.xml" and the document root element must be
     * NMEC_Manifest.
     *
     * @param filePath The path of the candidate file.
     *
     * @return True if the file is an NMEC manifest file, false otherwise.
     */
    @Override
    public boolean fileIsManifest(Path filePath) {
        boolean fileIsManifest = false;
        try {
            Path fileName = filePath.getFileName();
            if (fileName.toString().endsWith(MANIFEST_FILE_NAME_SIGNATURE)) {
                Document doc = this.createManifestDOM(filePath);
                Element docElement = doc.getDocumentElement();
                fileIsManifest = docElement.getTagName().equals(ROOT_ELEM_TAG_NAME);
            }
        } catch (Exception ignored) {
            // Deliberate best-effort check: any parse or I/O failure simply
            // means the file is not a recognizable manifest.
            fileIsManifest = false;
        }
        return fileIsManifest;
    }

    /**
     * Parses an NMEC manifest file, extracting the case name, device id, and
     * data source name. The data source path is resolved relative to the
     * manifest file's parent directory.
     *
     * @param filePath The path of the manifest file.
     *
     * @return The parsed manifest.
     *
     * @throws ManifestFileParserException if the file is not recognized as a
     *                                     manifest or cannot be parsed.
     */
    @Override
    public Manifest parse(Path filePath) throws ManifestFileParserException {
        if (!fileIsManifest(filePath)) {
            throw new ManifestFileParserException(String.format("%s not recognized as a manifest", filePath));
        }
        try {
            Document doc = this.createManifestDOM(filePath);
            XPath xpath = XPathFactory.newInstance().newXPath();
            XPathExpression expr = xpath.compile(CASE_NAME_XPATH);
            String caseName = (String) expr.evaluate(doc, XPathConstants.STRING);
            expr = xpath.compile(DEVICE_ID_XPATH);
            String deviceId = (String) expr.evaluate(doc, XPathConstants.STRING);
            expr = xpath.compile(DATA_SOURCE_NAME_XPATH);
            String dataSourceName = (String) expr.evaluate(doc, XPathConstants.STRING);
            Path dataSourcePath = filePath.getParent().resolve(dataSourceName);
            return new Manifest(filePath, caseName, deviceId, dataSourcePath, new HashMap<>());
        } catch (Exception ex) {
            throw new ManifestFileParserException(String.format("Error parsing manifest %s", filePath), ex);
        }
    }

    /**
     * Creates a DOM from a manifest file. Manifest files arrive from outside
     * the application, so the parser is hardened against XML external entity
     * (XXE) and entity-expansion attacks by disallowing DOCTYPE declarations
     * and entity expansion; NMEC manifests are plain data documents and do
     * not need them.
     *
     * @param manifestFilePath The path of the manifest file.
     *
     * @return The DOM document.
     *
     * @throws ParserConfigurationException if a parser cannot be configured.
     * @throws SAXException                 if the file cannot be parsed.
     * @throws IOException                  if the file cannot be read.
     */
    private Document createManifestDOM(Path manifestFilePath) throws ParserConfigurationException, SAXException, IOException {
        DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
        // Blocks entity-based XXE and "billion laughs" expansion attacks.
        docBuilderFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        docBuilderFactory.setXIncludeAware(false);
        docBuilderFactory.setExpandEntityReferences(false);
        DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder();
        return docBuilder.parse(manifestFilePath.toFile());
    }
}

View File

@ -48,7 +48,7 @@ final class PathUtils {
*
* @return The path of the case folder, or null if it is not found.
*/
static Path findCaseFolder(Path folderToSearch, String caseName) {
static Path findCaseDirectory(Path folderToSearch, String caseName) {
File searchFolder = new File(folderToSearch.toString());
if (!searchFolder.isDirectory()) {
return null;
@ -147,7 +147,7 @@ final class PathUtils {
*
* @return A case name, with the time stamp suffix removed.
*/
static String caseNameFromCaseFolderPath(Path caseFolderPath) {
static String caseNameFromCaseDirectoryPath(Path caseFolderPath) {
String caseName = caseFolderPath.getFileName().toString();
if (caseName.length() > TimeStampUtils.getTimeStampLength()) {
return caseName.substring(0, caseName.length() - TimeStampUtils.getTimeStampLength());
@ -183,21 +183,15 @@ final class PathUtils {
File file = new File(folder, fileName);
if (file.isDirectory() && fileName.length() > TimeStampUtils.getTimeStampLength()) {
Path filePath = Paths.get(file.getPath());
try {
if (!StateFile.exists(filePath, StateFile.Type.DELETED)) {
if (TimeStampUtils.endsWithTimeStamp(fileName)) {
if (null != caseName) {
String fileNamePrefix = fileName.substring(0, fileName.length() - TimeStampUtils.getTimeStampLength());
if (fileNamePrefix.equals(caseName)) {
return hasCaseMetadataFile(filePath);
}
} else {
return hasCaseMetadataFile(filePath);
}
if (TimeStampUtils.endsWithTimeStamp(fileName)) {
if (null != caseName) {
String fileNamePrefix = fileName.substring(0, fileName.length() - TimeStampUtils.getTimeStampLength());
if (fileNamePrefix.equals(caseName)) {
return hasCaseMetadataFile(filePath);
}
} else {
return hasCaseMetadataFile(filePath);
}
} catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Failed to access %s", file.getPath()), ex);
}
}
return false;

View File

@ -0,0 +1,214 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.autoingest;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.casemodule.CaseMetadata;
import org.sleuthkit.autopsy.casemodule.GeneralFilter;
import org.sleuthkit.autopsy.coreutils.Logger;
final class PathUtils {
private static final Logger logger = Logger.getLogger(PathUtils.class.getName());
private static final List<String> CASE_METADATA_FILE_EXTS = Arrays.asList(new String[]{CaseMetadata.getFileExtension()});
private static final GeneralFilter caseMetadataFileFilter = new GeneralFilter(CASE_METADATA_FILE_EXTS, "Autopsy Case File");
/**
* Searches a given folder for the most recently modified case folder for a
* case.
*
* @param folderToSearch The folder to be searched.
* @param caseName The name of the case for which a case folder is to
* be found.
*
* @return The path of the case folder, or null if it is not found.
*/
static Path findCaseDirectory(Path folderToSearch, String caseName) {
    File searchFolder = new File(folderToSearch.toString());
    if (!searchFolder.isDirectory()) {
        return null;
    }
    String[] candidateFolders = searchFolder.list(new CaseFolderFilter(caseName));
    if (null == candidateFolders) {
        // File.list returns null on an I/O error.
        return null;
    }
    Path caseFolderPath = null;
    long mostRecentModified = 0;
    for (String candidateFolder : candidateFolders) {
        /*
         * BUG FIX: File.list returns bare file names, so each candidate must
         * be resolved against the search folder. The original constructed
         * new File(candidateFolder), which resolves against the working
         * directory and reports a last-modified time of zero for every
         * candidate, defeating the "most recently modified" selection.
         */
        File file = new File(searchFolder, candidateFolder);
        if (file.lastModified() >= mostRecentModified) {
            mostRecentModified = file.lastModified();
            caseFolderPath = Paths.get(folderToSearch.toString(), candidateFolder);
        }
    }
    return caseFolderPath;
}
/**
* Gets a listing of case folders in a given folder.
*
* @param folderToSearch The path of the folder to search.
*
* @return A list of the output case folder paths.
*/
static List<Path> findCaseFolders(Path folderToSearch) {
File searchFolder = new File(folderToSearch.toString());
if (!searchFolder.isDirectory()) {
return Collections.emptyList();
}
String[] caseFolders = searchFolder.list(new CaseFolderFilter(null));
List<Path> caseFolderPaths = new ArrayList<>();
for (String path : caseFolders) {
caseFolderPaths.add(Paths.get(folderToSearch.toString(), path));
}
return caseFolderPaths;
}
/**
* Determines whether or not there is a case metadata file in a given
* folder.
*
* @param folderPath Path to the folder to search.
*
* @return True or false.
*/
static boolean hasCaseMetadataFile(Path folderPath) {
/**
* TODO: If need be, this can be rewritten without the FilenameFilter so
* that it does not necessarily visit every file in the folder.
*/
File folder = folderPath.toFile();
if (!folder.isDirectory()) {
return false;
}
String[] caseDataFiles = folder.list((File folder1, String fileName) -> {
File file = new File(folder1, fileName);
if (file.isFile()) {
return caseMetadataFileFilter.accept(file);
}
return false;
});
return caseDataFiles.length != 0;
}
/**
* Extracts the path to the case images folder path from an image folder
* path.
*
* @param rootImageFoldersPath The root image folders path.
* @param imageFolderPath The image folder path.
*
* @return The root input folder path for a case.
*/
static Path caseImagesPathFromImageFolderPath(Path rootImageFoldersPath, Path imageFolderPath) {
return rootImageFoldersPath.resolve(imageFolderPath.subpath(0, rootImageFoldersPath.getNameCount() + 1).getFileName());
}
/**
* Extracts the case name from an image folder path.
*
* @param rootImageFoldersPath The root image folders path.
* @param imageFolderPath The image folder path.
*
* @return The case name.
*/
static String caseNameFromImageFolderPath(Path rootImageFoldersPath, Path imageFolderPath) {
Path caseImagesPath = PathUtils.caseImagesPathFromImageFolderPath(rootImageFoldersPath, imageFolderPath);
return caseImagesPath.getFileName().toString();
}
/**
* Extracts the case name from a case folder path.
*
* @param caseFolderPath A case folder path.
*
* @return A case name, with the time stamp suffix removed.
*/
static String caseNameFromCaseDirectoryPath(Path caseFolderPath) {
String caseName = caseFolderPath.getFileName().toString();
if (caseName.length() > TimeStampUtils.getTimeStampLength()) {
return caseName.substring(0, caseName.length() - TimeStampUtils.getTimeStampLength());
} else {
return caseName;
}
}
/**
* Creates a case folder path. Does not create the folder described by the
* path.
*
* @param caseFoldersPath The root case folders path.
* @param caseName The name of the case.
*
* @return A case folder path with a time stamp suffix.
*/
static Path createCaseFolderPath(Path caseFoldersPath, String caseName) {
String folderName = caseName + "_" + TimeStampUtils.createTimeStamp();
return Paths.get(caseFoldersPath.toString(), folderName);
}
private static class CaseFolderFilter implements FilenameFilter {
private final String caseName;
CaseFolderFilter(String caseName) {
this.caseName = caseName;
}
@Override
public boolean accept(File folder, String fileName) {
File file = new File(folder, fileName);
if (file.isDirectory() && fileName.length() > TimeStampUtils.getTimeStampLength()) {
Path filePath = Paths.get(file.getPath());
try {
if (!StateFile.exists(filePath, StateFile.Type.DELETED)) {
if (TimeStampUtils.endsWithTimeStamp(fileName)) {
if (null != caseName) {
String fileNamePrefix = fileName.substring(0, fileName.length() - TimeStampUtils.getTimeStampLength());
if (fileNamePrefix.equals(caseName)) {
return hasCaseMetadataFile(filePath);
}
} else {
return hasCaseMetadataFile(filePath);
}
}
}
} catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Failed to access %s", file.getPath()), ex);
}
}
return false;
}
}
/**
* Supress creation of instances of this class.
*/
private PathUtils() {
}
}