Merge branch 'develop' of https://github.com/sleuthkit/autopsy into 4251-osaccount-update-philosophy

Raman Arora 2021-04-01 03:50:26 -04:00
commit abc8ec680a
8 changed files with 124 additions and 58 deletions

View File

@@ -1,3 +1,4 @@
+OsAccountDataPanel_administrator_title=Administrator
 OsAccountDataPanel_basic_address=Address
 OsAccountDataPanel_basic_admin=Administrator
 OsAccountDataPanel_basic_creationDate=Creation Date
@@ -5,6 +6,10 @@ OsAccountDataPanel_basic_fullname=Full Name
 OsAccountDataPanel_basic_login=Login
 OsAccountDataPanel_basic_title=Basic Properties
 OsAccountDataPanel_basic_type=Type
+OsAccountDataPanel_data_accessed_title=Last Login
+OsAccountDataPanel_host_count_title=Login Count
+# {0} - hostName
+OsAccountDataPanel_host_section_title={0} Details
 OsAccountDataPanel_realm_address=Address
 OsAccountDataPanel_realm_confidence=Confidence
 OsAccountDataPanel_realm_name=Name

View File

@@ -41,6 +41,7 @@ import javax.swing.SwingWorker;
 import org.openide.util.NbBundle.Messages;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.contentviewers.osaccount.SectionData.RowData;
+import org.sleuthkit.datamodel.BlackboardAttribute;
 import org.sleuthkit.datamodel.DataSource;
 import org.sleuthkit.datamodel.Host;
 import org.sleuthkit.datamodel.OsAccount;
@@ -48,6 +49,7 @@ import org.sleuthkit.datamodel.OsAccountAttribute;
 import org.sleuthkit.datamodel.OsAccountInstance;
 import org.sleuthkit.datamodel.OsAccountManager;
 import org.sleuthkit.datamodel.OsAccountRealm;
+import org.sleuthkit.datamodel.SleuthkitCase;
 
 /**
  * Panel for displaying the properties of an OsAccount.
@@ -82,7 +84,6 @@ public class OsAccountDataPanel extends JPanel {
      * @param account OsAccount to display, if null is passed the panel will
      *                appear blank.
      */
-    // void setOsAccount(OsAccount account) {
     void setOsAccountId(Long osAccountId) {
         removeAll();
         revalidate();
@@ -225,10 +226,33 @@
         return data;
     }
 
+    @Messages({
+        "# {0} - hostName",
+        "OsAccountDataPanel_host_section_title={0} Details",
+        "OsAccountDataPanel_host_count_title=Login Count",
+        "OsAccountDataPanel_data_accessed_title=Last Login",
+        "OsAccountDataPanel_administrator_title=Administrator"
+    })
     private SectionData buildHostData(Host host, List<OsAccountAttribute> attributeList) {
-        SectionData data = new SectionData(host.getName());
+        SectionData data = new SectionData(Bundle.OsAccountDataPanel_host_section_title(host.getName()));
 
         for (OsAccountAttribute attribute : attributeList) {
-            data.addData(attribute.getAttributeType().getDisplayName(), attribute.getDisplayString());
+            String displayName = attribute.getAttributeType().getDisplayName();
+            String value = attribute.getDisplayString();
+
+            if(attribute.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT.getTypeID()) {
+                displayName = Bundle.OsAccountDataPanel_host_count_title();
+            } else if(attribute.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_IS_ADMIN.getTypeID()) {
+                displayName = Bundle.OsAccountDataPanel_administrator_title();
+                if(attribute.getValueInt() == 0) {
+                    value = "False";
+                } else {
+                    value = "True";
+                }
+            } else if(attribute.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID()) {
+                displayName = Bundle.OsAccountDataPanel_data_accessed_title();
+            }
+
+            data.addData(displayName, value);
         }
 
         return data;
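
A note on the @Messages block above: the NetBeans @Messages annotation generates a Bundle class at build time with one static accessor per key, and the `# {0} - hostName` line documents the placeholder, which is why the section title comes from calling Bundle.OsAccountDataPanel_host_section_title(host.getName()). A minimal sketch of what that lookup amounts to, using only java.text.MessageFormat (the host name here is hypothetical):

```java
import java.text.MessageFormat;

public class BundleSketch {
    public static void main(String[] args) {
        // Pattern as it appears in Bundle.properties-MERGED above.
        String pattern = "{0} Details";
        // The generated Bundle.OsAccountDataPanel_host_section_title(hostName)
        // accessor behaves roughly like this MessageFormat call.
        String title = MessageFormat.format(pattern, "WIN-HOST1"); // hypothetical host name
        System.out.println(title); // prints "WIN-HOST1 Details"
    }
}
```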
@@ -254,7 +278,7 @@
      * @param row The row in the layout.
      */
     private void addPropertyName(String key, int row) {
-        JLabel label = new JLabel(key);
+        JLabel label = new JLabel(key + ":");
         add(label, getPropertyNameContraints(row));
     }
@@ -359,7 +383,9 @@
         protected WorkerResults doInBackground() throws Exception {
             Map<Host, List<OsAccountAttribute>> hostMap = new HashMap<>();
             Map<Host, DataSource> instanceMap = new HashMap<>();
-            OsAccountManager osAccountManager = Case.getCurrentCase().getSleuthkitCase().getOsAccountManager();
+            SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase();
+            OsAccountManager osAccountManager = skCase.getOsAccountManager();
+            OsAccountRealm realm = skCase.getOsAccountRealmManager().getRealmById(account.getRealmId());
 
             if(account == null) {
                 account = osAccountManager.getOsAccountByObjectId(accountId);
@@ -414,7 +440,7 @@
                 }
             }
 
-            return new WorkerResults(hostMap, instanceMap);
+            return new WorkerResults(hostMap, instanceMap, realm);
         }
 
         @Override
@@ -442,20 +468,21 @@
                 hostDataMap.forEach((K, V) -> data.add(buildHostData(K, V)));
             }
 
-            // TODO - load realm on background thread
-            //OsAccountRealm realm = account.getRealm();
-            //if (realm != null) {
-            //    data.add(buildRealmProperties(realm));
-            //}
-
-            Map<Host, DataSource> instanceMap = results.getDataSourceMap();
-            if (!instanceMap.isEmpty()) {
-                SectionData instanceSection = new SectionData("Instances");
-                instanceMap.forEach((K, V) -> instanceSection.addData(K.getName(), V.getName()));
-                data.add(instanceSection);
+            OsAccountRealm realm = results.getRealm();
+            if (realm != null) {
+                data.add(buildRealmProperties(realm));
             }
+
+            // Removing the instance section for now. Leaving code here for
+            // future use.
+            // Map<Host, DataSource> instanceMap = results.getDataSourceMap();
+            // if (!instanceMap.isEmpty()) {
+            //     SectionData instanceSection = new SectionData("Instances");
+            //     instanceMap.forEach((K, V) -> instanceSection.addData(K.getName(), V.getName()));
+            //
+            //     data.add(instanceSection);
+            // }
             addDataComponents(data);
 
             revalidate();
@@ -472,6 +499,7 @@
         private final Map<Host, List<OsAccountAttribute>> attributeMap;
         private final Map<Host, DataSource> instanceMap;
+        private final OsAccountRealm realm;
 
         /**
          * Construct a new WorkerResult object.
@@ -481,9 +509,10 @@
          * @param instanceMap A map of data to display OsAccount instance
          *                    information.
          */
-        WorkerResults(Map<Host, List<OsAccountAttribute>> attributeMap, Map<Host, DataSource> instanceMap) {
+        WorkerResults(Map<Host, List<OsAccountAttribute>> attributeMap, Map<Host, DataSource> instanceMap, OsAccountRealm realm) {
            this.attributeMap = attributeMap;
            this.instanceMap = instanceMap;
+           this.realm = realm;
         }
 
         /**
@@ -505,5 +534,9 @@
         Map<Host, DataSource> getDataSourceMap() {
             return instanceMap;
         }
+
+        OsAccountRealm getRealm() {
+            return realm;
+        }
     }
 }
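
Taken together, these hunks move the realm lookup off the Event Dispatch Thread: everything the panel needs is fetched in doInBackground() and handed to the EDT in a single immutable WorkerResults object. A minimal, self-contained sketch of that SwingWorker pattern (class and field names here are illustrative, not Autopsy's):

```java
import java.util.concurrent.ExecutionException;
import javax.swing.SwingWorker;

// Illustrative only: fetch on the worker thread, publish one result object,
// and touch the UI solely in done(), which runs on the EDT.
class RealmNameWorker extends SwingWorker<String, Void> {

    @Override
    protected String doInBackground() throws Exception {
        // Stand-in for slow database access such as
        // getOsAccountRealmManager().getRealmById(...).
        Thread.sleep(100);
        return "example-realm";
    }

    @Override
    protected void done() {
        try {
            System.out.println("Display: " + get()); // safe to update Swing here
        } catch (InterruptedException | ExecutionException ex) {
            ex.printStackTrace(); // real code would log and leave the panel blank
        }
    }
}
```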

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2014-2018 Basis Technology Corp.
+ * Copyright 2014-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -63,7 +63,7 @@ public final class IngestJob {
             return displayName;
         }
     }
 
     /**
      * Ingest job mode.
      */
@@ -71,7 +71,7 @@
         BATCH,
         STREAMING
     }
 
     private static final Logger logger = Logger.getLogger(IngestJob.class.getName());
     private final static AtomicLong nextId = new AtomicLong(0L);
     private final long id;
@@ -113,12 +113,12 @@
         this(Arrays.asList(dataSource), settings);
         this.files.addAll(files);
     }
 
     /**
-     * Constructs an ingest job that analyzes one data source, possibly using
-     * an ingest stream.
+     * Constructs an ingest job that analyzes one data source, possibly using an
+     * ingest stream.
      *
      * @param settings The ingest job settings.
      */
     IngestJob(DataSource dataSource, Mode ingestMode, IngestJobSettings settings) {
         this.id = IngestJob.nextId.getAndIncrement();
@@ -149,10 +149,10 @@
     boolean hasIngestPipeline() {
         return (!settings.getEnabledIngestModuleTemplates().isEmpty());
     }
 
     /**
      * Add a set of files (by object ID) to be ingested.
      *
      * @param fileObjIds the list of file IDs
      */
     void addStreamingIngestFiles(List<Long> fileObjIds) {
@@ -164,7 +164,7 @@
         IngestJobPipeline streamingIngestPipeline = ingestJobPipelines.values().iterator().next();
         streamingIngestPipeline.addStreamingIngestFiles(fileObjIds);
     }
 
     /**
      * Start data source processing for streaming ingest.
      */
@@ -185,7 +185,7 @@
      * @return A collection of ingest module start up errors, empty on success.
      */
     List<IngestModuleError> start() {
         /*
          * Set up the pipeline(s)
          */
@@ -199,11 +199,11 @@
             this.ingestJobPipelines.put(ingestJobPipeline.getId(), ingestJobPipeline);
         }
         incompleteJobsCount.set(ingestJobPipelines.size());
 
         /*
          * Try to start each data source ingest job. Note that there is an
-         * assumption here that if there is going to be a module
-         * startup failure, it will be for the first ingest job pipeline.
+         * assumption here that if there is going to be a module startup
+         * failure, it will be for the first ingest job pipeline.
          *
          * TODO (RC): Consider separating module start up from pipeline startup
          * so that no processing is done if this assumption is false.
@@ -229,14 +229,14 @@
         return errors;
     }
 
     /**
      * Get the ingest mode for this job (batch or streaming).
      *
      * @return the ingest mode.
      */
     Mode getIngestMode() {
         return ingestMode;
     }
 
     /**
@@ -251,8 +251,8 @@
     /**
      * Gets a snapshot of the progress of this ingest job.
      *
      * @param getIngestTasksSnapshot
      *
      * @return The snapshot.
      */
     public ProgressSnapshot getSnapshot(boolean getIngestTasksSnapshot) {
@@ -295,10 +295,21 @@
      * @param reason The reason for cancellation.
      */
     public void cancel(CancellationReason reason) {
-        this.cancellationReason = reason;
-        this.ingestJobPipelines.values().stream().forEach((job) -> {
-            job.cancel(reason);
-        });
+        cancellationReason = reason;
+        /*
+         * Cancel the ingest pipelines for each data source. This is done in a
+         * separate thread to avoid a potential deadlock. The deadlock is
+         * possible because this method can be called in a thread that acquires
+         * the ingest manager's ingest jobs list lock and then tries to acquire
+         * the ingest pipeline stage transition lock, while an ingest thread
+         * that has acquired the stage transition lock is trying to acquire the
+         * ingest manager's ingest jobs list lock.
+         */
+        new Thread(() -> {
+            this.ingestJobPipelines.values().stream().forEach((job) -> {
+                job.cancel(reason);
+            });
+        }).start();
     }
 
     /**
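
The comment added in this hunk describes a classic lock-ordering deadlock: one thread holds the jobs-list lock and wants the stage-transition lock, while another holds the stage-transition lock and wants the jobs-list lock. A stripped-down sketch of the hazard, and of why handing the second acquisition to a fresh thread breaks the cycle (the lock names are placeholders, not the actual fields):

```java
// Placeholder locks standing in for the ingest manager's jobs-list lock
// and the ingest pipeline's stage-transition lock.
class CancelSketch {

    private final Object jobsListLock = new Object();
    private final Object stageTransitionLock = new Object();

    // Deadlock-prone shape: a caller that already holds jobsListLock blocks
    // here on stageTransitionLock, while an ingest thread that holds
    // stageTransitionLock is waiting for jobsListLock, creating a circular wait.
    void cancelInline() {
        synchronized (stageTransitionLock) {
            // cancel pipelines
        }
    }

    // The committed fix: the new thread starts with no locks held, so it can
    // block on stageTransitionLock without keeping jobsListLock tied up.
    void cancelOnFreshThread() {
        new Thread(() -> {
            synchronized (stageTransitionLock) {
                // cancel pipelines
            }
        }).start();
    }
}
```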
@@ -508,8 +519,9 @@
          * used to get basic information about the module and to request
          * cancellation of the module.
          *
-         * @param ingestJobPipeline The ingestJobPipeline that owns the data source level ingest module.
-         * @param module The data source level ingest module.
+         * @param ingestJobPipeline The ingestJobPipeline that owns the data
+         *                          source level ingest module.
+         * @param module            The data source level ingest module.
          */
         private DataSourceIngestModuleHandle(IngestJobPipeline ingestJobPipeline, DataSourceIngestPipeline.DataSourcePipelineModule module) {
             this.ingestJobPipeline = ingestJobPipeline;

View File

@@ -125,7 +125,9 @@ public class IngestManager implements IngestProgressSnapshotProvider {
     private final int numberOfFileIngestThreads;
     private final AtomicLong nextIngestManagerTaskId = new AtomicLong(0L);
     private final ExecutorService startIngestJobsExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat("IM-start-ingest-jobs-%d").build()); //NON-NLS
+    @GuardedBy("startIngestJobFutures")
     private final Map<Long, Future<Void>> startIngestJobFutures = new ConcurrentHashMap<>();
+    @GuardedBy("ingestJobsById")
     private final Map<Long, IngestJob> ingestJobsById = new HashMap<>();
     private final ExecutorService dataSourceLevelIngestJobTasksExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat("IM-data-source-ingest-%d").build()); //NON-NLS
     private final ExecutorService fileLevelIngestJobTasksExecutor;
@@ -338,7 +340,9 @@
                 if (job.hasIngestPipeline()) {
                     long taskId = nextIngestManagerTaskId.incrementAndGet();
                     Future<Void> task = startIngestJobsExecutor.submit(new StartIngestJobTask(taskId, job));
-                    startIngestJobFutures.put(taskId, task);
+                    synchronized (startIngestJobFutures) {
+                        startIngestJobFutures.put(taskId, task);
+                    }
                 }
             }
         }
@@ -357,7 +361,9 @@
             if (job.hasIngestPipeline()) {
                 long taskId = nextIngestManagerTaskId.incrementAndGet();
                 Future<Void> task = startIngestJobsExecutor.submit(new StartIngestJobTask(taskId, job));
-                startIngestJobFutures.put(taskId, task);
+                synchronized (startIngestJobFutures) {
+                    startIngestJobFutures.put(taskId, task);
+                }
             }
         }
     }
@@ -518,9 +524,11 @@
      * @param reason The cancellation reason.
      */
     public void cancelAllIngestJobs(IngestJob.CancellationReason reason) {
-        startIngestJobFutures.values().forEach((handle) -> {
-            handle.cancel(true);
-        });
+        synchronized (startIngestJobFutures) {
+            startIngestJobFutures.values().forEach((handle) -> {
+                handle.cancel(true);
+            });
+        }
         synchronized (ingestJobsById) {
             this.ingestJobsById.values().forEach((job) -> {
                 job.cancel(reason);
@@ -939,8 +947,10 @@
                     if (progress != null) {
                         progress.setDisplayName(NbBundle.getMessage(this.getClass(), "IngestManager.StartIngestJobsTask.run.cancelling", displayName));
                     }
-                    Future<?> handle = startIngestJobFutures.remove(threadId);
-                    handle.cancel(true);
+                    synchronized (startIngestJobFutures) {
+                        Future<?> handle = startIngestJobFutures.remove(threadId);
+                        handle.cancel(true);
+                    }
                     return true;
                 }
             });
@@ -954,7 +964,9 @@
                 if (null != progress) {
                     progress.finish();
                 }
-                startIngestJobFutures.remove(threadId);
+                synchronized (startIngestJobFutures) {
+                    startIngestJobFutures.remove(threadId);
+                }
             }
         }
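
A note on why these hunks pair @GuardedBy with explicit synchronized blocks even though startIngestJobFutures is a ConcurrentHashMap: the map's individual operations are already thread-safe, but compound actions such as remove-then-cancel must be atomic with respect to the bulk cancellation in cancelAllIngestJobs(). A sketch of that discipline under stated assumptions (TaskRegistry and its members are invented for illustration):

```java
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Future;

// Invented for illustration; mirrors the locking discipline in the diff.
class TaskRegistry {

    // Convention (what @GuardedBy documents): touch this map only while
    // holding its own monitor.
    private final Map<Long, Future<?>> tasks = new HashMap<>();

    void register(long id, Future<?> task) {
        synchronized (tasks) {
            tasks.put(id, task);
        }
    }

    // Compound remove-then-cancel: the lock keeps another thread's
    // cancelAll() from interleaving between the two steps.
    void cancelOne(long id) {
        synchronized (tasks) {
            Future<?> handle = tasks.remove(id);
            if (handle != null) {
                handle.cancel(true);
            }
        }
    }

    void cancelAll() {
        synchronized (tasks) {
            tasks.values().forEach(handle -> handle.cancel(true));
            tasks.clear();
        }
    }
}
```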

View File

@@ -213,6 +213,7 @@ public class HTMLReport implements TableReportModule {
      * Copies a suitable icon for the given data type in the output directory
      * and returns the icon file name to use for the given data type.
      */
+    @SuppressWarnings( "deprecation" )
     private String useDataTypeIcon(String dataType) {
         String iconFilePath;
         String iconFileName;

View File

@@ -37,14 +37,14 @@ import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
  */
 class EvalAccountObj extends EvaluatableObject {
 
-    private AccountObjectType obj;
+    private final AccountObjectType obj;
 
-    public EvalAccountObj(AccountObjectType a_obj, String a_id, String a_spacing) {
+    EvalAccountObj(AccountObjectType a_obj, String a_id, String a_spacing) {
         obj = a_obj;
         id = a_id;
         spacing = a_spacing;
     }
 
+    @SuppressWarnings( "deprecation" )
     @Override
     public synchronized ObservableResult evaluate() {
@@ -103,7 +103,7 @@
         // The assumption here is that there aren't going to be too many network shares, so we
         // can cycle through all of them.
         try {
-            List<BlackboardArtifact> finalHits = new ArrayList<BlackboardArtifact>();
+            List<BlackboardArtifact> finalHits = new ArrayList<>();
 
             Case case1 = Case.getCurrentCaseThrows();
             SleuthkitCase sleuthkitCase = case1.getSleuthkitCase();

View File

@@ -48,11 +48,9 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.Blackboard.BlackboardException;
 import org.sleuthkit.datamodel.BlackboardArtifact;
-import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_OS_ACCOUNT;
 import org.sleuthkit.datamodel.BlackboardAttribute;
 import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_DELETED;
 import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_ID;
 import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME;
 import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.datamodel.DataSource;

View File

@@ -107,6 +107,11 @@ class PstParser implements AutoCloseable{
             logger.log(Level.INFO, "Found encrypted PST file."); //NON-NLS
             return ParseResult.ENCRYPT;
         }
+        if (ex.getMessage().toLowerCase().startsWith("unable to")) {
+            logger.log(Level.WARNING, ex.getMessage());
+            logger.log(Level.WARNING, String.format("Error in parsing PST file %s, file may be empty or corrupt", file.getName()));
+            return ParseResult.ERROR;
+        }
         String msg = file.getName() + ": Failed to create internal java-libpst PST file to parse:\n" + ex.getMessage(); //NON-NLS
         logger.log(Level.WARNING, msg, ex);
         return ParseResult.ERROR;