Merge branch 'develop' of https://github.com/sleuthkit/autopsy into 4251-osaccount-update-philosophy
commit abc8ec680a
Bundle.properties-MERGED
@@ -1,3 +1,4 @@
+OsAccountDataPanel_administrator_title=Administrator
 OsAccountDataPanel_basic_address=Address
 OsAccountDataPanel_basic_admin=Administrator
 OsAccountDataPanel_basic_creationDate=Creation Date
@@ -5,6 +6,10 @@ OsAccountDataPanel_basic_fullname=Full Name
 OsAccountDataPanel_basic_login=Login
 OsAccountDataPanel_basic_title=Basic Properties
 OsAccountDataPanel_basic_type=Type
+OsAccountDataPanel_data_accessed_title=Last Login
+OsAccountDataPanel_host_count_title=Login Count
+# {0} - hostName
+OsAccountDataPanel_host_section_title={0} Details
 OsAccountDataPanel_realm_address=Address
 OsAccountDataPanel_realm_confidence=Confidence
 OsAccountDataPanel_realm_name=Name
OsAccountDataPanel.java
@@ -41,6 +41,7 @@ import javax.swing.SwingWorker;
 import org.openide.util.NbBundle.Messages;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.contentviewers.osaccount.SectionData.RowData;
+import org.sleuthkit.datamodel.BlackboardAttribute;
 import org.sleuthkit.datamodel.DataSource;
 import org.sleuthkit.datamodel.Host;
 import org.sleuthkit.datamodel.OsAccount;
@@ -48,6 +49,7 @@ import org.sleuthkit.datamodel.OsAccountAttribute;
 import org.sleuthkit.datamodel.OsAccountInstance;
 import org.sleuthkit.datamodel.OsAccountManager;
+import org.sleuthkit.datamodel.OsAccountRealm;
 import org.sleuthkit.datamodel.SleuthkitCase;
 
 /**
  * Panel for displaying the properties of an OsAccount.
@@ -82,7 +84,6 @@ public class OsAccountDataPanel extends JPanel {
      * @param account OsAccount to display, if null is passed the panel will
      *                appear blank.
      */
-    // void setOsAccount(OsAccount account) {
     void setOsAccountId(Long osAccountId) {
         removeAll();
         revalidate();
@@ -225,10 +226,33 @@ public class OsAccountDataPanel extends JPanel {
         return data;
     }
 
+    @Messages({
+        "# {0} - hostName",
+        "OsAccountDataPanel_host_section_title={0} Details",
+        "OsAccountDataPanel_host_count_title=Login Count",
+        "OsAccountDataPanel_data_accessed_title=Last Login",
+        "OsAccountDataPanel_administrator_title=Administrator"
+    })
     private SectionData buildHostData(Host host, List<OsAccountAttribute> attributeList) {
-        SectionData data = new SectionData(host.getName());
+        SectionData data = new SectionData(Bundle.OsAccountDataPanel_host_section_title(host.getName()));
         for (OsAccountAttribute attribute : attributeList) {
-            data.addData(attribute.getAttributeType().getDisplayName(), attribute.getDisplayString());
+            String displayName = attribute.getAttributeType().getDisplayName();
+            String value = attribute.getDisplayString();
+
+            if(attribute.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT.getTypeID()) {
+                displayName = Bundle.OsAccountDataPanel_host_count_title();
+            } else if(attribute.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_IS_ADMIN.getTypeID()) {
+                displayName = Bundle.OsAccountDataPanel_administrator_title();
+                if(attribute.getValueInt() == 0) {
+                    value = "False";
+                } else {
+                    value = "True";
+                }
+            } else if(attribute.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID()) {
+                displayName = Bundle.OsAccountDataPanel_data_accessed_title();
+            }
+
+            data.addData(displayName, value);
         }
 
         return data;
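
Note on the @Messages block added above: NetBeans' NbBundle support turns each "key=value" entry into a static accessor on a generated Bundle class in the same package, and "# {0} - ..." comment lines document the placeholders, which become method parameters. A minimal sketch of the pattern, assuming the NetBeans annotation processor is available (class and key names here are illustrative, not from the commit):

    import org.openide.util.NbBundle.Messages;

    public class ExamplePanel {
        @Messages({
            "# {0} - hostName",                           // documents the {0} placeholder
            "ExamplePanel_host_section_title={0} Details"
        })
        String buildTitle(String hostName) {
            // The processor generates Bundle.ExamplePanel_host_section_title(Object),
            // which formats the value with java.text.MessageFormat.
            return Bundle.ExamplePanel_host_section_title(hostName);
        }
    }

This is why the same keys appear twice in the commit: once in the annotation and once in the regenerated Bundle.properties-MERGED file.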
@@ -254,7 +278,7 @@
      * @param row The row in the layout.
      */
     private void addPropertyName(String key, int row) {
-        JLabel label = new JLabel(key);
+        JLabel label = new JLabel(key + ":");
         add(label, getPropertyNameContraints(row));
     }
 
@@ -359,7 +383,9 @@
         protected WorkerResults doInBackground() throws Exception {
             Map<Host, List<OsAccountAttribute>> hostMap = new HashMap<>();
             Map<Host, DataSource> instanceMap = new HashMap<>();
-            OsAccountManager osAccountManager = Case.getCurrentCase().getSleuthkitCase().getOsAccountManager();
+            SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase();
+            OsAccountManager osAccountManager = skCase.getOsAccountManager();
+            OsAccountRealm realm = skCase.getOsAccountRealmManager().getRealmById(account.getRealmId());
 
             if(account == null) {
                 account = osAccountManager.getOsAccountByObjectId(accountId);
@@ -414,7 +440,7 @@
                 }
             }
 
-            return new WorkerResults(hostMap, instanceMap);
+            return new WorkerResults(hostMap, instanceMap, realm);
         }
 
         @Override
@@ -442,20 +468,21 @@
                 hostDataMap.forEach((K, V) -> data.add(buildHostData(K, V)));
             }
 
-            // TODO - load realm on background thread
-            //OsAccountRealm realm = account.getRealm();
-            //if (realm != null) {
-            //    data.add(buildRealmProperties(realm));
-            //}
-
-            Map<Host, DataSource> instanceMap = results.getDataSourceMap();
-            if (!instanceMap.isEmpty()) {
-                SectionData instanceSection = new SectionData("Instances");
-                instanceMap.forEach((K, V) -> instanceSection.addData(K.getName(), V.getName()));
-
-                data.add(instanceSection);
+            OsAccountRealm realm = results.getRealm();
+            if (realm != null) {
+                data.add(buildRealmProperties(realm));
             }
+
+            // Removing the instance section for now. Leaving code here for
+            // future use.
+            // Map<Host, DataSource> instanceMap = results.getDataSourceMap();
+            // if (!instanceMap.isEmpty()) {
+            //     SectionData instanceSection = new SectionData("Instances");
+            //     instanceMap.forEach((K, V) -> instanceSection.addData(K.getName(), V.getName()));
+            //
+            //     data.add(instanceSection);
+            // }
 
             addDataComponents(data);
 
             revalidate();
@@ -472,6 +499,7 @@
 
         private final Map<Host, List<OsAccountAttribute>> attributeMap;
         private final Map<Host, DataSource> instanceMap;
+        private final OsAccountRealm realm;
 
         /**
         * Construct a new WorkerResult object.
@@ -481,9 +509,10 @@
         * @param instanceMap A map of data to display OsAccount instance
         *                    information.
         */
-        WorkerResults(Map<Host, List<OsAccountAttribute>> attributeMap, Map<Host, DataSource> instanceMap) {
+        WorkerResults(Map<Host, List<OsAccountAttribute>> attributeMap, Map<Host, DataSource> instanceMap, OsAccountRealm realm) {
            this.attributeMap = attributeMap;
            this.instanceMap = instanceMap;
+           this.realm = realm;
        }
 
        /**
@@ -505,5 +534,9 @@
        Map<Host, DataSource> getDataSourceMap() {
            return instanceMap;
        }
+
+       OsAccountRealm getRealm() {
+           return realm;
+       }
    }
 }
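
Note on the WorkerResults change: the panel follows the standard SwingWorker division of labor, and this hunk moves the realm lookup (the old TODO) into it: all case-database queries run in doInBackground(), and done() only unpacks the prefetched results on the Event Dispatch Thread. A minimal self-contained sketch of the pattern (types simplified, not the panel's actual code):

    import java.util.Arrays;
    import java.util.List;
    import javax.swing.SwingWorker;

    class AccountDataWorker extends SwingWorker<List<String>, Void> {
        @Override
        protected List<String> doInBackground() throws Exception {
            // Background thread: the safe place for slow database lookups.
            return Arrays.asList("realm properties", "host attributes");
        }

        @Override
        protected void done() {
            try {
                // Event Dispatch Thread: the safe place to touch Swing components.
                for (String section : get()) {
                    System.out.println(section);
                }
            } catch (Exception ex) {
                ex.printStackTrace();
            }
        }
    }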
IngestJob.java
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2014-2018 Basis Technology Corp.
+ * Copyright 2014-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -115,8 +115,8 @@ public final class IngestJob {
     }
 
     /**
-     * Constructs an ingest job that analyzes one data source, possibly using
-     * an ingest stream.
+     * Constructs an ingest job that analyzes one data source, possibly using an
+     * ingest stream.
      *
      * @param settings The ingest job settings.
      */
@@ -202,8 +202,8 @@ public final class IngestJob {
 
         /*
          * Try to start each data source ingest job. Note that there is an
-         * assumption here that if there is going to be a module
-         * startup failure, it will be for the first ingest job pipeline.
+         * assumption here that if there is going to be a module startup
+         * failure, it will be for the first ingest job pipeline.
          *
          * TODO (RC): Consider separating module start up from pipeline startup
         * so that no processing is done if this assumption is false.
@@ -295,10 +295,21 @@ public final class IngestJob {
      * @param reason The reason for cancellation.
      */
     public void cancel(CancellationReason reason) {
-        this.cancellationReason = reason;
+        cancellationReason = reason;
+        /*
+         * Cancel the ingest pipelines for each data source. This is done in a
+         * separate thread to avoid a potential deadlock. The deadlock is
+         * possible because this method can be called in a thread that acquires
+         * the ingest manager's ingest jobs list lock and then tries to acquire
+         * the ingest pipeline stage transition lock, while an ingest thread
+         * that has acquired the stage transition lock is trying to acquire the
+         * ingest manager's ingest jobs list lock.
+         */
+        new Thread(() -> {
             this.ingestJobPipelines.values().stream().forEach((job) -> {
                 job.cancel(reason);
             });
+        }).start();
     }
 
     /**
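
Note on the cancel() change: the comment in the hunk describes a lock-ordering deadlock: a caller holding the ingest manager's jobs-list lock wants the pipeline's stage-transition lock, while an ingest thread holds the stage-transition lock and wants the jobs-list lock. Starting a new thread for the second acquisition breaks the cycle, because the caller releases its own lock without ever blocking on the other one. A stripped-down sketch of the hazard and the workaround (lock names are illustrative, not Autopsy's):

    final Object jobsListLock = new Object();
    final Object stageTransitionLock = new Object();

    // Deadlock-prone if another thread takes the same locks in the opposite order.
    void cancelUnsafe() {
        synchronized (jobsListLock) {
            synchronized (stageTransitionLock) {
                // cancel pipelines
            }
        }
    }

    // The commit's workaround: acquire the second lock on a fresh thread, so the
    // caller never waits for stageTransitionLock while holding jobsListLock.
    void cancelSafe() {
        synchronized (jobsListLock) {
            new Thread(() -> {
                synchronized (stageTransitionLock) {
                    // cancel pipelines
                }
            }).start();
        }
    }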
@@ -508,7 +519,8 @@ public final class IngestJob {
          * used to get basic information about the module and to request
          * cancellation of the module.
          *
-         * @param ingestJobPipeline The ingestJobPipeline that owns the data source level ingest module.
+         * @param ingestJobPipeline The ingestJobPipeline that owns the data
+         *                          source level ingest module.
          * @param module The data source level ingest module.
          */
         private DataSourceIngestModuleHandle(IngestJobPipeline ingestJobPipeline, DataSourceIngestPipeline.DataSourcePipelineModule module) {
IngestManager.java
@@ -125,7 +125,9 @@ public class IngestManager implements IngestProgressSnapshotProvider {
     private final int numberOfFileIngestThreads;
     private final AtomicLong nextIngestManagerTaskId = new AtomicLong(0L);
     private final ExecutorService startIngestJobsExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat("IM-start-ingest-jobs-%d").build()); //NON-NLS;
+    @GuardedBy("startIngestJobFutures")
     private final Map<Long, Future<Void>> startIngestJobFutures = new ConcurrentHashMap<>();
+    @GuardedBy("ingestJobsById")
     private final Map<Long, IngestJob> ingestJobsById = new HashMap<>();
     private final ExecutorService dataSourceLevelIngestJobTasksExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat("IM-data-source-ingest-%d").build()); //NON-NLS;
     private final ExecutorService fileLevelIngestJobTasksExecutor;
@@ -338,10 +340,12 @@ public class IngestManager implements IngestProgressSnapshotProvider {
             if (job.hasIngestPipeline()) {
                 long taskId = nextIngestManagerTaskId.incrementAndGet();
                 Future<Void> task = startIngestJobsExecutor.submit(new StartIngestJobTask(taskId, job));
+                synchronized (startIngestJobFutures) {
                     startIngestJobFutures.put(taskId, task);
+                }
             }
         }
     }
 
     /**
      * Queues an ingest job for for a data source. Either all of the files in
@@ -357,10 +361,12 @@ public class IngestManager implements IngestProgressSnapshotProvider {
             if (job.hasIngestPipeline()) {
                 long taskId = nextIngestManagerTaskId.incrementAndGet();
                 Future<Void> task = startIngestJobsExecutor.submit(new StartIngestJobTask(taskId, job));
+                synchronized (startIngestJobFutures) {
                     startIngestJobFutures.put(taskId, task);
+                }
             }
         }
     }
 
     /**
      * Immediately starts an ingest job for one or more data sources.
@@ -518,9 +524,11 @@ public class IngestManager implements IngestProgressSnapshotProvider {
      * @param reason The cancellation reason.
      */
     public void cancelAllIngestJobs(IngestJob.CancellationReason reason) {
+        synchronized (startIngestJobFutures) {
             startIngestJobFutures.values().forEach((handle) -> {
                 handle.cancel(true);
             });
+        }
         synchronized (ingestJobsById) {
             this.ingestJobsById.values().forEach((job) -> {
                 job.cancel(reason);
@@ -939,8 +947,10 @@ public class IngestManager implements IngestProgressSnapshotProvider {
                 if (progress != null) {
                     progress.setDisplayName(NbBundle.getMessage(this.getClass(), "IngestManager.StartIngestJobsTask.run.cancelling", displayName));
                 }
+                synchronized (startIngestJobFutures) {
                     Future<?> handle = startIngestJobFutures.remove(threadId);
                     handle.cancel(true);
+                }
                 return true;
             }
         });
@@ -954,9 +964,11 @@ public class IngestManager implements IngestProgressSnapshotProvider {
             if (null != progress) {
                 progress.finish();
             }
+            synchronized (startIngestJobFutures) {
                 startIngestJobFutures.remove(threadId);
+            }
         }
     }
 
 }
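
Note on the IngestManager changes: each @GuardedBy annotation names the monitor that protects a field, and the new synchronized blocks make every compound operation on startIngestJobFutures (submit-then-put, remove-then-cancel) atomic under that monitor; the annotation documents the convention, the blocks enforce it. A minimal sketch of the idiom, assuming a jcip-style @GuardedBy annotation is on the classpath (names are illustrative):

    import java.util.HashMap;
    import java.util.Map;
    import javax.annotation.concurrent.GuardedBy;

    class TaskRegistry {
        @GuardedBy("tasksById") // documents which lock protects the map
        private final Map<Long, String> tasksById = new HashMap<>();

        void add(long id, String task) {
            synchronized (tasksById) { // enforced at every access
                tasksById.put(id, task);
            }
        }

        String remove(long id) {
            synchronized (tasksById) {
                return tasksById.remove(id);
            }
        }
    }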
HTMLReport.java
@@ -213,6 +213,7 @@ public class HTMLReport implements TableReportModule {
      * Copies a suitable icon for the given data type in the output directory
      * and returns the icon file name to use for the given data type.
      */
+    @SuppressWarnings( "deprecation" )
    private String useDataTypeIcon(String dataType) {
        String iconFilePath;
        String iconFileName;
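
Note: this hunk and the EvalAccountObj hunk below both attach @SuppressWarnings("deprecation") to a single method rather than to the class, so deprecated calls elsewhere in the same file still produce warnings. A tiny illustration of why the narrow scope matters (the deprecated helper is a hypothetical stand-in):

    class ReportHelper {
        @Deprecated
        static String legacyIconName(String type) {
            return type + ".png";
        }

        // Suppressed only here; any other deprecated use in the class still warns.
        @SuppressWarnings("deprecation")
        String useDataTypeIcon(String dataType) {
            return legacyIconName(dataType);
        }
    }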
EvalAccountObj.java
@@ -37,14 +37,14 @@ import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
  */
 class EvalAccountObj extends EvaluatableObject {
 
-    private AccountObjectType obj;
+    private final AccountObjectType obj;
 
-    public EvalAccountObj(AccountObjectType a_obj, String a_id, String a_spacing) {
+    EvalAccountObj(AccountObjectType a_obj, String a_id, String a_spacing) {
         obj = a_obj;
         id = a_id;
         spacing = a_spacing;
     }
 
+    @SuppressWarnings( "deprecation" )
     @Override
     public synchronized ObservableResult evaluate() {
 
@@ -103,7 +103,7 @@ class EvalAccountObj extends EvaluatableObject {
         // The assumption here is that there aren't going to be too many network shares, so we
         // can cycle through all of them.
         try {
-            List<BlackboardArtifact> finalHits = new ArrayList<BlackboardArtifact>();
+            List<BlackboardArtifact> finalHits = new ArrayList<>();
 
             Case case1 = Case.getCurrentCaseThrows();
             SleuthkitCase sleuthkitCase = case1.getSleuthkitCase();
@@ -48,11 +48,9 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.Blackboard.BlackboardException;
 import org.sleuthkit.datamodel.BlackboardArtifact;
-import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_OS_ACCOUNT;
 import org.sleuthkit.datamodel.BlackboardAttribute;
 import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_DELETED;
 import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_ID;
 import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME;
 import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.datamodel.DataSource;
PstParser.java
@@ -107,6 +107,11 @@ class PstParser implements AutoCloseable{
                 logger.log(Level.INFO, "Found encrypted PST file."); //NON-NLS
                 return ParseResult.ENCRYPT;
             }
+            if (ex.getMessage().toLowerCase().startsWith("unable to")) {
+                logger.log(Level.WARNING, ex.getMessage());
+                logger.log(Level.WARNING, String.format("Error in parsing PST file %s, file may be empty or corrupt", file.getName()));
+                return ParseResult.ERROR;
+            }
             String msg = file.getName() + ": Failed to create internal java-libpst PST file to parse:\n" + ex.getMessage(); //NON-NLS
             logger.log(Level.WARNING, msg, ex);
             return ParseResult.ERROR;
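
Note on the PstParser change: the added branch inspects the java-libpst exception message so that an unreadable (likely empty or truncated) PST is logged as corrupt input rather than as an internal failure, while callers still get the same tri-state result. A compressed sketch of the surrounding pattern (enum and signature simplified, not the exact Autopsy API):

    import java.util.logging.Level;
    import java.util.logging.Logger;

    class PstOpenSketch {
        enum ParseResult { OK, ENCRYPT, ERROR }

        private static final Logger logger = Logger.getLogger(PstOpenSketch.class.getName());

        ParseResult classifyFailure(java.io.File file, Exception ex) {
            // Message sniffing is fragile, but it is the only signal the library exposes here.
            if (ex.getMessage().toLowerCase().startsWith("unable to")) {
                logger.log(Level.WARNING, String.format(
                        "Error in parsing PST file %s, file may be empty or corrupt", file.getName()));
                return ParseResult.ERROR;
            }
            // Anything else is unexpected: keep the stack trace for debugging.
            logger.log(Level.WARNING, file.getName() + ": failed to open PST file", ex);
            return ParseResult.ERROR;
        }
    }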