commit e3383be0fa
Merge branch 'develop' of github.com:sleuthkit/autopsy into 7399-heapDumpFile

@@ -1,3 +1,4 @@
+OsAccountDataPanel_administrator_title=Administrator
 OsAccountDataPanel_basic_address=Address
 OsAccountDataPanel_basic_admin=Administrator
 OsAccountDataPanel_basic_creationDate=Creation Date
@@ -5,6 +6,10 @@ OsAccountDataPanel_basic_fullname=Full Name
 OsAccountDataPanel_basic_login=Login
 OsAccountDataPanel_basic_title=Basic Properties
 OsAccountDataPanel_basic_type=Type
+OsAccountDataPanel_data_accessed_title=Last Login
+OsAccountDataPanel_host_count_title=Login Count
+# {0} - hostName
+OsAccountDataPanel_host_section_title={0} Details
 OsAccountDataPanel_realm_address=Address
 OsAccountDataPanel_realm_confidence=Confidence
 OsAccountDataPanel_realm_name=Name

@@ -41,6 +41,7 @@ import javax.swing.SwingWorker;
 import org.openide.util.NbBundle.Messages;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.contentviewers.osaccount.SectionData.RowData;
+import org.sleuthkit.datamodel.BlackboardAttribute;
 import org.sleuthkit.datamodel.DataSource;
 import org.sleuthkit.datamodel.Host;
 import org.sleuthkit.datamodel.OsAccount;
@@ -48,6 +49,7 @@ import org.sleuthkit.datamodel.OsAccountAttribute;
 import org.sleuthkit.datamodel.OsAccountInstance;
 import org.sleuthkit.datamodel.OsAccountManager;
 import org.sleuthkit.datamodel.OsAccountRealm;
+import org.sleuthkit.datamodel.SleuthkitCase;
 
 /**
  * Panel for displaying the properties of an OsAccount.
@@ -82,7 +84,6 @@ public class OsAccountDataPanel extends JPanel {
      * @param account OsAccount to display, if null is passed the panel will
      *                appear blank.
      */
-    // void setOsAccount(OsAccount account) {
     void setOsAccountId(Long osAccountId) {
         removeAll();
         revalidate();
@@ -225,10 +226,33 @@ public class OsAccountDataPanel extends JPanel {
         return data;
     }
 
+    @Messages({
+        "# {0} - hostName",
+        "OsAccountDataPanel_host_section_title={0} Details",
+        "OsAccountDataPanel_host_count_title=Login Count",
+        "OsAccountDataPanel_data_accessed_title=Last Login",
+        "OsAccountDataPanel_administrator_title=Administrator"
+    })
     private SectionData buildHostData(Host host, List<OsAccountAttribute> attributeList) {
-        SectionData data = new SectionData(host.getName());
+        SectionData data = new SectionData(Bundle.OsAccountDataPanel_host_section_title(host.getName()));
         for (OsAccountAttribute attribute : attributeList) {
-            data.addData(attribute.getAttributeType().getDisplayName(), attribute.getDisplayString());
+            String displayName = attribute.getAttributeType().getDisplayName();
+            String value = attribute.getDisplayString();
+
+            if(attribute.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT.getTypeID()) {
+                displayName = Bundle.OsAccountDataPanel_host_count_title();
+            } else if(attribute.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_IS_ADMIN.getTypeID()) {
+                displayName = Bundle.OsAccountDataPanel_administrator_title();
+                if(attribute.getValueInt() == 0) {
+                    value = "False";
+                } else {
+                    value = "True";
+                }
+            } else if(attribute.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID()) {
+                displayName = Bundle.OsAccountDataPanel_data_accessed_title();
+            }
+
+            data.addData(displayName, value);
         }
 
         return data;
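
Note: the @Messages block added above is NetBeans platform machinery: an annotation processor turns each key=value pair into a static method on a generated Bundle class in the same package, and a "# {0} - name" line documents the message parameter for translators. A minimal sketch of the mechanism, with hypothetical class and key names (only org.openide.util.NbBundle is the real API here):

import org.openide.util.NbBundle.Messages;

public class HostSectionTitles {

    @Messages({
        "# {0} - hostName",  // documents the {0} parameter for translators
        "HostSectionTitles_section_title={0} Details"
    })
    static String titleFor(String hostName) {
        // The annotation processor generates Bundle.HostSectionTitles_section_title(Object),
        // which formats the localized message with the supplied argument.
        return Bundle.HostSectionTitles_section_title(hostName);
    }
}

This is why the hunk above can call Bundle.OsAccountDataPanel_host_section_title(host.getName()) instead of hard-coding the section title.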
@@ -254,7 +278,7 @@ public class OsAccountDataPanel extends JPanel {
      * @param row The row in the layout.
      */
     private void addPropertyName(String key, int row) {
-        JLabel label = new JLabel(key);
+        JLabel label = new JLabel(key + ":");
         add(label, getPropertyNameContraints(row));
     }
 
@@ -359,7 +383,9 @@ public class OsAccountDataPanel extends JPanel {
         protected WorkerResults doInBackground() throws Exception {
             Map<Host, List<OsAccountAttribute>> hostMap = new HashMap<>();
             Map<Host, DataSource> instanceMap = new HashMap<>();
-            OsAccountManager osAccountManager = Case.getCurrentCase().getSleuthkitCase().getOsAccountManager();
+            SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase();
+            OsAccountManager osAccountManager = skCase.getOsAccountManager();
+            OsAccountRealm realm = skCase.getOsAccountRealmManager().getRealmById(account.getRealmId());
 
             if(account == null) {
                 account = osAccountManager.getOsAccountByObjectId(accountId);
@@ -414,7 +440,7 @@ public class OsAccountDataPanel extends JPanel {
                 }
             }
 
-            return new WorkerResults(hostMap, instanceMap);
+            return new WorkerResults(hostMap, instanceMap, realm);
         }
 
         @Override
@@ -442,20 +468,21 @@ public class OsAccountDataPanel extends JPanel {
             hostDataMap.forEach((K, V) -> data.add(buildHostData(K, V)));
         }
 
-        // TODO - load realm on background thread
-        //OsAccountRealm realm = account.getRealm();
-        //if (realm != null) {
-        //    data.add(buildRealmProperties(realm));
-        //}
-
-        Map<Host, DataSource> instanceMap = results.getDataSourceMap();
-        if (!instanceMap.isEmpty()) {
-            SectionData instanceSection = new SectionData("Instances");
-            instanceMap.forEach((K, V) -> instanceSection.addData(K.getName(), V.getName()));
-
-            data.add(instanceSection);
+        OsAccountRealm realm = results.getRealm();
+        if (realm != null) {
+            data.add(buildRealmProperties(realm));
         }
 
+        // Removing the instance section for now. Leaving code here for
+        // future use.
+        // Map<Host, DataSource> instanceMap = results.getDataSourceMap();
+        // if (!instanceMap.isEmpty()) {
+        //     SectionData instanceSection = new SectionData("Instances");
+        //     instanceMap.forEach((K, V) -> instanceSection.addData(K.getName(), V.getName()));
+        //
+        //     data.add(instanceSection);
+        // }
+
         addDataComponents(data);
 
         revalidate();
@@ -472,6 +499,7 @@ public class OsAccountDataPanel extends JPanel {
 
         private final Map<Host, List<OsAccountAttribute>> attributeMap;
         private final Map<Host, DataSource> instanceMap;
+        private final OsAccountRealm realm;
 
         /**
          * Construct a new WorkerResult object.
@@ -481,9 +509,10 @@ public class OsAccountDataPanel extends JPanel {
          * @param instanceMap A map of data to display OsAccount instance
          *                    information.
          */
-        WorkerResults(Map<Host, List<OsAccountAttribute>> attributeMap, Map<Host, DataSource> instanceMap) {
+        WorkerResults(Map<Host, List<OsAccountAttribute>> attributeMap, Map<Host, DataSource> instanceMap, OsAccountRealm realm) {
            this.attributeMap = attributeMap;
            this.instanceMap = instanceMap;
+           this.realm = realm;
         }
 
         /**
@@ -505,5 +534,9 @@ public class OsAccountDataPanel extends JPanel {
         Map<Host, DataSource> getDataSourceMap() {
             return instanceMap;
         }
+
+        OsAccountRealm getRealm() {
+            return realm;
+        }
     }
 }

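Note: the WorkerResults changes above follow the usual SwingWorker division of labor: everything read from the case database in doInBackground() (including the newly added realm) is packed into one immutable results object, and done() unpacks it on the event dispatch thread. A stripped-down sketch of the same pattern, with illustrative names rather than the Autopsy classes:

import javax.swing.SwingWorker;

// Immutable carrier for everything computed off the EDT.
final class FetchedData {
    private final String realmName;

    FetchedData(String realmName) {
        this.realmName = realmName;
    }

    String getRealmName() {
        return realmName;
    }
}

class PanelDataWorker extends SwingWorker<FetchedData, Void> {

    @Override
    protected FetchedData doInBackground() throws Exception {
        // Slow database work belongs here, on a background thread.
        return new FetchedData("example-realm");
    }

    @Override
    protected void done() {
        try {
            FetchedData data = get(); // done() runs on the EDT, so UI updates are safe
            System.out.println(data.getRealmName());
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
}
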
@@ -70,7 +70,7 @@ public final class IconsUtil {
         } else if (typeID == ARTIFACT_TYPE.TSK_SPEED_DIAL_ENTRY.getTypeID()) {
             imageFile = "speeddialentry.png"; //NON-NLS
         } else if (typeID == ARTIFACT_TYPE.TSK_BLUETOOTH_PAIRING.getTypeID()) {
-            imageFile = "bluetooth.png"; //NON-NLS
+            imageFile = "Bluetooth.png"; //NON-NLS
         } else if (typeID == ARTIFACT_TYPE.TSK_GPS_BOOKMARK.getTypeID()) {
             imageFile = "gpsfav.png"; //NON-NLS
         } else if (typeID == ARTIFACT_TYPE.TSK_GPS_LAST_KNOWN_LOCATION.getTypeID()) {

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2014-2018 Basis Technology Corp.
+ * Copyright 2014-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -63,7 +63,7 @@ public final class IngestJob {
             return displayName;
         }
     }
 
     /**
      * Ingest job mode.
      */
@@ -71,7 +71,7 @@ public final class IngestJob {
         BATCH,
         STREAMING
     }
 
     private static final Logger logger = Logger.getLogger(IngestJob.class.getName());
     private final static AtomicLong nextId = new AtomicLong(0L);
     private final long id;
@@ -113,12 +113,12 @@ public final class IngestJob {
         this(Arrays.asList(dataSource), settings);
         this.files.addAll(files);
     }
 
     /**
-     * Constructs an ingest job that analyzes one data source, possibly using
-     * an ingest stream.
+     * Constructs an ingest job that analyzes one data source, possibly using an
+     * ingest stream.
      *
      * @param settings The ingest job settings.
      */
     IngestJob(DataSource dataSource, Mode ingestMode, IngestJobSettings settings) {
         this.id = IngestJob.nextId.getAndIncrement();
@@ -149,10 +149,10 @@ public final class IngestJob {
     boolean hasIngestPipeline() {
         return (!settings.getEnabledIngestModuleTemplates().isEmpty());
     }
 
     /**
      * Add a set of files (by object ID) to be ingested.
      *
      * @param fileObjIds the list of file IDs
      */
     void addStreamingIngestFiles(List<Long> fileObjIds) {
@@ -164,7 +164,7 @@ public final class IngestJob {
         IngestJobPipeline streamingIngestPipeline = ingestJobPipelines.values().iterator().next();
         streamingIngestPipeline.addStreamingIngestFiles(fileObjIds);
     }
 
     /**
      * Start data source processing for streaming ingest.
      */
@@ -185,7 +185,7 @@ public final class IngestJob {
      * @return A collection of ingest module start up errors, empty on success.
      */
     List<IngestModuleError> start() {
 
         /*
          * Set up the pipeline(s)
          */
@@ -199,11 +199,11 @@ public final class IngestJob {
             this.ingestJobPipelines.put(ingestJobPipeline.getId(), ingestJobPipeline);
         }
         incompleteJobsCount.set(ingestJobPipelines.size());
 
         /*
          * Try to start each data source ingest job. Note that there is an
-         * assumption here that if there is going to be a module
-         * startup failure, it will be for the first ingest job pipeline.
+         * assumption here that if there is going to be a module startup
+         * failure, it will be for the first ingest job pipeline.
          *
          * TODO (RC): Consider separating module start up from pipeline startup
         * so that no processing is done if this assumption is false.
@@ -229,14 +229,14 @@ public final class IngestJob {
 
         return errors;
     }
 
     /**
      * Get the ingest mode for this job (batch or streaming).
      *
      * @return the ingest mode.
      */
     Mode getIngestMode() {
         return ingestMode;
     }
 
     /**
@@ -251,8 +251,8 @@ public final class IngestJob {
     /**
      * Gets a snapshot of the progress of this ingest job.
      *
      * @param getIngestTasksSnapshot
      *
      * @return The snapshot.
      */
     public ProgressSnapshot getSnapshot(boolean getIngestTasksSnapshot) {
@@ -295,10 +295,21 @@ public final class IngestJob {
      * @param reason The reason for cancellation.
      */
     public void cancel(CancellationReason reason) {
-        this.cancellationReason = reason;
-        this.ingestJobPipelines.values().stream().forEach((job) -> {
-            job.cancel(reason);
-        });
+        cancellationReason = reason;
+        /*
+         * Cancel the ingest pipelines for each data source. This is done in a
+         * separate thread to avoid a potential deadlock. The deadlock is
+         * possible because this method can be called in a thread that acquires
+         * the ingest manager's ingest jobs list lock and then tries to acquire
+         * the ingest pipeline stage transition lock, while an ingest thread
+         * that has acquired the stage transition lock is trying to acquire the
+         * ingest manager's ingest jobs list lock.
+         */
+        new Thread(() -> {
+            this.ingestJobPipelines.values().stream().forEach((job) -> {
+                job.cancel(reason);
+            });
+        }).start();
     }
 
     /**
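
Note: the comment added in the hunk above describes a textbook lock-ordering deadlock: one thread acquires lock A then waits for lock B while another thread holds B and waits for A. Moving the second acquisition onto a fresh thread means the caller never holds both locks at once. A minimal illustration of the hazard and of the same hand-off fix (the two monitors below merely stand in for the jobs-list and stage-transition locks):

public class CancelHandoff {

    private final Object jobsListLock = new Object(); // stand-in for the ingest jobs list lock
    private final Object stageLock = new Object();    // stand-in for the stage transition lock

    // Deadlock-prone: jobsListLock is held while stageLock is requested, so an
    // ingest thread acquiring them in the opposite order can block this forever.
    void cancelAllDirect() {
        synchronized (jobsListLock) {
            synchronized (stageLock) {
                // ... cancel pipelines ...
            }
        }
    }

    // The hand-off used above: stageLock is taken on a new thread, so this
    // caller never holds both monitors at the same time.
    void cancelAllViaThread() {
        synchronized (jobsListLock) {
            new Thread(() -> {
                synchronized (stageLock) {
                    // ... cancel pipelines ...
                }
            }).start();
        }
    }
}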
@@ -508,8 +519,9 @@ public final class IngestJob {
          * used to get basic information about the module and to request
          * cancellation of the module.
          *
-         * @param ingestJobPipeline The ingestJobPipeline that owns the data source level ingest module.
-         * @param module The data source level ingest module.
+         * @param ingestJobPipeline The ingestJobPipeline that owns the data
+         *                          source level ingest module.
+         * @param module            The data source level ingest module.
          */
         private DataSourceIngestModuleHandle(IngestJobPipeline ingestJobPipeline, DataSourceIngestPipeline.DataSourcePipelineModule module) {
             this.ingestJobPipeline = ingestJobPipeline;

@@ -125,7 +125,9 @@ public class IngestManager implements IngestProgressSnapshotProvider {
     private final int numberOfFileIngestThreads;
     private final AtomicLong nextIngestManagerTaskId = new AtomicLong(0L);
     private final ExecutorService startIngestJobsExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat("IM-start-ingest-jobs-%d").build()); //NON-NLS;
+    @GuardedBy("startIngestJobFutures")
     private final Map<Long, Future<Void>> startIngestJobFutures = new ConcurrentHashMap<>();
+    @GuardedBy("ingestJobsById")
     private final Map<Long, IngestJob> ingestJobsById = new HashMap<>();
     private final ExecutorService dataSourceLevelIngestJobTasksExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat("IM-data-source-ingest-%d").build()); //NON-NLS;
     private final ExecutorService fileLevelIngestJobTasksExecutor;
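
Note: @GuardedBy is documentation plus static-analysis metadata in the JCIP/javax.annotation.concurrent convention; it performs no locking at runtime, which is why the following hunks also wrap each access in a synchronized block on the object named in the annotation. A small sketch of the convention (assuming the jcip/JSR-305 annotations jar is on the classpath):

import java.util.HashMap;
import java.util.Map;
import javax.annotation.concurrent.GuardedBy;

public class JobRegistry {

    @GuardedBy("jobsById") // every reader and writer must hold this map's monitor
    private final Map<Long, String> jobsById = new HashMap<>();

    void addJob(long id, String name) {
        synchronized (jobsById) {
            jobsById.put(id, name);
        }
    }

    String getJobName(long id) {
        synchronized (jobsById) {
            return jobsById.get(id);
        }
    }
}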
@@ -338,7 +340,9 @@ public class IngestManager implements IngestProgressSnapshotProvider {
             if (job.hasIngestPipeline()) {
                 long taskId = nextIngestManagerTaskId.incrementAndGet();
                 Future<Void> task = startIngestJobsExecutor.submit(new StartIngestJobTask(taskId, job));
-                startIngestJobFutures.put(taskId, task);
+                synchronized (startIngestJobFutures) {
+                    startIngestJobFutures.put(taskId, task);
+                }
             }
         }
     }
@@ -357,7 +361,9 @@ public class IngestManager implements IngestProgressSnapshotProvider {
             if (job.hasIngestPipeline()) {
                 long taskId = nextIngestManagerTaskId.incrementAndGet();
                 Future<Void> task = startIngestJobsExecutor.submit(new StartIngestJobTask(taskId, job));
-                startIngestJobFutures.put(taskId, task);
+                synchronized (startIngestJobFutures) {
+                    startIngestJobFutures.put(taskId, task);
+                }
             }
         }
     }
@@ -518,9 +524,11 @@ public class IngestManager implements IngestProgressSnapshotProvider {
      * @param reason The cancellation reason.
      */
     public void cancelAllIngestJobs(IngestJob.CancellationReason reason) {
-        startIngestJobFutures.values().forEach((handle) -> {
-            handle.cancel(true);
-        });
+        synchronized (startIngestJobFutures) {
+            startIngestJobFutures.values().forEach((handle) -> {
+                handle.cancel(true);
+            });
+        }
         synchronized (ingestJobsById) {
             this.ingestJobsById.values().forEach((job) -> {
                 job.cancel(reason);
@@ -939,8 +947,10 @@ public class IngestManager implements IngestProgressSnapshotProvider {
                     if (progress != null) {
                         progress.setDisplayName(NbBundle.getMessage(this.getClass(), "IngestManager.StartIngestJobsTask.run.cancelling", displayName));
                     }
-                    Future<?> handle = startIngestJobFutures.remove(threadId);
-                    handle.cancel(true);
+                    synchronized (startIngestJobFutures) {
+                        Future<?> handle = startIngestJobFutures.remove(threadId);
+                        handle.cancel(true);
+                    }
                     return true;
                 }
             });
@@ -954,7 +964,9 @@ public class IngestManager implements IngestProgressSnapshotProvider {
                 if (null != progress) {
                     progress.finish();
                 }
-                startIngestJobFutures.remove(threadId);
+                synchronized (startIngestJobFutures) {
+                    startIngestJobFutures.remove(threadId);
+                }
             }
         }
 
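Note: startIngestJobFutures is a ConcurrentHashMap, so each individual put or remove above was already thread-safe; the synchronized blocks exist because the remove-then-cancel pairs and the bulk iteration in cancelAllIngestJobs are compound actions that must not interleave. A compressed sketch of that reasoning, with illustrative names:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Future;

public class FutureTracker {

    private final Map<Long, Future<?>> futures = new ConcurrentHashMap<>();

    // remove + cancel form one logical operation, so they hold the same
    // monitor as cancelAll() even though the map itself is thread-safe.
    void finish(long id) {
        synchronized (futures) {
            Future<?> handle = futures.remove(id);
            if (handle != null) {
                handle.cancel(true);
            }
        }
    }

    void cancelAll() {
        synchronized (futures) {
            futures.values().forEach(handle -> handle.cancel(true));
        }
    }
}
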
@@ -322,11 +322,11 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
          * performing the task.
          */
         abstract void performTask(IngestJobPipeline ingestJobPipeline, T task) throws IngestModuleException;
 
         @Override
         public void shutDown() {
             module.shutDown();
         }
 
     }
 
@@ -213,6 +213,7 @@ public class HTMLReport implements TableReportModule {
      * Copies a suitable icon for the given data type in the output directory
      * and returns the icon file name to use for the given data type.
      */
+    @SuppressWarnings( "deprecation" )
     private String useDataTypeIcon(String dataType) {
         String iconFilePath;
         String iconFileName;

@@ -37,14 +37,14 @@ import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
  */
 class EvalAccountObj extends EvaluatableObject {
 
-    private AccountObjectType obj;
+    private final AccountObjectType obj;
 
-    public EvalAccountObj(AccountObjectType a_obj, String a_id, String a_spacing) {
+    EvalAccountObj(AccountObjectType a_obj, String a_id, String a_spacing) {
         obj = a_obj;
         id = a_id;
         spacing = a_spacing;
     }
 
+    @SuppressWarnings( "deprecation" )
     @Override
     public synchronized ObservableResult evaluate() {
 
@@ -103,7 +103,7 @@ class EvalAccountObj extends EvaluatableObject {
         // The assumption here is that there aren't going to be too many network shares, so we
         // can cycle through all of them.
         try {
-            List<BlackboardArtifact> finalHits = new ArrayList<BlackboardArtifact>();
+            List<BlackboardArtifact> finalHits = new ArrayList<>();
 
             Case case1 = Case.getCurrentCaseThrows();
             SleuthkitCase sleuthkitCase = case1.getSleuthkitCase();

@@ -4,7 +4,7 @@ AddEditCategoryDialog.categoryLabel.text=Category:
 AddEditCategoryDialog.domainSuffixLabel.text=Domain Suffix:
 AddEditCategoryDialog.saveButton.text=Save
 AddEditCategoryDialog.cancelButton.text=Cancel
-WebCategoriesOptionsPanel.panelDescription.text=This module allows you to classify web sites based on domain names.
+WebCategoriesOptionsPanel.panelDescription.text=<html>This feature allows the Recent Activity ingest module to perform custom categorization of web sites based on domain suffixes.</html>
 WebCategoriesOptionsPanel.categoriesTitle.text=Categories:
 WebCategoriesOptionsPanel.newEntryButton.text=New Entry
 WebCategoriesOptionsPanel.editEntryButton.text=Edit Entry

@@ -27,7 +27,7 @@ AddEditCategoryDialog.categoryLabel.text=Category:
 AddEditCategoryDialog.domainSuffixLabel.text=Domain Suffix:
 AddEditCategoryDialog.saveButton.text=Save
 AddEditCategoryDialog.cancelButton.text=Cancel
-WebCategoriesOptionsPanel.panelDescription.text=This module allows you to classify web sites based on domain names.
+WebCategoriesOptionsPanel.panelDescription.text=<html>This feature allows the Recent Activity ingest module to perform custom categorization of web sites based on domain suffixes.</html>
 WebCategoriesOptionsPanel.categoriesTitle.text=Categories:
 WebCategoriesOptionsPanel.newEntryButton.text=New Entry
 WebCategoriesOptionsPanel.editEntryButton.text=Edit Entry

@@ -197,8 +197,8 @@
     </Container>
     <Component class="javax.swing.JLabel" name="ingestRunningWarning">
       <Properties>
-        <Property name="foreground" type="java.awt.Color" editor="org.netbeans.modules.form.RADConnectionPropertyEditor">
-          <Connection code="java.awt.Color.RED" type="code"/>
+        <Property name="icon" type="javax.swing.Icon" editor="org.netbeans.modules.form.editors2.IconEditor">
+          <Image iconType="3" name="/org/sleuthkit/autopsy/images/warning16.png"/>
         </Property>
         <Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
           <ResourceString bundle="org/sleuthkit/autopsy/url/analytics/domaincategorization/Bundle.properties" key="WebCategoriesOptionsPanel.ingestRunningWarning.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>

@@ -373,7 +373,7 @@ public class WebCategoriesOptionsPanel extends IngestModuleGlobalSettingsPanel i
         gridBagConstraints.weightx = 1.0;
         add(bottomStrut, gridBagConstraints);
 
-        ingestRunningWarning.setForeground(java.awt.Color.RED);
+        ingestRunningWarning.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/warning16.png"))); // NOI18N
         ingestRunningWarning.setText(org.openide.util.NbBundle.getMessage(WebCategoriesOptionsPanel.class, "WebCategoriesOptionsPanel.ingestRunningWarning.text")); // NOI18N
         gridBagConstraints = new java.awt.GridBagConstraints();
         gridBagConstraints.gridx = 0;

@@ -48,11 +48,9 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.Blackboard.BlackboardException;
 import org.sleuthkit.datamodel.BlackboardArtifact;
-import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_OS_ACCOUNT;
 import org.sleuthkit.datamodel.BlackboardAttribute;
 import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_DELETED;
 import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_ID;
 import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME;
 import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.datamodel.DataSource;

@@ -1986,7 +1986,7 @@ class ExtractRegistry extends Extract {
                 String dir = homeDir.replaceFirst("^(%\\w*%)", "");
                 dir = dir.replace("\\", "/");
                 attributes.add(createOsAccountAttribute(TSK_HOME_DIR, dir, osAccount, host, file));
-                osAccount.addAttributes(attributes);
+                accountMgr.addOsAccountAttributes(osAccount, attributes);
             }
 
             accountMgr.updateOsAccount(osAccount);
@@ -2163,8 +2163,9 @@ class ExtractRegistry extends Extract {
                     groups, osAccount, host, regFile));
         }
 
-        osAccount.addAttributes(attributes);
-        tskCase.getOsAccountManager().updateOsAccount(osAccount);
+        OsAccountManager accountMgr = tskCase.getOsAccountManager();
+        accountMgr.addOsAccountAttributes(osAccount, attributes);
+        accountMgr.updateOsAccount(osAccount);
     }
 
     /**

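Note: both hunks above move attribute persistence from the per-object osAccount.addAttributes(...) call to the manager-level OsAccountManager.addOsAccountAttributes(...) followed by updateOsAccount(...). A hedged sketch of the resulting call sequence; the method names come straight from the diff, but the checked exception and exact signatures are assumptions about the Sleuth Kit API:

import java.util.List;
import org.sleuthkit.datamodel.OsAccount;
import org.sleuthkit.datamodel.OsAccountAttribute;
import org.sleuthkit.datamodel.OsAccountManager;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;

class OsAccountAttributeWriter {

    // Sketch only: tskCase, osAccount, and attributes come from the caller.
    static void saveAttributes(SleuthkitCase tskCase, OsAccount osAccount,
            List<OsAccountAttribute> attributes) throws TskCoreException {
        OsAccountManager accountMgr = tskCase.getOsAccountManager();
        accountMgr.addOsAccountAttributes(osAccount, attributes); // persist the new attributes
        accountMgr.updateOsAccount(osAccount);                    // flush remaining account changes
    }
}
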
@@ -445,6 +445,7 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
     os_account_index = line.find('INSERT INTO "tsk_os_accounts"') > -1 or line.find('INSERT INTO tsk_os_accounts') > -1
     os_account_attr_index = line.find('INSERT INTO "tsk_os_account_attributes"') > -1 or line.find('INSERT INTO tsk_os_account_attributes') > -1
     os_account_instances_index = line.find('INSERT INTO "tsk_os_account_instances"') > -1 or line.find('INSERT INTO tsk_os_account_instances') > -1
+    data_artifacts_index = line.find('INSERT INTO "tsk_data_artifacts"') > -1 or line.find('INSERT INTO tsk_data_artifacts') > -1
 
     parens = line[line.find('(') + 1 : line.rfind(')')]
     no_space_parens = parens.replace(" ", "")
@@ -670,6 +671,19 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
             fields_list[1] = accounts_table[os_account_id]
         newLine = ('INSERT INTO "tsk_os_account_instances" VALUES(' + ','.join(fields_list[1:]) + ');') # remove id
         return newLine
+    elif data_artifacts_index:
+        art_obj_id = int(fields_list[0])
+        if art_obj_id in files_table.keys():
+            fields_list[0] = files_table[art_obj_id]
+        else:
+            fields_list[0] = 'Artifact Object ID Omitted'
+        account_obj_id = int(fields_list[1])
+        if account_obj_id in files_table.keys():
+            fields_list[1] = files_table[account_obj_id]
+        else:
+            fields_list[1] = 'Account Object ID Omitted'
+        newLine = ('INSERT INTO "tsk_data_artifacts" VALUES(' + ','.join(fields_list[:]) + ');') # remove ids
+        return newLine
     else:
         return line
 
@@ -798,7 +812,7 @@ def build_id_accounts_table(db_cursor, isPostgreSQL):
     """
     # for each row in the db, take the object id and account SID then creates a tuple in the dictionary
     # with the object id as the key and the OS Account's SID as the value
-    mapping = dict([(row[0], row[1]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT os_account_obj_id, unique_id FROM tsk_os_accounts")])
+    mapping = dict([(row[0], row[1]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT os_account_obj_id, addr FROM tsk_os_accounts")])
     return mapping
 
 def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports_table, images_table, accounts_table):
@@ -810,7 +824,7 @@ def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports
     artifacts_table: obj_id, artifact_type_name
     reports_table: obj_id, path
     images_table: obj_id, name
-    accounts_table: obj_id, unique_id
+    accounts_table: obj_id, addr
     """
     # make a copy of files_table and update it with new data from artifacts_table and reports_table
     mapping = files_table.copy()
@@ -830,7 +844,7 @@ def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports
         elif par_obj_id in images_table.keys():
             path = images_table[par_obj_id]
             mapping[k] = path + "/" + artifacts_table[k]
-        elif k in accounts_table.keys(): # For an OS Account object ID we use its unique_id field which is the account SID
+        elif k in accounts_table.keys(): # For an OS Account object ID we use its addr field which is the account SID
             mapping[k] = accounts_table[k]
         elif v[0] not in mapping.keys():
             if v[0] in artifacts_table.keys():

@@ -107,6 +107,11 @@ class PstParser implements AutoCloseable{
             logger.log(Level.INFO, "Found encrypted PST file."); //NON-NLS
             return ParseResult.ENCRYPT;
         }
+        if (ex.getMessage().toLowerCase().startsWith("unable to")) {
+            logger.log(Level.WARNING, ex.getMessage());
+            logger.log(Level.WARNING, String.format("Error in parsing PST file %s, file may be empty or corrupt", file.getName()));
+            return ParseResult.ERROR;
+        }
         String msg = file.getName() + ": Failed to create internal java-libpst PST file to parse:\n" + ex.getMessage(); //NON-NLS
         logger.log(Level.WARNING, msg, ex);
         return ParseResult.ERROR;