Mirror of https://github.com/overcuriousity/autopsy-flatpak.git, synced 2025-07-17 18:17:43 +00:00

Commit 309075fa57: Merge branch 'develop' of https://github.com/sleuthkit/autopsy into 2882-AllowResetWhileCaseOpen
@@ -70,7 +70,7 @@ public final class IconsUtil {
         } else if (typeID == ARTIFACT_TYPE.TSK_SPEED_DIAL_ENTRY.getTypeID()) {
             imageFile = "speeddialentry.png"; //NON-NLS
         } else if (typeID == ARTIFACT_TYPE.TSK_BLUETOOTH_PAIRING.getTypeID()) {
-            imageFile = "bluetooth.png"; //NON-NLS
+            imageFile = "Bluetooth.png"; //NON-NLS
         } else if (typeID == ARTIFACT_TYPE.TSK_GPS_BOOKMARK.getTypeID()) {
             imageFile = "gpsfav.png"; //NON-NLS
         } else if (typeID == ARTIFACT_TYPE.TSK_GPS_LAST_KNOWN_LOCATION.getTypeID()) {
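The bluetooth.png to Bluetooth.png rename matters because Java resource lookup inside a packaged jar is case-sensitive even when the developer's filesystem is not, so the old name presumably failed to match the packaged file. A self-contained sketch of the failure mode (not Autopsy code; the image directory path is assumed from the warning16.png path used elsewhere in this commit):

import java.net.URL;

public class IconLookupCheck {
    public static void main(String[] args) {
        // Inside a jar, resource names are case-sensitive: if the packaged file
        // is Bluetooth.png, a lookup for bluetooth.png returns null even though
        // it may appear to work on a case-insensitive filesystem during development.
        URL exact = IconLookupCheck.class.getResource("/org/sleuthkit/autopsy/images/Bluetooth.png");
        URL wrongCase = IconLookupCheck.class.getResource("/org/sleuthkit/autopsy/images/bluetooth.png");
        System.out.println("exact case found: " + (exact != null));
        System.out.println("wrong case found: " + (wrongCase != null));
    }
}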
@@ -63,7 +63,7 @@ public final class IngestJob {
             return displayName;
         }
     }

     /**
      * Ingest job mode.
      */
@@ -71,7 +71,7 @@ public final class IngestJob {
         BATCH,
         STREAMING
     }

     private static final Logger logger = Logger.getLogger(IngestJob.class.getName());
     private final static AtomicLong nextId = new AtomicLong(0L);
     private final long id;
@@ -113,12 +113,12 @@ public final class IngestJob {
         this(Arrays.asList(dataSource), settings);
         this.files.addAll(files);
     }

     /**
-     * Constructs an ingest job that analyzes one data source, possibly using
-     * an ingest stream.
+     * Constructs an ingest job that analyzes one data source, possibly using an
+     * ingest stream.
      *
      * @param settings The ingest job settings.
      */
     IngestJob(DataSource dataSource, Mode ingestMode, IngestJobSettings settings) {
         this.id = IngestJob.nextId.getAndIncrement();
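The Mode argument taken by this constructor selects between the BATCH and STREAMING strategies declared in the enum above, and the id field follows the usual pattern of handing out unique job ids from a static counter. A self-contained analogue of that pattern (not Autopsy code; all names here are illustrative):

import java.util.concurrent.atomic.AtomicLong;

public class JobDemo {
    // Analogue of IngestJob.Mode: the strategy is fixed at construction time.
    enum Mode { BATCH, STREAMING }

    // Analogue of IngestJob.nextId: a process-wide counter hands out unique ids.
    private static final AtomicLong nextId = new AtomicLong(0L);

    private final long id = nextId.getAndIncrement();
    private final Mode mode;

    JobDemo(Mode mode) {
        this.mode = mode;
    }

    public static void main(String[] args) {
        JobDemo batch = new JobDemo(Mode.BATCH);
        JobDemo streaming = new JobDemo(Mode.STREAMING);
        System.out.println(batch.id + " " + batch.mode);         // 0 BATCH
        System.out.println(streaming.id + " " + streaming.mode); // 1 STREAMING
    }
}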
@@ -149,10 +149,10 @@ public final class IngestJob {
     boolean hasIngestPipeline() {
         return (!settings.getEnabledIngestModuleTemplates().isEmpty());
     }

     /**
      * Add a set of files (by object ID) to be ingested.
      *
      * @param fileObjIds the list of file IDs
      */
     void addStreamingIngestFiles(List<Long> fileObjIds) {
@@ -164,7 +164,7 @@ public final class IngestJob {
         IngestJobPipeline streamingIngestPipeline = ingestJobPipelines.values().iterator().next();
         streamingIngestPipeline.addStreamingIngestFiles(fileObjIds);
     }

     /**
      * Start data source processing for streaming ingest.
      */
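These two hunks outline the streaming-ingest ordering: file object ids are queued onto the single pipeline while the data source is still being added, and data source level processing starts only afterwards. A self-contained analogue of that ordering (not Autopsy code; only addStreamingIngestFiles is named in the diff, the second method stands in for the "start data source processing" step documented above):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class StreamingIngestDemo {
    private final List<Long> queuedFileObjIds = new ArrayList<>();

    // Mirrors addStreamingIngestFiles(List<Long>): files arrive incrementally.
    void addStreamingIngestFiles(List<Long> fileObjIds) {
        queuedFileObjIds.addAll(fileObjIds);
    }

    // Hypothetical stand-in for the data source processing step that runs last.
    void startDataSourceProcessing() {
        System.out.println("data source processing starts after "
                + queuedFileObjIds.size() + " files were queued");
    }

    public static void main(String[] args) {
        StreamingIngestDemo job = new StreamingIngestDemo();
        job.addStreamingIngestFiles(Arrays.asList(1L, 2L, 3L));
        job.addStreamingIngestFiles(Arrays.asList(4L));
        job.startDataSourceProcessing();
    }
}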
@@ -185,7 +185,7 @@ public final class IngestJob {
      * @return A collection of ingest module start up errors, empty on success.
      */
     List<IngestModuleError> start() {

         /*
          * Set up the pipeline(s)
          */
@@ -199,11 +199,11 @@ public final class IngestJob {
             this.ingestJobPipelines.put(ingestJobPipeline.getId(), ingestJobPipeline);
         }
         incompleteJobsCount.set(ingestJobPipelines.size());

         /*
          * Try to start each data source ingest job. Note that there is an
-         * assumption here that if there is going to be a module
-         * startup failure, it will be for the first ingest job pipeline.
+         * assumption here that if there is going to be a module startup
+         * failure, it will be for the first ingest job pipeline.
          *
          * TODO (RC): Consider separating module start up from pipeline startup
          * so that no processing is done if this assumption is false.
@@ -229,14 +229,14 @@ public final class IngestJob {

         return errors;
     }

     /**
      * Get the ingest mode for this job (batch or streaming).
      *
      * @return the ingest mode.
      */
     Mode getIngestMode() {
         return ingestMode;
     }

     /**
@@ -251,8 +251,8 @@ public final class IngestJob {
     /**
      * Gets a snapshot of the progress of this ingest job.
      *
      * @param getIngestTasksSnapshot
      *
      * @return The snapshot.
      */
     public ProgressSnapshot getSnapshot(boolean getIngestTasksSnapshot) {
@@ -508,8 +508,9 @@ public final class IngestJob {
          * used to get basic information about the module and to request
          * cancellation of the module.
          *
-         * @param ingestJobPipeline The ingestJobPipeline that owns the data source level ingest module.
-         * @param module The data source level ingest module.
+         * @param ingestJobPipeline The ingestJobPipeline that owns the data
+         *                          source level ingest module.
+         * @param module            The data source level ingest module.
          */
         private DataSourceIngestModuleHandle(IngestJobPipeline ingestJobPipeline, DataSourceIngestPipeline.DataSourcePipelineModule module) {
             this.ingestJobPipeline = ingestJobPipeline;
@@ -4,7 +4,7 @@ AddEditCategoryDialog.categoryLabel.text=Category:
 AddEditCategoryDialog.domainSuffixLabel.text=Domain Suffix:
 AddEditCategoryDialog.saveButton.text=Save
 AddEditCategoryDialog.cancelButton.text=Cancel
-WebCategoriesOptionsPanel.panelDescription.text=This module allows you to classify web sites based on domain names.
+WebCategoriesOptionsPanel.panelDescription.text=<html>This feature allows the Recent Activity ingest module to perform custom categorization of web sites based on domain suffixes.</html>
 WebCategoriesOptionsPanel.categoriesTitle.text=Categories:
 WebCategoriesOptionsPanel.newEntryButton.text=New Entry
 WebCategoriesOptionsPanel.editEntryButton.text=Edit Entry
@@ -27,7 +27,7 @@ AddEditCategoryDialog.categoryLabel.text=Category:
 AddEditCategoryDialog.domainSuffixLabel.text=Domain Suffix:
 AddEditCategoryDialog.saveButton.text=Save
 AddEditCategoryDialog.cancelButton.text=Cancel
-WebCategoriesOptionsPanel.panelDescription.text=This module allows you to classify web sites based on domain names.
+WebCategoriesOptionsPanel.panelDescription.text=<html>This feature allows the Recent Activity ingest module to perform custom categorization of web sites based on domain suffixes.</html>
 WebCategoriesOptionsPanel.categoriesTitle.text=Categories:
 WebCategoriesOptionsPanel.newEntryButton.text=New Entry
 WebCategoriesOptionsPanel.editEntryButton.text=Edit Entry
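The same panelDescription rewording appears twice because the bundle exists in two copies (the second hunk, starting at line 27, is presumably the generated merged bundle). At runtime the key resolves through the standard java.util.ResourceBundle mechanism that NbBundle.getMessage wraps. A self-contained analogue of that lookup (not Autopsy code):

import java.util.ListResourceBundle;
import java.util.ResourceBundle;

public class BundleLookupDemo {
    // Stands in for Bundle.properties: one key/value pair taken from this diff.
    public static class Bundle extends ListResourceBundle {
        @Override
        protected Object[][] getContents() {
            return new Object[][]{
                {"WebCategoriesOptionsPanel.panelDescription.text",
                    "<html>This feature allows the Recent Activity ingest module to perform custom categorization of web sites based on domain suffixes.</html>"}
            };
        }
    }

    public static void main(String[] args) {
        ResourceBundle bundle = new Bundle();
        // NbBundle.getMessage(SomePanel.class, key) performs an equivalent
        // lookup against the Bundle.properties in that class's package.
        System.out.println(bundle.getString("WebCategoriesOptionsPanel.panelDescription.text"));
    }
}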
@@ -197,8 +197,8 @@
       </Container>
       <Component class="javax.swing.JLabel" name="ingestRunningWarning">
         <Properties>
-          <Property name="foreground" type="java.awt.Color" editor="org.netbeans.modules.form.RADConnectionPropertyEditor">
-            <Connection code="java.awt.Color.RED" type="code"/>
+          <Property name="icon" type="javax.swing.Icon" editor="org.netbeans.modules.form.editors2.IconEditor">
+            <Image iconType="3" name="/org/sleuthkit/autopsy/images/warning16.png"/>
           </Property>
           <Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
             <ResourceString bundle="org/sleuthkit/autopsy/url/analytics/domaincategorization/Bundle.properties" key="WebCategoriesOptionsPanel.ingestRunningWarning.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
@@ -373,7 +373,7 @@ public class WebCategoriesOptionsPanel extends IngestModuleGlobalSettingsPanel i
         gridBagConstraints.weightx = 1.0;
         add(bottomStrut, gridBagConstraints);

-        ingestRunningWarning.setForeground(java.awt.Color.RED);
+        ingestRunningWarning.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/warning16.png"))); // NOI18N
         ingestRunningWarning.setText(org.openide.util.NbBundle.getMessage(WebCategoriesOptionsPanel.class, "WebCategoriesOptionsPanel.ingestRunningWarning.text")); // NOI18N
         gridBagConstraints = new java.awt.GridBagConstraints();
         gridBagConstraints.gridx = 0;
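Both hunks make the same UI change, once in the NetBeans .form descriptor and once in the generated initComponents() code: the ingest-running warning drops the red foreground in favor of a 16x16 warning icon beside the localized text. A self-contained sketch of the resulting label (not Autopsy code; the icon path and label text here are placeholders):

import java.net.URL;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.SwingUtilities;

public class WarningLabelDemo {
    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            JLabel warning = new JLabel("Ingest is running");
            // A JLabel renders icon and text side by side; no color change needed.
            URL iconUrl = WarningLabelDemo.class.getResource("/images/warning16.png");
            if (iconUrl != null) {
                warning.setIcon(new ImageIcon(iconUrl));
            }
            JFrame frame = new JFrame("warning label demo");
            frame.add(warning);
            frame.pack();
            frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
            frame.setVisible(true);
        });
    }
}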
@@ -32,6 +32,7 @@ import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import org.apache.commons.lang.StringUtils;
 import org.openide.util.Lookup;
 import org.openide.util.NbBundle.Messages;
@@ -86,6 +87,17 @@ class DomainCategoryRunner extends Extract {
     // NOTE: if CustomWebCategorizer ever changes name, this will need to be changed as well.
     private static final String CUSTOM_CATEGORIZER_PATH = "org.sleuthkit.autopsy.url.analytics.domaincategorization.CustomWebCategorizer";

+    // the artifact types to be searched for domain categories
+    private static final List<BlackboardArtifact.Type> DOMAIN_CATEGORIZATION_TYPES = Stream.of(
+            BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK,
+            BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_CACHE,
+            BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE,
+            BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD,
+            BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY,
+            BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY)
+            .map(BlackboardArtifact.Type::new)
+            .collect(Collectors.toList());
+
     /**
      * Get seconds from epoch from the mapping for the attribute type id.
      *
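The new DOMAIN_CATEGORIZATION_TYPES constant uses the Stream.of(...).map(...).collect(...) idiom to convert the enum values into a list of BlackboardArtifact.Type objects once, at class load. A self-contained sketch of the same idiom (not Autopsy code; Enum::name stands in for the BlackboardArtifact.Type::new mapping):

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class StreamCollectDemo {
    enum WebArtifact { TSK_WEB_BOOKMARK, TSK_WEB_CACHE, TSK_WEB_COOKIE }

    // Build the constant once at class load by mapping each enum value
    // through a conversion function and collecting into a List.
    private static final List<String> DOMAIN_CATEGORIZATION_TYPES = Stream.of(
            WebArtifact.TSK_WEB_BOOKMARK,
            WebArtifact.TSK_WEB_CACHE,
            WebArtifact.TSK_WEB_COOKIE)
            .map(Enum::name)
            .collect(Collectors.toList());

    public static void main(String[] args) {
        System.out.println(DOMAIN_CATEGORIZATION_TYPES);
        // prints: [TSK_WEB_BOOKMARK, TSK_WEB_CACHE, TSK_WEB_COOKIE]
    }
}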
@@ -168,7 +180,7 @@ class DomainCategoryRunner extends Extract {
      * Main constructor.
      */
     DomainCategoryRunner() {

     }

     /**
@@ -355,7 +367,7 @@ class DomainCategoryRunner extends Extract {
         Set<String> hostSuffixesSeen = new HashSet<>();
         try {
             List<BlackboardArtifact> listArtifacts = currentCase.getSleuthkitCase().getBlackboard().getArtifacts(
-                    Arrays.asList(new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_WEB_HISTORY)),
+                    DOMAIN_CATEGORIZATION_TYPES,
                     Arrays.asList(dataSource.getId()));

             logger.log(Level.INFO, "Processing {0} blackboard artifacts.", listArtifacts.size()); //NON-NLS
@@ -364,7 +376,8 @@ class DomainCategoryRunner extends Extract {
             for (BlackboardArtifact artifact : listArtifacts) {
                 // make sure we haven't cancelled
                 if (context.dataSourceIngestIsCancelled()) {
-                    break; //User cancelled the process.
+                    //User cancelled the process.
+                    break;
                 }

                 // get the pertinent details for this artifact.
@@ -445,6 +445,7 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
     os_account_index = line.find('INSERT INTO "tsk_os_accounts"') > -1 or line.find('INSERT INTO tsk_os_accounts') > -1
     os_account_attr_index = line.find('INSERT INTO "tsk_os_account_attributes"') > -1 or line.find('INSERT INTO tsk_os_account_attributes') > -1
     os_account_instances_index = line.find('INSERT INTO "tsk_os_account_instances"') > -1 or line.find('INSERT INTO tsk_os_account_instances') > -1
+    data_artifacts_index = line.find('INSERT INTO "tsk_data_artifacts"') > -1 or line.find('INSERT INTO tsk_data_artifacts') > -1

     parens = line[line.find('(') + 1 : line.rfind(')')]
     no_space_parens = parens.replace(" ", "")
@@ -670,6 +671,19 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
             fields_list[1] = accounts_table[os_account_id]
         newLine = ('INSERT INTO "tsk_os_account_instances" VALUES(' + ','.join(fields_list[1:]) + ');') # remove id
         return newLine
+    elif data_artifacts_index:
+        art_obj_id = int(fields_list[0])
+        if art_obj_id in files_table.keys():
+            fields_list[0] = files_table[art_obj_id]
+        else:
+            fields_list[0] = 'Artifact Object ID Omitted'
+        account_obj_id = int(fields_list[1])
+        if account_obj_id in files_table.keys():
+            fields_list[1] = files_table[account_obj_id]
+        else:
+            fields_list[1] = 'Account Object ID Omitted'
+        newLine = ('INSERT INTO "tsk_data_artifacts" VALUES(' + ','.join(fields_list[:]) + ');') # remove ids
+        return newLine
     else:
         return line

@@ -798,7 +812,7 @@ def build_id_accounts_table(db_cursor, isPostgreSQL):
     """
     # for each row in the db, take the object id and account SID then creates a tuple in the dictionary
     # with the object id as the key and the OS Account's SID as the value
-    mapping = dict([(row[0], row[1]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT os_account_obj_id, unique_id FROM tsk_os_accounts")])
+    mapping = dict([(row[0], row[1]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT os_account_obj_id, addr FROM tsk_os_accounts")])
     return mapping

 def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports_table, images_table, accounts_table):
@@ -810,7 +824,7 @@ def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports
     artifacts_table: obj_id, artifact_type_name
     reports_table: obj_id, path
     images_table: obj_id, name
-    accounts_table: obj_id, unique_id
+    accounts_table: obj_id, addr
     """
     # make a copy of files_table and update it with new data from artifacts_table and reports_table
     mapping = files_table.copy()
@@ -830,7 +844,7 @@ def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports
         elif par_obj_id in images_table.keys():
             path = images_table[par_obj_id]
             mapping[k] = path + "/" + artifacts_table[k]
-        elif k in accounts_table.keys(): # For an OS Account object ID we use its unique_id field which is the account SID
+        elif k in accounts_table.keys(): # For an OS Account object ID we use its addr field which is the account SID
             mapping[k] = accounts_table[k]
         elif v[0] not in mapping.keys():
             if v[0] in artifacts_table.keys():