Mirror of https://github.com/overcuriousity/autopsy-flatpak.git, synced 2025-07-17 10:17:41 +00:00

commit 309075fa57
Merge branch 'develop' of https://github.com/sleuthkit/autopsy into 2882-AllowResetWhileCaseOpen
@@ -70,7 +70,7 @@ public final class IconsUtil {
         } else if (typeID == ARTIFACT_TYPE.TSK_SPEED_DIAL_ENTRY.getTypeID()) {
             imageFile = "speeddialentry.png"; //NON-NLS
         } else if (typeID == ARTIFACT_TYPE.TSK_BLUETOOTH_PAIRING.getTypeID()) {
-            imageFile = "bluetooth.png"; //NON-NLS
+            imageFile = "Bluetooth.png"; //NON-NLS
         } else if (typeID == ARTIFACT_TYPE.TSK_GPS_BOOKMARK.getTypeID()) {
             imageFile = "gpsfav.png"; //NON-NLS
         } else if (typeID == ARTIFACT_TYPE.TSK_GPS_LAST_KNOWN_LOCATION.getTypeID()) {
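The case change above is presumably there to match the packaged file's actual name: resource lookups from a jar are case-sensitive even on case-insensitive file systems, so "bluetooth.png" and "Bluetooth.png" are distinct resources. A minimal standalone sketch of a guarded lookup; the fallback icon name is hypothetical:

    import java.net.URL;

    public class IconLookup {
        public static void main(String[] args) {
            // Case must match the packaged file exactly; getResource returns null otherwise.
            URL iconUrl = IconLookup.class.getResource("/org/sleuthkit/autopsy/images/Bluetooth.png");
            if (iconUrl == null) {
                // Hypothetical fallback name, used here only to show the null-check pattern.
                iconUrl = IconLookup.class.getResource("/org/sleuthkit/autopsy/images/default-icon.png");
            }
            System.out.println("resolved icon: " + iconUrl);
        }
    }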
@@ -115,8 +115,8 @@ public final class IngestJob {
     }
 
     /**
-     * Constructs an ingest job that analyzes one data source, possibly using
-     * an ingest stream.
+     * Constructs an ingest job that analyzes one data source, possibly using an
+     * ingest stream.
      *
      * @param settings The ingest job settings.
      */
@@ -202,8 +202,8 @@ public final class IngestJob {
 
         /*
          * Try to start each data source ingest job. Note that there is an
-         * assumption here that if there is going to be a module
-         * startup failure, it will be for the first ingest job pipeline.
+         * assumption here that if there is going to be a module startup
+         * failure, it will be for the first ingest job pipeline.
          *
          * TODO (RC): Consider separating module start up from pipeline startup
          * so that no processing is done if this assumption is false.
@@ -508,7 +508,8 @@ public final class IngestJob {
          * used to get basic information about the module and to request
          * cancellation of the module.
          *
-         * @param ingestJobPipeline The ingestJobPipeline that owns the data source level ingest module.
+         * @param ingestJobPipeline The ingestJobPipeline that owns the data
+         *                          source level ingest module.
          * @param module            The data source level ingest module.
          */
         private DataSourceIngestModuleHandle(IngestJobPipeline ingestJobPipeline, DataSourceIngestPipeline.DataSourcePipelineModule module) {
@@ -4,7 +4,7 @@ AddEditCategoryDialog.categoryLabel.text=Category:
 AddEditCategoryDialog.domainSuffixLabel.text=Domain Suffix:
 AddEditCategoryDialog.saveButton.text=Save
 AddEditCategoryDialog.cancelButton.text=Cancel
-WebCategoriesOptionsPanel.panelDescription.text=This module allows you to classify web sites based on domain names.
+WebCategoriesOptionsPanel.panelDescription.text=<html>This feature allows the Recent Activity ingest module to perform custom categorization of web sites based on domain suffixes.</html>
 WebCategoriesOptionsPanel.categoriesTitle.text=Categories:
 WebCategoriesOptionsPanel.newEntryButton.text=New Entry
 WebCategoriesOptionsPanel.editEntryButton.text=Edit Entry
@@ -27,7 +27,7 @@ AddEditCategoryDialog.categoryLabel.text=Category:
 AddEditCategoryDialog.domainSuffixLabel.text=Domain Suffix:
 AddEditCategoryDialog.saveButton.text=Save
 AddEditCategoryDialog.cancelButton.text=Cancel
-WebCategoriesOptionsPanel.panelDescription.text=This module allows you to classify web sites based on domain names.
+WebCategoriesOptionsPanel.panelDescription.text=<html>This feature allows the Recent Activity ingest module to perform custom categorization of web sites based on domain suffixes.</html>
 WebCategoriesOptionsPanel.categoriesTitle.text=Categories:
 WebCategoriesOptionsPanel.newEntryButton.text=New Entry
 WebCategoriesOptionsPanel.editEntryButton.text=Edit Entry
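Both bundle files update the same panelDescription key; wrapping the value in <html> tags lets the Swing label that displays it wrap long text. At runtime the key is resolved through NbBundle, the same mechanism visible in the generated code further down. A minimal sketch, assuming a class that sits next to the Bundle.properties defining the key:

    import org.openide.util.NbBundle;

    class PanelText {
        // Looks up the key in the Bundle.properties adjacent to the given class;
        // the real code passes WebCategoriesOptionsPanel.class.
        static String panelDescription() {
            return NbBundle.getMessage(PanelText.class, "WebCategoriesOptionsPanel.panelDescription.text");
        }
    }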
@@ -197,8 +197,8 @@
       </Container>
       <Component class="javax.swing.JLabel" name="ingestRunningWarning">
         <Properties>
-          <Property name="foreground" type="java.awt.Color" editor="org.netbeans.modules.form.RADConnectionPropertyEditor">
-            <Connection code="java.awt.Color.RED" type="code"/>
+          <Property name="icon" type="javax.swing.Icon" editor="org.netbeans.modules.form.editors2.IconEditor">
+            <Image iconType="3" name="/org/sleuthkit/autopsy/images/warning16.png"/>
           </Property>
           <Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
             <ResourceString bundle="org/sleuthkit/autopsy/url/analytics/domaincategorization/Bundle.properties" key="WebCategoriesOptionsPanel.ingestRunningWarning.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
@@ -373,7 +373,7 @@ public class WebCategoriesOptionsPanel extends IngestModuleGlobalSettingsPanel i
         gridBagConstraints.weightx = 1.0;
         add(bottomStrut, gridBagConstraints);
 
-        ingestRunningWarning.setForeground(java.awt.Color.RED);
+        ingestRunningWarning.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/warning16.png"))); // NOI18N
         ingestRunningWarning.setText(org.openide.util.NbBundle.getMessage(WebCategoriesOptionsPanel.class, "WebCategoriesOptionsPanel.ingestRunningWarning.text")); // NOI18N
         gridBagConstraints = new java.awt.GridBagConstraints();
         gridBagConstraints.gridx = 0;
@@ -32,6 +32,7 @@ import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import org.apache.commons.lang.StringUtils;
 import org.openide.util.Lookup;
 import org.openide.util.NbBundle.Messages;
@@ -86,6 +87,17 @@ class DomainCategoryRunner extends Extract {
     // NOTE: if CustomWebCategorizer ever changes name, this will need to be changed as well.
     private static final String CUSTOM_CATEGORIZER_PATH = "org.sleuthkit.autopsy.url.analytics.domaincategorization.CustomWebCategorizer";
 
+    // the artifact types to be searched for domain categories
+    private static final List<BlackboardArtifact.Type> DOMAIN_CATEGORIZATION_TYPES = Stream.of(
+            BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK,
+            BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_CACHE,
+            BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE,
+            BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD,
+            BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY,
+            BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY)
+            .map(BlackboardArtifact.Type::new)
+            .collect(Collectors.toList());
+
     /**
      * Get seconds from epoch from the mapping for the attribute type id.
      *
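The added constant is built once when the class loads: Stream.of enumerates the six web artifact enum values, map wraps each in a BlackboardArtifact.Type, and collect materializes the list handed to the blackboard query in the next hunk. The same shape reduced to plain JDK types, so it runs standalone:

    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    public class TypeListDemo {
        // Stand-in for BlackboardArtifact.Type: a simple wrapper around a name.
        record ArtifactType(String name) {}

        // Built once, in declaration order, mirroring DOMAIN_CATEGORIZATION_TYPES.
        static final List<ArtifactType> TYPES = Stream.of(
                "TSK_WEB_BOOKMARK", "TSK_WEB_CACHE", "TSK_WEB_COOKIE",
                "TSK_WEB_DOWNLOAD", "TSK_WEB_HISTORY", "TSK_WEB_SEARCH_QUERY")
                .map(ArtifactType::new)        // same enum-to-wrapper step as the diff
                .collect(Collectors.toList());

        public static void main(String[] args) {
            TYPES.forEach(t -> System.out.println(t.name()));
        }
    }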
@@ -355,7 +367,7 @@ class DomainCategoryRunner extends Extract {
         Set<String> hostSuffixesSeen = new HashSet<>();
         try {
             List<BlackboardArtifact> listArtifacts = currentCase.getSleuthkitCase().getBlackboard().getArtifacts(
-                    Arrays.asList(new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_WEB_HISTORY)),
+                    DOMAIN_CATEGORIZATION_TYPES,
                     Arrays.asList(dataSource.getId()));
 
             logger.log(Level.INFO, "Processing {0} blackboard artifacts.", listArtifacts.size()); //NON-NLS
@@ -364,7 +376,8 @@ class DomainCategoryRunner extends Extract {
             for (BlackboardArtifact artifact : listArtifacts) {
                 // make sure we haven't cancelled
                 if (context.dataSourceIngestIsCancelled()) {
-                    break; //User cancelled the process.
+                    //User cancelled the process.
+                    break;
                 }
 
                 // get the pertinent details for this artifact.
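Splitting the comment onto its own line is cosmetic, but the loop it sits in shows the standard cooperative-cancellation pattern for ingest modules: poll the context's cancellation flag once per artifact and bail out promptly. A standalone sketch of the same shape, with an AtomicBoolean standing in for IngestJobContext:

    import java.util.List;
    import java.util.concurrent.atomic.AtomicBoolean;

    public class CancellableLoop {
        // Stand-in for context.dataSourceIngestIsCancelled().
        static final AtomicBoolean cancelled = new AtomicBoolean(false);

        static void process(List<String> artifacts) {
            for (String artifact : artifacts) {
                // make sure we haven't cancelled
                if (cancelled.get()) {
                    // User cancelled the process.
                    break;
                }
                System.out.println("processing " + artifact);
            }
        }

        public static void main(String[] args) {
            process(List.of("bookmark", "cookie", "history"));
        }
    }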
@@ -445,6 +445,7 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
     os_account_index = line.find('INSERT INTO "tsk_os_accounts"') > -1 or line.find('INSERT INTO tsk_os_accounts') > -1
     os_account_attr_index = line.find('INSERT INTO "tsk_os_account_attributes"') > -1 or line.find('INSERT INTO tsk_os_account_attributes') > -1
     os_account_instances_index = line.find('INSERT INTO "tsk_os_account_instances"') > -1 or line.find('INSERT INTO tsk_os_account_instances') > -1
+    data_artifacts_index = line.find('INSERT INTO "tsk_data_artifacts"') > -1 or line.find('INSERT INTO tsk_data_artifacts') > -1
 
     parens = line[line.find('(') + 1 : line.rfind(')')]
     no_space_parens = parens.replace(" ", "")
@@ -670,6 +671,19 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
             fields_list[1] = accounts_table[os_account_id]
         newLine = ('INSERT INTO "tsk_os_account_instances" VALUES(' + ','.join(fields_list[1:]) + ');') # remove id
         return newLine
+    elif data_artifacts_index:
+        art_obj_id = int(fields_list[0])
+        if art_obj_id in files_table.keys():
+            fields_list[0] = files_table[art_obj_id]
+        else:
+            fields_list[0] = 'Artifact Object ID Omitted'
+        account_obj_id = int(fields_list[1])
+        if account_obj_id in files_table.keys():
+            fields_list[1] = files_table[account_obj_id]
+        else:
+            fields_list[1] = 'Account Object ID Omitted'
+        newLine = ('INSERT INTO "tsk_data_artifacts" VALUES(' + ','.join(fields_list[:]) + ');') # remove ids
+        return newLine
     else:
         return line
 
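The new elif keeps regression-test output stable: artifact and account object IDs vary between runs, so each is replaced by the object's path when files_table knows it, or by a fixed placeholder otherwise (fields_list[:] copies every column; the "# remove ids" comment refers to the IDs being replaced, not dropped). A standalone sketch of the same normalization, with a hypothetical files_table:

    # Hypothetical one-entry mapping of object IDs to stable paths.
    files_table = {42: '/img/vol1/Documents/note.txt'}

    def normalize_data_artifact(fields_list, files_table):
        # Swap the artifact object ID (column 0) for its path, else a fixed marker.
        fields_list[0] = files_table.get(int(fields_list[0]), 'Artifact Object ID Omitted')
        # Same treatment for the OS account object ID (column 1).
        fields_list[1] = files_table.get(int(fields_list[1]), 'Account Object ID Omitted')
        return 'INSERT INTO "tsk_data_artifacts" VALUES(' + ','.join(str(f) for f in fields_list) + ');'

    print(normalize_data_artifact(['42', '99', "'some-uuid'"], files_table))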
@@ -798,7 +812,7 @@ def build_id_accounts_table(db_cursor, isPostgreSQL):
     """
     # for each row in the db, take the object id and account SID then creates a tuple in the dictionary
     # with the object id as the key and the OS Account's SID as the value
-    mapping = dict([(row[0], row[1]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT os_account_obj_id, unique_id FROM tsk_os_accounts")])
+    mapping = dict([(row[0], row[1]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT os_account_obj_id, addr FROM tsk_os_accounts")])
     return mapping
 
 def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports_table, images_table, accounts_table):
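The SELECT now reads addr instead of unique_id, tracking the rename of the SID column in the tsk_os_accounts schema. A runnable sketch of the mapping this function builds, against an in-memory stand-in for the case database (the two-column table and SID value are hypothetical reductions):

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE tsk_os_accounts (os_account_obj_id INTEGER, addr TEXT)')
    conn.execute("INSERT INTO tsk_os_accounts VALUES (50, 'S-1-5-21-111-222-333-1001')")
    # dict() over (key, value) rows: object ID -> account SID (addr).
    mapping = dict(conn.execute('SELECT os_account_obj_id, addr FROM tsk_os_accounts'))
    print(mapping)  # {50: 'S-1-5-21-111-222-333-1001'}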
@@ -810,7 +824,7 @@ def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports
     artifacts_table: obj_id, artifact_type_name
     reports_table: obj_id, path
     images_table: obj_id, name
-    accounts_table: obj_id, unique_id
+    accounts_table: obj_id, addr
     """
     # make a copy of files_table and update it with new data from artifacts_table and reports_table
     mapping = files_table.copy()
@@ -830,7 +844,7 @@ def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports
         elif par_obj_id in images_table.keys():
             path = images_table[par_obj_id]
             mapping[k] = path + "/" + artifacts_table[k]
-        elif k in accounts_table.keys(): # For an OS Account object ID we use its unique_id field which is the account SID
+        elif k in accounts_table.keys(): # For an OS Account object ID we use its addr field which is the account SID
             mapping[k] = accounts_table[k]
         elif v[0] not in mapping.keys():
             if v[0] in artifacts_table.keys():