diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/utils/IconsUtil.java b/Core/src/org/sleuthkit/autopsy/datamodel/utils/IconsUtil.java
index 78cee32000..de88e41e04 100755
--- a/Core/src/org/sleuthkit/autopsy/datamodel/utils/IconsUtil.java
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/utils/IconsUtil.java
@@ -70,7 +70,7 @@ public final class IconsUtil {
         } else if (typeID == ARTIFACT_TYPE.TSK_SPEED_DIAL_ENTRY.getTypeID()) {
             imageFile = "speeddialentry.png"; //NON-NLS
         } else if (typeID == ARTIFACT_TYPE.TSK_BLUETOOTH_PAIRING.getTypeID()) {
-            imageFile = "bluetooth.png"; //NON-NLS
+            imageFile = "Bluetooth.png"; //NON-NLS
         } else if (typeID == ARTIFACT_TYPE.TSK_GPS_BOOKMARK.getTypeID()) {
             imageFile = "gpsfav.png"; //NON-NLS
         } else if (typeID == ARTIFACT_TYPE.TSK_GPS_LAST_KNOWN_LOCATION.getTypeID()) {
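Why the one-character rename above matters: icon resources are loaded from the module jar via the classpath, and jar entry lookups are case-sensitive regardless of the host filesystem. A minimal standalone sketch of the failure mode, not Autopsy code; the resource path here is hypothetical:

```java
import java.net.URL;

public class IconLookupSketch {
    public static void main(String[] args) {
        // Jar/classpath lookups compare names exactly, so "bluetooth.png"
        // returns null if the packaged file is "Bluetooth.png", even on
        // a case-insensitive filesystem like NTFS.
        URL icon = IconLookupSketch.class.getResource("/images/Bluetooth.png");
        if (icon == null) {
            System.err.println("icon not found (check the exact case of the file name)");
        } else {
            System.out.println("loaded " + icon);
        }
    }
}
```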
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java
index decccd4e84..6f6dc80b87 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java
@@ -63,7 +63,7 @@ public final class IngestJob {
             return displayName;
         }
     }
-    
+
     /**
      * Ingest job mode.
      */
@@ -71,7 +71,7 @@ public final class IngestJob {
         BATCH,
         STREAMING
     }
-    
+
     private static final Logger logger = Logger.getLogger(IngestJob.class.getName());
     private final static AtomicLong nextId = new AtomicLong(0L);
     private final long id;
@@ -113,12 +113,12 @@ public final class IngestJob {
         this(Arrays.asList(dataSource), settings);
         this.files.addAll(files);
     }
-    
+
     /**
-     * Constructs an ingest job that analyzes one data source, possibly using
-     * an ingest stream.
+     * Constructs an ingest job that analyzes one data source, possibly using an
+     * ingest stream.
      *
-     * @param settings The ingest job settings.
+     * @param settings   The ingest job settings.
      */
     IngestJob(DataSource dataSource, Mode ingestMode, IngestJobSettings settings) {
         this.id = IngestJob.nextId.getAndIncrement();
@@ -149,10 +149,10 @@ public final class IngestJob {
     boolean hasIngestPipeline() {
         return (!settings.getEnabledIngestModuleTemplates().isEmpty());
     }
-    
+
     /**
      * Add a set of files (by object ID) to be ingested.
-     * 
+     *
      * @param fileObjIds the list of file IDs
      */
     void addStreamingIngestFiles(List<Long> fileObjIds) {
@@ -164,7 +164,7 @@ public final class IngestJob {
         IngestJobPipeline streamingIngestPipeline = ingestJobPipelines.values().iterator().next();
         streamingIngestPipeline.addStreamingIngestFiles(fileObjIds);
     }
-    
+
     /**
      * Start data source processing for streaming ingest.
      */
@@ -185,7 +185,7 @@ public final class IngestJob {
      * @return A collection of ingest module start up errors, empty on success.
      */
     List<IngestModuleError> start() {
-        
+
         /*
         * Set up the pipeline(s)
         */
@@ -199,11 +199,11 @@ public final class IngestJob {
             this.ingestJobPipelines.put(ingestJobPipeline.getId(), ingestJobPipeline);
         }
         incompleteJobsCount.set(ingestJobPipelines.size());
-        
+
         /*
          * Try to start each data source ingest job. Note that there is an
-         * assumption here that if there is going to be a module
-         * startup failure, it will be for the first ingest job pipeline.
+         * assumption here that if there is going to be a module startup
+         * failure, it will be for the first ingest job pipeline.
          *
          * TODO (RC): Consider separating module start up from pipeline startup
         * so that no processing is done if this assumption is false.
@@ -229,14 +229,14 @@ public final class IngestJob {
         return errors;
     }
-    
+
     /**
      * Get the ingest mode for this job (batch or streaming).
-     * 
+     *
      * @return the ingest mode.
      */
     Mode getIngestMode() {
-        return ingestMode;    
+        return ingestMode;
     }
 
     /**
@@ -251,8 +251,8 @@ public final class IngestJob {
     /**
      * Gets a snapshot of the progress of this ingest job.
      *
-     * @param getIngestTasksSnapshot   
-     * 
+     * @param getIngestTasksSnapshot
+     *
      * @return The snapshot.
      */
     public ProgressSnapshot getSnapshot(boolean getIngestTasksSnapshot) {
@@ -508,8 +508,9 @@ public final class IngestJob {
          * used to get basic information about the module and to request
          * cancellation of the module.
          *
-         * @param ingestJobPipeline The ingestJobPipeline that owns the data source level ingest module.
-         * @param module The data source level ingest module.
+         * @param ingestJobPipeline The ingestJobPipeline that owns the data
+         *                          source level ingest module.
+         * @param module            The data source level ingest module.
          */
         private DataSourceIngestModuleHandle(IngestJobPipeline ingestJobPipeline, DataSourceIngestPipeline.DataSourcePipelineModule module) {
             this.ingestJobPipeline = ingestJobPipeline;
diff --git a/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/Bundle.properties b/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/Bundle.properties
index fae4fa97ec..f4d258315a 100644
--- a/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/Bundle.properties
+++ b/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/Bundle.properties
@@ -4,7 +4,7 @@ AddEditCategoryDialog.categoryLabel.text=Category:
 AddEditCategoryDialog.domainSuffixLabel.text=Domain Suffix:
 AddEditCategoryDialog.saveButton.text=Save
 AddEditCategoryDialog.cancelButton.text=Cancel
-WebCategoriesOptionsPanel.panelDescription.text=This module allows you to classify web sites based on domain names.
+WebCategoriesOptionsPanel.panelDescription.text=This feature allows the Recent Activity ingest module to perform custom categorization of web sites based on domain suffixes.
 WebCategoriesOptionsPanel.categoriesTitle.text=Categories:
 WebCategoriesOptionsPanel.newEntryButton.text=New Entry
 WebCategoriesOptionsPanel.editEntryButton.text=Edit Entry
diff --git a/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/Bundle.properties-MERGED
index 68993c6213..f08c1c2986 100644
--- a/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/Bundle.properties-MERGED
+++ b/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/Bundle.properties-MERGED
@@ -27,7 +27,7 @@ AddEditCategoryDialog.categoryLabel.text=Category:
 AddEditCategoryDialog.domainSuffixLabel.text=Domain Suffix:
 AddEditCategoryDialog.saveButton.text=Save
 AddEditCategoryDialog.cancelButton.text=Cancel
-WebCategoriesOptionsPanel.panelDescription.text=This module allows you to classify web sites based on domain names.
+WebCategoriesOptionsPanel.panelDescription.text=This feature allows the Recent Activity ingest module to perform custom categorization of web sites based on domain suffixes.
 WebCategoriesOptionsPanel.categoriesTitle.text=Categories:
 WebCategoriesOptionsPanel.newEntryButton.text=New Entry
 WebCategoriesOptionsPanel.editEntryButton.text=Edit Entry
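The IngestJob hunks above are mostly javadoc reflow and trailing-whitespace cleanup around the streaming-ingest API: in STREAMING mode, files are queued incrementally by object ID while data-source-level analysis is deferred until the data source has been fully added. A hedged sketch of that call sequence; only `addStreamingIngestFiles` comes from the diff, while the interface, the driver, and the name of the data-source-processing method (taken from its javadoc summary) are illustrative:

```java
import java.util.Arrays;
import java.util.List;

interface StreamingJob {
    void addStreamingIngestFiles(List<Long> fileObjIds); // queue files as they arrive

    void processStreamingIngestDataSource();             // assumed name, from the javadoc
}

public class StreamingDriverSketch {
    public static void main(String[] args) {
        StreamingJob job = new StreamingJob() {
            @Override
            public void addStreamingIngestFiles(List<Long> fileObjIds) {
                System.out.println("queued " + fileObjIds);
            }

            @Override
            public void processStreamingIngestDataSource() {
                System.out.println("starting data-source-level analysis");
            }
        };
        // File-level tasks stream in while the data source is still being added...
        job.addStreamingIngestFiles(Arrays.asList(1L, 2L, 3L));
        job.addStreamingIngestFiles(Arrays.asList(4L, 5L));
        // ...and data-source-level modules start once it is complete.
        job.processStreamingIngestDataSource();
    }
}
```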
diff --git a/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/WebCategoriesOptionsPanel.form b/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/WebCategoriesOptionsPanel.form
index 177919921c..00dc510d0d 100644
--- a/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/WebCategoriesOptionsPanel.form
+++ b/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/WebCategoriesOptionsPanel.form
@@ -197,8 +197,8 @@
-
-
+
+
diff --git a/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/WebCategoriesOptionsPanel.java b/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/WebCategoriesOptionsPanel.java
index 3f7ca1297d..7db16e9d19 100644
--- a/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/WebCategoriesOptionsPanel.java
+++ b/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/WebCategoriesOptionsPanel.java
@@ -373,7 +373,7 @@ public class WebCategoriesOptionsPanel extends IngestModuleGlobalSettingsPanel i
         gridBagConstraints.weightx = 1.0;
         add(bottomStrut, gridBagConstraints);
 
-        ingestRunningWarning.setForeground(java.awt.Color.RED);
+        ingestRunningWarning.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/warning16.png"))); // NOI18N
         ingestRunningWarning.setText(org.openide.util.NbBundle.getMessage(WebCategoriesOptionsPanel.class, "WebCategoriesOptionsPanel.ingestRunningWarning.text")); // NOI18N
         gridBagConstraints = new java.awt.GridBagConstraints();
         gridBagConstraints.gridx = 0;
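The panel change above (mirrored in the generated .form file, whose XML property lines did not survive extraction) replaces a red-foreground warning with a 16px warning icon beside the label text, matching other Autopsy panels. A minimal Swing sketch of the pattern; the resource path is the one referenced in the diff, the helper itself is hypothetical:

```java
import java.net.URL;
import javax.swing.ImageIcon;
import javax.swing.JLabel;

public class WarningLabelSketch {
    static JLabel ingestRunningWarning(String message) {
        JLabel label = new JLabel(message);
        // Icon instead of RED foreground text; degrades to plain text
        // if the resource is missing.
        URL iconUrl = WarningLabelSketch.class.getResource(
                "/org/sleuthkit/autopsy/images/warning16.png");
        if (iconUrl != null) {
            label.setIcon(new ImageIcon(iconUrl));
        }
        return label;
    }

    public static void main(String[] args) {
        System.out.println(ingestRunningWarning("Ingest is running").getText());
    }
}
```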
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DomainCategoryRunner.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DomainCategoryRunner.java
index f65673b6d0..0102f6e868 100644
--- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DomainCategoryRunner.java
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DomainCategoryRunner.java
@@ -32,6 +32,7 @@ import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import org.apache.commons.lang.StringUtils;
 import org.openide.util.Lookup;
 import org.openide.util.NbBundle.Messages;
@@ -86,6 +87,17 @@ class DomainCategoryRunner extends Extract {
     // NOTE: if CustomWebCategorizer ever changes name, this will need to be changed as well.
     private static final String CUSTOM_CATEGORIZER_PATH = "org.sleuthkit.autopsy.url.analytics.domaincategorization.CustomWebCategorizer";
 
+    // the artifact types to be searched for domain categories
+    private static final List<BlackboardArtifact.Type> DOMAIN_CATEGORIZATION_TYPES = Stream.of(
+            BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK,
+            BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_CACHE,
+            BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE,
+            BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD,
+            BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY,
+            BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY)
+            .map(BlackboardArtifact.Type::new)
+            .collect(Collectors.toList());
+
     /**
      * Get seconds from epoch from the mapping for the attribute type id.
      *
@@ -168,7 +180,7 @@ class DomainCategoryRunner extends Extract {
      * Main constructor.
      */
     DomainCategoryRunner() {
-        
+
     }
 
     /**
@@ -355,7 +367,7 @@ class DomainCategoryRunner extends Extract {
         Set<String> hostSuffixesSeen = new HashSet<>();
         try {
             List<BlackboardArtifact> listArtifacts = currentCase.getSleuthkitCase().getBlackboard().getArtifacts(
-                    Arrays.asList(new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_WEB_HISTORY)),
+                    DOMAIN_CATEGORIZATION_TYPES,
                     Arrays.asList(dataSource.getId()));
 
             logger.log(Level.INFO, "Processing {0} blackboard artifacts.", listArtifacts.size()); //NON-NLS
@@ -364,7 +376,8 @@ class DomainCategoryRunner extends Extract {
             for (BlackboardArtifact artifact : listArtifacts) {
                 // make sure we haven't cancelled
                 if (context.dataSourceIngestIsCancelled()) {
-                    break; //User cancelled the process.
+                    //User cancelled the process.
+                    break;
                 }
 
                 // get the pertinent details for this artifact.
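DomainCategoryRunner previously categorized only TSK_WEB_HISTORY artifacts; the new constant widens the query to all six web artifact types and builds the list once at class load instead of per query. The Stream idiom generalizes to any enum-to-wrapper mapping; a self-contained sketch with simplified types, not the TSK API:

```java
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class TypeListSketch {
    enum WebArtifact { WEB_BOOKMARK, WEB_CACHE, WEB_COOKIE, WEB_DOWNLOAD, WEB_HISTORY, WEB_SEARCH_QUERY }

    // Built once at class load, like DOMAIN_CATEGORIZATION_TYPES above;
    // Enum::name stands in for BlackboardArtifact.Type::new.
    static final List<String> WEB_TYPE_NAMES = Stream.of(WebArtifact.values())
            .map(Enum::name)
            .collect(Collectors.toList());

    public static void main(String[] args) {
        WEB_TYPE_NAMES.forEach(System.out::println);
    }
}
```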
diff --git a/test/script/tskdbdiff.py b/test/script/tskdbdiff.py
index 9452b335d9..cec54316d2 100644
--- a/test/script/tskdbdiff.py
+++ b/test/script/tskdbdiff.py
@@ -445,6 +445,7 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
     os_account_index = line.find('INSERT INTO "tsk_os_accounts"') > -1 or line.find('INSERT INTO tsk_os_accounts') > -1
     os_account_attr_index = line.find('INSERT INTO "tsk_os_account_attributes"') > -1 or line.find('INSERT INTO tsk_os_account_attributes') > -1
     os_account_instances_index = line.find('INSERT INTO "tsk_os_account_instances"') > -1 or line.find('INSERT INTO tsk_os_account_instances') > -1
+    data_artifacts_index = line.find('INSERT INTO "tsk_data_artifacts"') > -1 or line.find('INSERT INTO tsk_data_artifacts') > -1
 
     parens = line[line.find('(') + 1 : line.rfind(')')]
     no_space_parens = parens.replace(" ", "")
@@ -670,6 +671,19 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
         fields_list[1] = accounts_table[os_account_id]
         newLine = ('INSERT INTO "tsk_os_account_instances" VALUES(' + ','.join(fields_list[1:]) + ');') # remove id
         return newLine
+    elif data_artifacts_index:
+        art_obj_id = int(fields_list[0])
+        if art_obj_id in files_table.keys():
+            fields_list[0] = files_table[art_obj_id]
+        else:
+            fields_list[0] = 'Artifact Object ID Omitted'
+        account_obj_id = int(fields_list[1])
+        if account_obj_id in files_table.keys():
+            fields_list[1] = files_table[account_obj_id]
+        else:
+            fields_list[1] = 'Account Object ID Omitted'
+        newLine = ('INSERT INTO "tsk_data_artifacts" VALUES(' + ','.join(fields_list[:]) + ');') # remove ids
+        return newLine
     else:
         return line
 
@@ -798,7 +812,7 @@ def build_id_accounts_table(db_cursor, isPostgreSQL):
     """
     # for each row in the db, take the object id and account SID then creates a tuple in the dictionary
     # with the object id as the key and the OS Account's SID as the value
-    mapping = dict([(row[0], row[1]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT os_account_obj_id, unique_id FROM tsk_os_accounts")])
+    mapping = dict([(row[0], row[1]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT os_account_obj_id, addr FROM tsk_os_accounts")])
     return mapping
 
 def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports_table, images_table, accounts_table):
@@ -810,7 +824,7 @@ def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports
         artifacts_table: obj_id, artifact_type_name
         reports_table: obj_id, path
         images_table: obj_id, name
-        accounts_table: obj_id, unique_id
+        accounts_table: obj_id, addr
     """
     # make a copy of files_table and update it with new data from artifacts_table and reports_table
    mapping = files_table.copy()
@@ -830,7 +844,7 @@ def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports
         elif par_obj_id in images_table.keys():
             path = images_table[par_obj_id]
             mapping[k] = path + "/" + artifacts_table[k]
-        elif k in accounts_table.keys(): # For an OS Account object ID we use its unique_id field which is the account SID
+        elif k in accounts_table.keys(): # For an OS Account object ID we use its addr field which is the account SID
             mapping[k] = accounts_table[k]
         elif v[0] not in mapping.keys():
             if v[0] in artifacts_table.keys():
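The tskdbdiff.py changes extend the regression-test normalizer to the new tsk_data_artifacts table and switch OS-account lookups from the removed unique_id column to addr. The underlying idea is to replace volatile auto-increment object IDs with stable strings (a path, or a fixed placeholder) before diffing two database dumps, so ID churn between runs does not produce spurious differences. A hedged sketch of that idea, written in Java for consistency with the other sketches here; the names are illustrative, not the real schema:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class IdNormalizerSketch {
    // Replace the values at the given positions with stable path strings,
    // or a placeholder when the ID is not in the lookup table.
    static List<String> normalizeIds(List<String> fields, Map<Long, String> idToPath, int... positions) {
        List<String> out = new ArrayList<>(fields);
        for (int pos : positions) {
            long objId = Long.parseLong(out.get(pos));
            out.set(pos, idToPath.getOrDefault(objId, "Object ID Omitted"));
        }
        return out;
    }

    public static void main(String[] args) {
        // ID 10 maps to a known path; 99 is unknown and gets the placeholder.
        System.out.println(normalizeIds(List.of("10", "x", "99"), Map.of(10L, "/img/file.txt"), 0, 2));
    }
}
```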