4726 add image names table and lookup for normalizing data source names

Commit: c0d3067a3e
Parent: ca7b516e73
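The change normalizes data source names in the database dumps compared by the diff test: lines that refer to an image by its numeric object id are rewritten to use the image's name, so dumps from different runs compare consistently. Roughly, the new lookup is a plain dictionary from object id to image name; a minimal standalone sketch (made-up ids and names, not actual test data):

    # Hypothetical contents of the new image-names lookup: obj_id -> image name.
    id_images_table = {2: "image1.E01", 57: "usb_drive.dd"}

    # During normalization, a parent object id found in this table is replaced
    # by the stable image name instead of the run-specific number.
    parent_id = 2
    parent_path = id_images_table.get(parent_id, "NULL")
    print(parent_path)  # image1.E01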
@@ -320,7 +320,8 @@ class TskDbDiff(object):
         id_objects_table = build_id_objects_table(conn.cursor(), isMultiUser)
         id_artifact_types_table = build_id_artifact_types_table(conn.cursor(), isMultiUser)
         id_reports_table = build_id_reports_table(conn.cursor(), isMultiUser)
-        id_obj_path_table = build_id_obj_path_table(id_files_table, id_objects_table, id_artifact_types_table, id_reports_table)
+        id_images_table = build_id_images_names_table(conn.cursor(), isMultiUser)
+        id_obj_path_table = build_id_obj_path_table(id_files_table, id_objects_table, id_artifact_types_table, id_reports_table, id_images_table)
 
         if isMultiUser: # Use PostgreSQL
             os.environ['PGPASSWORD']=pgSettings.password
@@ -340,7 +341,7 @@ class TskDbDiff(object):
                         continue
                     else:
                         dump_line += line
-                    dump_line = normalize_db_entry(dump_line, id_obj_path_table, id_vs_parts_table, id_vs_info_table, id_fs_info_table, id_objects_table, id_reports_table)
+                    dump_line = normalize_db_entry(dump_line, id_obj_path_table, id_vs_parts_table, id_vs_info_table, id_fs_info_table, id_objects_table, id_reports_table, id_images_table)
                     db_log.write('%s\n' % dump_line)
                     dump_line = ''
             postgreSQL_db.close()
@@ -354,7 +355,7 @@ class TskDbDiff(object):
                 for line in conn.iterdump():
                     if 'INSERT INTO "image_gallery_groups_seen"' in line:
                         continue
-                    line = normalize_db_entry(line, id_obj_path_table, id_vs_parts_table, id_vs_info_table, id_fs_info_table, id_objects_table, id_reports_table)
+                    line = normalize_db_entry(line, id_obj_path_table, id_vs_parts_table, id_vs_info_table, id_fs_info_table, id_objects_table, id_reports_table, id_images_table)
                     db_log.write('%s\n' % line)
         # Now sort the file
         srtcmdlst = ["sort", dump_file, "-o", dump_file]
@@ -406,7 +407,7 @@ class PGSettings(object):
         return self.password
 
 
-def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info_table, objects_table, reports_table):
+def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info_table, objects_table, reports_table, images_table):
     """ Make testing more consistent and reasonable by doctoring certain db entries.
 
     Args:
@@ -515,6 +516,8 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
             parent_path = vs_info_table[parent_id]
         elif parent_id in fs_info_table.keys():
             parent_path = fs_info_table[parent_id]
+        elif parent_id in images_table.keys():
+            parent_path = images_table[parent_id]
         elif parent_id == 'NULL':
             parent_path = "NULL"
 
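Within normalize_db_entry, the new branch slots into the existing parent-lookup chain (files, vs_parts, vs_info, fs_info, and now images) so a parent id that belongs to a data source image resolves to its name rather than falling through. A trimmed sketch of that order, using hypothetical lookup tables rather than the ones built from the case database:

    # Simplified fallback order for a parent object id (subset of the real chain).
    def resolve_parent_path(parent_id, fs_info_table, images_table):
        if parent_id in fs_info_table:
            return fs_info_table[parent_id]
        elif parent_id in images_table:   # new branch: parent is a data source image
            return images_table[parent_id]
        elif parent_id == 'NULL':
            return "NULL"
        return str(parent_id)

    print(resolve_parent_path(2, {}, {2: "image1.E01"}))  # image1.E01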
@@ -615,11 +618,23 @@ def build_id_objects_table(db_cursor, isPostgreSQL):
     Args:
         db_cursor: the database cursor
     """
-    # for each row in the db, take the object id, par_obj_id, then create a tuple in the dictionary
+    # for each row in the db, take the object id, device_id, then create a tuple in the dictionary
     # with the object id as the key and par_obj_id, type as the value
     mapping = dict([(row[0], [row[1], row[2]]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT * FROM tsk_objects")])
     return mapping
 
+def build_id_images_names_table(db_cursor, isPostgreSQL):
+    """Build the map of object ids to name.
+
+    Args:
+        db_cursor: the database cursor
+    """
+    # for each row in the db, take the object id and name then create a tuple in the dictionary
+    # with the object id as the key and name, type as the value
+    mapping = dict([(row[0], row[1]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT obj_id, name FROM tsk_image_names WHERE sequence=0")])
+    #data_sources which are logical file sets will be found in the files table
+    return mapping
+
 def build_id_artifact_types_table(db_cursor, isPostgreSQL):
     """Build the map of object ids to artifact ids.
 
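build_id_images_names_table reads tsk_image_names, which can hold one row per image segment; filtering on sequence=0 keeps a single name per image object id, and, as the in-code comment notes, logical file set data sources have no row here and are still resolved through the files table. A sketch with hypothetical rows showing why the filter gives a clean obj_id -> name dictionary:

    # Made-up tsk_image_names rows for a split E01 image: same obj_id for every
    # segment, with an increasing sequence number.
    rows = [
        (2, "/cases/img/image1.E01", 0),
        (2, "/cases/img/image1.E02", 1),
        (2, "/cases/img/image1.E03", 2),
    ]
    # Keeping only sequence=0, as the new query does, yields one name per image.
    mapping = {obj_id: name for (obj_id, name, seq) in rows if seq == 0}
    print(mapping)  # {2: '/cases/img/image1.E01'}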
@@ -642,7 +657,7 @@ def build_id_reports_table(db_cursor, isPostgreSQL):
     return mapping
 
 
-def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports_table):
+def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports_table, images_table):
     """Build the map of object ids to artifact ids.
 
     Args:
@@ -666,6 +681,8 @@ def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports
                     path = mapping[par_obj_id]
                 elif par_obj_id in reports_table.keys():
                     path = reports_table[par_obj_id]
+                elif par_obj_id in images_table.keys():
+                    path = images_table[par_obj_id]
                 mapping[k] = path + "/" + artifacts_table[k]
         elif v[0] not in mapping.keys():
             if v[0] in artifacts_table.keys():