Mirror of https://github.com/overcuriousity/autopsy-flatpak.git, synced 2025-07-12 16:06:15 +00:00

Commit ebc6406d8c: merged develop branch into 1190-basic-edge-module

@@ -43,6 +43,7 @@ public final class StartupWindow extends JDialog implements StartupWindowInterfa
}

private void init() {

setSize(DIMENSIONS);
welcomeWindow = new CueBannerPanel();
welcomeWindow.setCloseButtonActionListener(new ActionListener() {
@@ -58,7 +59,9 @@ public final class StartupWindow extends JDialog implements StartupWindowInterfa

@Override
public void open() {
welcomeWindow.refresh();
if (welcomeWindow != null) {
welcomeWindow.refresh();
}
setLocationRelativeTo(WindowManager.getDefault().getMainWindow());
setVisible(true);
}

@@ -138,7 +138,8 @@ public class Metadata extends javax.swing.JPanel implements DataContentViewer {
"Metadata.tableRowTitle.timezone=Time Zone",
"Metadata.tableRowTitle.deviceId=Device ID",
"Metadata.tableRowTitle.acquisitionDetails=Acquisition Details",
"Metadata.nodeText.unknown=Unknown"})
"Metadata.nodeText.unknown=Unknown",
"Metadata.nodeText.none=None"})
@Override
public void setNode(Node node) {
AbstractFile file = node.getLookup().lookup(AbstractFile.class);
@@ -251,15 +252,20 @@ public class Metadata extends javax.swing.JPanel implements DataContentViewer {

// Add all the data source paths to the "Local Path" value cell.
String[] imagePaths = image.getPaths();
StringBuilder pathValues = new StringBuilder("<div>");
pathValues.append(imagePaths[0]);
pathValues.append("</div>");
for (int i=1; i < imagePaths.length; i++) {
pathValues.append("<div>");
pathValues.append(imagePaths[i]);
if (imagePaths.length > 0) {
StringBuilder pathValues = new StringBuilder("<div>");
pathValues.append(imagePaths[0]);
pathValues.append("</div>");
for (int i=1; i < imagePaths.length; i++) {
pathValues.append("<div>");
pathValues.append(imagePaths[i]);
pathValues.append("</div>");
}
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.localPath"), pathValues.toString());
} else {
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.localPath"),
NbBundle.getMessage(this.getClass(), "Metadata.nodeText.none"));
}
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.localPath"), pathValues.toString());
}

setText(sb.toString());

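For context, a minimal standalone sketch of the guarded rendering this hunk moves to: each image path is wrapped in a <div>, and a "None" label is used when a data source reports no local paths. The class, method, and constant names below are illustrative, not the committed identifiers.

    public class LocalPathCellSketch {
        private static final String NONE_LABEL = "None"; // stand-in for the Metadata.nodeText.none bundle string

        // Builds the "Local Path" cell value: one <div> per path, or the fallback label when there are none.
        static String formatLocalPaths(String[] imagePaths) {
            if (imagePaths == null || imagePaths.length == 0) {
                return NONE_LABEL;
            }
            StringBuilder pathValues = new StringBuilder();
            for (String path : imagePaths) {
                pathValues.append("<div>").append(path).append("</div>");
            }
            return pathValues.toString();
        }

        public static void main(String[] args) {
            System.out.println(formatLocalPaths(new String[]{"/evidence/image.E01", "/evidence/image.E02"}));
            System.out.println(formatLocalPaths(new String[0])); // prints the fallback label
        }
    }
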
@@ -31,8 +31,10 @@ import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.SwingWorker;
import javax.swing.text.BadLocationException;
import javax.swing.text.Utilities;
import org.netbeans.api.progress.ProgressHandle;
import org.openide.nodes.Node;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.casemodule.Case;
@@ -359,35 +361,70 @@ public class DataContentViewerHex extends javax.swing.JPanel implements DataCont
}//GEN-LAST:event_goToOffsetTextFieldActionPerformed

@NbBundle.Messages({"DataContentViewerHex.launchError=Unable to launch HxD Editor. "
+ "Please set-up the HdX install location in Tools -> Options -> External Viewer"})
+ "Please specify the HxD install location in Tools -> Options -> External Viewer",
"DataContentViewerHex.copyingFile=Copying file to open in HxD..."})
private void launchHxDButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_launchHxDButtonActionPerformed
try {
File HdXExecutable = new File(UserPreferences.getExternalHexEditorPath());
if(!HdXExecutable.exists() || !HdXExecutable.canExecute()) {
JOptionPane.showMessageDialog(null, DataContentViewerHex_launchError());
return;
}
new BackgroundFileCopyTask().execute();
}//GEN-LAST:event_launchHxDButtonActionPerformed

String tempDirectory = Case.getCurrentCaseThrows().getTempDirectory();
File dataSourceInTempDirectory = Paths.get(tempDirectory,
FileUtil.escapeFileName(dataSource.getId() + dataSource.getName())).toFile();
ContentUtils.writeToFile(dataSource, dataSourceInTempDirectory);
/**
* Performs the file copying and process launching in a SwingWorker so that the
* UI is not blocked when opening large files.
*/
private class BackgroundFileCopyTask extends SwingWorker<Void, Void> {
private boolean wasCancelled = false;

@Override
public Void doInBackground() throws InterruptedException {
ProgressHandle progress = ProgressHandle.createHandle(DataContentViewerHex_copyingFile(), () -> {
//Cancel the swing worker (which will interrupt the ContentUtils call below)
this.cancel(true);
wasCancelled = true;
return true;
});

try {
ProcessBuilder launchHdXExecutable = new ProcessBuilder();
launchHdXExecutable.command(String.format("\"%s\" \"%s\"",
HdXExecutable.getAbsolutePath(),
dataSourceInTempDirectory.getAbsolutePath()));
launchHdXExecutable.start();
} catch (IOException ex) {
File HxDExecutable = new File(UserPreferences.getExternalHexEditorPath());
if(!HxDExecutable.exists() || !HxDExecutable.canExecute()) {
JOptionPane.showMessageDialog(null, DataContentViewerHex_launchError());
return null;
}

String tempDirectory = Case.getCurrentCaseThrows().getTempDirectory();
File tempFile = Paths.get(tempDirectory,
FileUtil.escapeFileName(dataSource.getId() + dataSource.getName())).toFile();

progress.start(100);
ContentUtils.writeToFile(dataSource, tempFile, progress, this, true);

if(wasCancelled) {
tempFile.delete();
progress.finish();
return null;
}

try {
ProcessBuilder launchHxDExecutable = new ProcessBuilder();
launchHxDExecutable.command(String.format("\"%s\" \"%s\"",
HxDExecutable.getAbsolutePath(),
tempFile.getAbsolutePath()));
launchHxDExecutable.start();
} catch (IOException ex) {
logger.log(Level.WARNING, "Unsuccessful attempt to launch HxD", ex);
JOptionPane.showMessageDialog(null, DataContentViewerHex_launchError());
tempFile.delete();
}
} catch (NoCurrentCaseException | IOException ex) {
logger.log(Level.SEVERE, "Unable to copy file into temp directory", ex);
JOptionPane.showMessageDialog(null, DataContentViewerHex_launchError());
dataSourceInTempDirectory.delete();
}
} catch (NoCurrentCaseException | IOException ex) {
logger.log(Level.SEVERE, "Unable to copy file into temp directory", ex);
JOptionPane.showMessageDialog(null, DataContentViewerHex_launchError());

progress.finish();
return null;
}
}//GEN-LAST:event_launchHxDButtonActionPerformed
}

// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JMenuItem copyMenuItem;

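A simplified, self-contained sketch of the pattern the new BackgroundFileCopyTask follows: do the copy off the Event Dispatch Thread in a SwingWorker, honor cancellation, and only then launch the external editor with ProcessBuilder. The class name and the plain Files.copy step are placeholders; the committed code copies the data source with ContentUtils.writeToFile and reports progress through a NetBeans ProgressHandle.

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import javax.swing.SwingWorker;

    // Copies a file in the background, then launches an external viewer on the copy.
    class ExternalViewerLaunchTask extends SwingWorker<Void, Void> {
        private final Path source;
        private final Path tempCopy;
        private final File editorExecutable;

        ExternalViewerLaunchTask(Path source, Path tempCopy, File editorExecutable) {
            this.source = source;
            this.tempCopy = tempCopy;
            this.editorExecutable = editorExecutable;
        }

        @Override
        protected Void doInBackground() throws IOException {
            Files.copy(source, tempCopy); // long-running work stays off the EDT
            if (isCancelled()) {
                Files.deleteIfExists(tempCopy);
                return null;
            }
            // Pass the executable and the file as separate arguments; no manual quoting needed.
            new ProcessBuilder(editorExecutable.getAbsolutePath(), tempCopy.toString()).start();
            return null;
        }
    }

A caller would verify that the configured editor exists and can execute, then invoke new ExternalViewerLaunchTask(...).execute() from the event thread, much as the rewritten button handler does before scheduling its own task.
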
@@ -92,6 +92,12 @@ public class NetworkUtils {
if (base.matches(".*[~`!@#$%^&\*\(\)\+={}\[\];:\?<>,/ ].*")) {
return "";
}

//verify that the base domain actually has a '.', details JIRA-4609
if(!base.contains(".")) {
return "";
}

return base;
}

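A small illustrative wrapper around the two guards in this hunk: a candidate base domain is rejected if it contains characters that never occur in a host name, or if it has no '.' at all (the JIRA-4609 case). The class and helper names are made up for the example; the regular expression is the one from the hunk.

    public class BaseDomainCheckSketch {
        // Returns the input unchanged when it looks like a plausible base domain, otherwise "".
        static String validateBaseDomain(String base) {
            if (base.matches(".*[~`!@#$%^&\\*\\(\\)\\+={}\\[\\];:\\?<>,/ ].*")) {
                return "";
            }
            if (!base.contains(".")) {
                return "";
            }
            return base;
        }

        public static void main(String[] args) {
            System.out.println(validateBaseDomain("sleuthkit.org"));  // kept
            System.out.println(validateBaseDomain("localhost"));      // rejected: no '.'
            System.out.println(validateBaseDomain("bad domain.com")); // rejected: contains a space
        }
    }
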
@@ -516,8 +516,7 @@ class MSOfficeEmbeddedContentExtractor {
* @return
*/
private String getFileRelativePath(String fileName) {
// Used explicit FWD slashes to maintain DB consistency across operating systems.
return "/" + moduleDirRelative + "/" + this.parentFileName + "/" + fileName; //NON-NLS
return Paths.get(moduleDirRelative, this.parentFileName, fileName).toString();
}

/**

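The hunk swaps explicit forward-slash concatenation for Paths.get, which joins components with the platform separator ('\' on Windows, '/' elsewhere), whereas the removed comment notes the old form was chosen for DB consistency across operating systems. A tiny sketch of the difference, with placeholder path components:

    import java.nio.file.Paths;

    public class RelativePathSketch {
        public static void main(String[] args) {
            String moduleDirRelative = "ModuleOutput/EmbeddedFileExtractor"; // placeholder value
            String parentFileName = "report.docx";
            String fileName = "image1.png";

            // Old form: always '/' regardless of operating system.
            String slashJoined = "/" + moduleDirRelative + "/" + parentFileName + "/" + fileName;

            // New form: separator depends on the platform the code runs on.
            String pathsJoined = Paths.get(moduleDirRelative, parentFileName, fileName).toString();

            System.out.println(slashJoined);
            System.out.println(pathsJoined);
        }
    }
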
@@ -23,6 +23,7 @@ import java.util.Collections;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.logging.Level;
import java.util.stream.Collectors;
import org.apache.tika.Tika;
import org.apache.tika.io.TikaInputStream;
@@ -57,7 +58,7 @@ public class FileTypeDetector {
* @return A list of all detectable file types.
*
* @throws FileTypeDetectorInitException If an error occurs while assembling
* the list of types
* the list of types
*/
public static synchronized SortedSet<String> getDetectedTypes() throws FileTypeDetectorInitException {
TreeSet<String> detectedTypes = new TreeSet<>((String string1, String string2) -> {
@@ -108,9 +109,7 @@ public class FileTypeDetector {
* Tika, and Autopsy file type definitions take precendence over Tika.
*
* @throws FileTypeDetectorInitException If an initialization error occurs,
* e.g., user-defined file type
* definitions exist but cannot be
* loaded.
* e.g., user-defined file type definitions exist but cannot be loaded.
*/
public FileTypeDetector() throws FileTypeDetectorInitException {
try {
@@ -140,7 +139,7 @@ public class FileTypeDetector {
* user-defined MIME type by this detector.
*
* @param customTypes
* @param mimeType The MIME type name (e.g., "text/html").
* @param mimeType The MIME type name (e.g., "text/html").
*
* @return True or false.
*/
@@ -171,9 +170,9 @@ public class FileTypeDetector {
* @param file The file to test.
*
* @return A MIME type name. If file type could not be detected, or results
* were uncertain, octet-stream is returned.
* were uncertain, octet-stream is returned.
*
*

*/
public String getMIMEType(AbstractFile file) {
/*
@@ -235,6 +234,22 @@ public class FileTypeDetector {
*/
mimeType = removeOptionalParameter(mimeType);

/**
* We cannot trust Tika's audio/mpeg mimetype. Lets verify the
* first two bytes and confirm it is not 0xffff. Details in
* JIRA-4659
*/
if (mimeType.contains("audio/mpeg")) {
try {
byte[] header = getNBytes(file, 0, 2);
if (byteIs0xFF(header[0]) && byteIs0xFF(header[1])) {
mimeType = MimeTypes.OCTET_STREAM;
}
} catch (TskCoreException ex) {
//Oh well, the mimetype is what it is.
logger.log(Level.WARNING, String.format("Could not verify audio/mpeg mimetype for file %s with id=%d", file.getName(), file.getId()), ex);
}
}
} catch (Exception ignored) {
/*
* This exception is swallowed and not logged rather than
@@ -255,6 +270,33 @@ public class FileTypeDetector {
return mimeType;
}

/**
* Determine if the byte is 255 (0xFF) by examining the last 4 bits and the
* first 4 bits.
*
* @param x byte
* @return Flag indicating the byte if 0xFF
*/
private boolean byteIs0xFF(byte x) {
return (x & 0x0F) == 0x0F && (x & 0xF0) == 0xF0;
}

/**
* Retrieves the first N bytes from a file.
*
* @param file Abstract file to read
* @param offset Offset to begin reading
* @param n Number of bytes to read
* @return Byte array of size n
*
* @throws TskCoreException
*/
private byte[] getNBytes(AbstractFile file, int offset, int n) throws TskCoreException {
byte[] headerCache = new byte[n];
file.read(headerCache, offset, n);
return headerCache;
}

/**
* Removes the optional parameter from a MIME type string
*
@@ -291,7 +333,8 @@ public class FileTypeDetector {
}

/**
* Determines whether or not a file matches a custom file type defined by Autopsy.
* Determines whether or not a file matches a custom file type defined by
* Autopsy.
*
* @param file The file to test.
*
@@ -328,7 +371,7 @@ public class FileTypeDetector {
* Constructs an exception to throw if an initialization error occurs,
* e.g., user-defined file type definitions exist but cannot be loaded.
*
* @param message The exception message,
* @param message The exception message,
* @param throwable The underlying cause of the exception.
*/
FileTypeDetectorInitException(String message, Throwable throwable) {
@@ -366,7 +409,7 @@ public class FileTypeDetector {
* @return A MIME type name.
*
* @throws TskCoreException if detection is required and there is a problem
* writing the result to the case database.
* writing the result to the case database.
* @deprecated Use getMIMEType instead, and call AbstractFile.setMIMEType
* and AbstractFile.save to save the result to the file object and the
* database.
@@ -386,10 +429,10 @@ public class FileTypeDetector {
* @param file The file.
*
* @return A MIME type name. If file type could not be detected or results
* were uncertain, octet-stream is returned.
* were uncertain, octet-stream is returned.
*
* @throws TskCoreException if detection is required and there is a problem
* writing the result to the case database.
* writing the result to the case database.
*
* @deprecated Use getMIMEType instead, and call AbstractFile.setMIMEType
* and AbstractFile.save to save the result to the file object and the
@@ -410,7 +453,7 @@ public class FileTypeDetector {
* @param file The file to test.
*
* @return A MIME type name. If file type could not be detected or results
* were uncertain, octet-stream is returned.
* were uncertain, octet-stream is returned.
*
* @throws TskCoreException
* @deprecated Use getMIMEType instead.

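A standalone sketch of the new audio/mpeg sanity check (JIRA-4659): read the first two bytes of the file and, if both are 0xFF, treat Tika's audio/mpeg answer as untrustworthy and fall back to octet-stream. The file read is replaced here by a plain byte array, and the 0xFF test is a simplified equivalent of the committed byteIs0xFF helper, which checks the low and high nibbles separately.

    public class MpegMimeCheckSketch {
        private static final String OCTET_STREAM = "application/octet-stream";

        // True when every bit of the byte is set, i.e. the byte equals 0xFF.
        static boolean byteIs0xFF(byte x) {
            return (x & 0xFF) == 0xFF;
        }

        // Downgrades a suspect audio/mpeg result when the header starts with 0xFF 0xFF.
        static String verifyMpegType(String mimeType, byte[] firstTwoBytes) {
            if (mimeType.contains("audio/mpeg")
                    && firstTwoBytes.length >= 2
                    && byteIs0xFF(firstTwoBytes[0])
                    && byteIs0xFF(firstTwoBytes[1])) {
                return OCTET_STREAM;
            }
            return mimeType;
        }

        public static void main(String[] args) {
            byte[] suspect = {(byte) 0xFF, (byte) 0xFF};
            byte[] mp3FrameSync = {(byte) 0xFF, (byte) 0xFB};
            System.out.println(verifyMpegType("audio/mpeg", suspect));      // application/octet-stream
            System.out.println(verifyMpegType("audio/mpeg", mp3FrameSync)); // audio/mpeg
        }
    }
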
@@ -50,6 +50,7 @@ import org.apache.tika.parser.ParsingReader;
import org.apache.tika.parser.microsoft.OfficeParserConfig;
import org.apache.tika.parser.ocr.TesseractOCRConfig;
import org.apache.tika.parser.pdf.PDFParserConfig;
import org.apache.tika.mime.MediaType;
import org.openide.util.NbBundle;
import org.openide.modules.InstalledFileLocator;
import org.openide.util.Lookup;
@@ -125,7 +126,7 @@ final class TikaTextExtractor implements TextExtractor {
private final ExecutorService executorService = Executors.newSingleThreadExecutor(tikaThreadFactory);
private static final String SQLITE_MIMETYPE = "application/x-sqlite3";

private final AutoDetectParser parser = new AutoDetectParser();
private final AutoDetectParser parser;
private final Content content;

private boolean tesseractOCREnabled;
@@ -145,12 +146,23 @@ final class TikaTextExtractor implements TextExtractor {

public TikaTextExtractor(Content content) {
this.content = content;

parser = new AutoDetectParser();

if (content instanceof AbstractFile) {
AbstractFile file = (AbstractFile) content;
if (file.getMIMEType() != null && !file.getMIMEType().isEmpty()) {
//Force Tika to use our pre-computed mime type during detection
parser.setDetector((InputStream inStream, Metadata metaData)
-> MediaType.parse(file.getMIMEType()));
}
}
}

/**
* If Tesseract has been installed and is set to be used through
* configuration, then ocr is enabled. OCR can only currently be run on
* 64 bit Windows OS.
* configuration, then ocr is enabled. OCR can only currently be run on 64
* bit Windows OS.
*
* @return Flag indicating if OCR is set to be used.
*/
@@ -422,11 +434,11 @@ final class TikaTextExtractor implements TextExtractor {
*/
@Override
public boolean isSupported() {
if(!(content instanceof AbstractFile)) {
if (!(content instanceof AbstractFile)) {
return false;
}

String detectedType = ((AbstractFile)content).getMIMEType();
String detectedType = ((AbstractFile) content).getMIMEType();
if (detectedType == null
|| BINARY_MIME_TYPES.contains(detectedType) //any binary unstructured blobs (string extraction will be used)
|| ARCHIVE_MIME_TYPES.contains(detectedType)
@@ -485,11 +497,11 @@ final class TikaTextExtractor implements TextExtractor {
if (context != null) {
ImageConfig configInstance = context.lookup(ImageConfig.class);
if (configInstance != null) {
if(Objects.nonNull(configInstance.getOCREnabled())) {
if (Objects.nonNull(configInstance.getOCREnabled())) {
this.tesseractOCREnabled = configInstance.getOCREnabled();
}

if(Objects.nonNull(configInstance.getOCRLanguages())) {
if (Objects.nonNull(configInstance.getOCRLanguages())) {
this.languagePacks = formatLanguagePacks(configInstance.getOCRLanguages());
}
}

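For reference, a minimal sketch of the technique the TikaTextExtractor constructor now uses: when a MIME type has already been computed (in the committed code it comes from AbstractFile.getMIMEType()), install a Detector on the AutoDetectParser that simply returns that type, so Tika skips re-detection. The input bytes and the text/plain type below are placeholders.

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import org.apache.tika.exception.TikaException;
    import org.apache.tika.metadata.Metadata;
    import org.apache.tika.mime.MediaType;
    import org.apache.tika.parser.AutoDetectParser;
    import org.apache.tika.sax.BodyContentHandler;
    import org.xml.sax.SAXException;

    public class ForcedMimeTypeSketch {
        public static void main(String[] args) throws IOException, SAXException, TikaException {
            String precomputedMimeType = "text/plain"; // e.g. a value previously stored for the file

            AutoDetectParser parser = new AutoDetectParser();
            // Detector has a single abstract method, so a lambda can pin the detection result.
            parser.setDetector((InputStream inStream, Metadata metaData) -> MediaType.parse(precomputedMimeType));

            BodyContentHandler handler = new BodyContentHandler();
            Metadata metadata = new Metadata();
            try (InputStream in = new ByteArrayInputStream("hello tika".getBytes("UTF-8"))) {
                parser.parse(in, handler, metadata);
            }
            System.out.println(handler.toString().trim()); // parsed as plain text, no re-detection
        }
    }
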
@@ -0,0 +1,44 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.experimental.configuration;

import javax.swing.JPanel;

/**
* Interface to run an ingest job in the background.
*/
public interface IngestJobRunningService {

/**
* Starts the service
*/
void start();

/**
* Stops the service
*/
void stop();

/**
* Returns a panel to be displayed while using this service
*
* @return panel to be displayed while using this service
*/
JPanel getStartupWindow();
}

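A hypothetical implementation of the new interface, only to show the expected shape of a service: the class name and panel contents are invented, and a real service would do its background work where the comments indicate.

    import javax.swing.JLabel;
    import javax.swing.JPanel;
    import org.sleuthkit.autopsy.experimental.configuration.IngestJobRunningService;

    // Hypothetical service that pretends to run an ingest job in the background.
    public class SampleIngestJobRunningService implements IngestJobRunningService {

        private final JPanel startupPanel = new JPanel();
        private volatile boolean running;

        public SampleIngestJobRunningService() {
            startupPanel.add(new JLabel("Ingest job is running..."));
        }

        @Override
        public void start() {
            running = true; // a real service would start its background work here
        }

        @Override
        public void stop() {
            running = false; // and tear it down here
        }

        @Override
        public JPanel getStartupWindow() {
            return startupPanel;
        }
    }
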
@@ -320,7 +320,8 @@ class TskDbDiff(object):
id_objects_table = build_id_objects_table(conn.cursor(), isMultiUser)
id_artifact_types_table = build_id_artifact_types_table(conn.cursor(), isMultiUser)
id_reports_table = build_id_reports_table(conn.cursor(), isMultiUser)
id_obj_path_table = build_id_obj_path_table(id_files_table, id_objects_table, id_artifact_types_table, id_reports_table)
id_images_table = build_id_image_names_table(conn.cursor(), isMultiUser)
id_obj_path_table = build_id_obj_path_table(id_files_table, id_objects_table, id_artifact_types_table, id_reports_table, id_images_table)

if isMultiUser: # Use PostgreSQL
os.environ['PGPASSWORD']=pgSettings.password
@@ -333,14 +334,17 @@ class TskDbDiff(object):
for line in postgreSQL_db:
line = line.strip('\r\n ')
# Deal with pg_dump result file
if (line.startswith('--') or line.lower().startswith('alter') or "pg_catalog" in line or "idle_in_transaction_session_timeout" in line or not line or 'INSERT INTO "image_gallery_groups_seen"' in line): # It's comment or alter statement or catalog entry or set idle entry or empty line
if (line.startswith('--') or line.lower().startswith('alter') or "pg_catalog" in line or "idle_in_transaction_session_timeout" in line or not line): # It's comment or alter statement or catalog entry or set idle entry or empty line
continue
elif not line.endswith(';'): # Statement not finished
dump_line += line
continue
else:
dump_line += line
dump_line = normalize_db_entry(dump_line, id_obj_path_table, id_vs_parts_table, id_vs_info_table, id_fs_info_table, id_objects_table, id_reports_table)
if 'INSERT INTO image_gallery_groups_seen' in dump_line:
dump_line = ''
continue;
dump_line = normalize_db_entry(dump_line, id_obj_path_table, id_vs_parts_table, id_vs_info_table, id_fs_info_table, id_objects_table, id_reports_table, id_images_table)
db_log.write('%s\n' % dump_line)
dump_line = ''
postgreSQL_db.close()
@@ -354,7 +358,7 @@ class TskDbDiff(object):
for line in conn.iterdump():
if 'INSERT INTO "image_gallery_groups_seen"' in line:
continue
line = normalize_db_entry(line, id_obj_path_table, id_vs_parts_table, id_vs_info_table, id_fs_info_table, id_objects_table, id_reports_table)
line = normalize_db_entry(line, id_obj_path_table, id_vs_parts_table, id_vs_info_table, id_fs_info_table, id_objects_table, id_reports_table, id_images_table)
db_log.write('%s\n' % line)
# Now sort the file
srtcmdlst = ["sort", dump_file, "-o", dump_file]
@@ -406,7 +410,7 @@ class PGSettings(object):
return self.password

def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info_table, objects_table, reports_table):
def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info_table, objects_table, reports_table, images_table):
""" Make testing more consistent and reasonable by doctoring certain db entries.

Args:
@@ -515,6 +519,8 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
parent_path = vs_info_table[parent_id]
elif parent_id in fs_info_table.keys():
parent_path = fs_info_table[parent_id]
elif parent_id in images_table.keys():
parent_path = images_table[parent_id]
elif parent_id == 'NULL':
parent_path = "NULL"
@@ -620,6 +626,18 @@ def build_id_objects_table(db_cursor, isPostgreSQL):
mapping = dict([(row[0], [row[1], row[2]]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT * FROM tsk_objects")])
return mapping

def build_id_image_names_table(db_cursor, isPostgreSQL):
"""Build the map of object ids to name.

Args:
db_cursor: the database cursor
"""
# for each row in the db, take the object id and name then create a tuple in the dictionary
# with the object id as the key and name, type as the value
mapping = dict([(row[0], row[1]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT obj_id, name FROM tsk_image_names WHERE sequence=0")])
#data_sources which are logical file sets will be found in the files table
return mapping

def build_id_artifact_types_table(db_cursor, isPostgreSQL):
"""Build the map of object ids to artifact ids.
@@ -642,7 +660,7 @@ def build_id_reports_table(db_cursor, isPostgreSQL):
return mapping

def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports_table):
def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports_table, images_table):
"""Build the map of object ids to artifact ids.

Args:
@@ -654,6 +672,7 @@ def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports
# make a copy of files_table and update it with new data from artifacts_table and reports_table
mapping = files_table.copy()
for k, v in objects_table.items():
path = ""
if k not in mapping.keys(): # If the mapping table doesn't have data for obj_id
if k in reports_table.keys(): # For a report we use the report path
par_obj_id = v[0]
@@ -665,6 +684,8 @@ def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports
path = mapping[par_obj_id]
elif par_obj_id in reports_table.keys():
path = reports_table[par_obj_id]
elif par_obj_id in images_table.keys():
path = images_table[par_obj_id]
mapping[k] = path + "/" + artifacts_table[k]
elif v[0] not in mapping.keys():
if v[0] in artifacts_table.keys():