diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.java old mode 100644 new mode 100755 index a84dffadd7..64503d39fe --- a/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.java @@ -22,16 +22,24 @@ import java.awt.BorderLayout; import java.awt.Component; import java.awt.Cursor; import java.io.File; -import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.logging.Level; -import java.util.stream.Collectors; import javax.swing.JComboBox; import javax.swing.JFileChooser; import javax.swing.JOptionPane; @@ -43,11 +51,9 @@ import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; -import org.sleuthkit.autopsy.tabulardatareader.AbstractReader; -import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException; -import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException; -import org.sleuthkit.autopsy.tabulardatareader.FileReaderFactory; +import org.sleuthkit.autopsy.coreutils.SqliteUtil; /** * A file content viewer for SQLite database files. 
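For context while reviewing: the hunks below replace the old AbstractReader/FileReaderFactory indirection with direct JDBC access against a local copy of the database. The following is a minimal sketch of that copy-then-query pattern, assuming the SqliteUtil helpers and JDBC imports introduced by this patch; the listTables helper name is hypothetical and is not part of the change itself.

    // Illustrative sketch only -- not part of this patch.
    // Shows the copy-then-query flow that the viewer and extractor now share.
    private List<String> listTables(AbstractFile dbFile) throws Exception {
        // The real code catches ClassNotFoundException, SQLException, IOException,
        // NoCurrentCaseException, and TskCoreException individually (see processSQLiteFile()).

        // Copy the database and any -wal/-shm meta files into the case temp directory.
        String localDiskPath = SqliteUtil.writeAbstractFileToLocalDisk(dbFile);
        SqliteUtil.findAndCopySQLiteMetaFile(dbFile);

        // Open a JDBC connection against the local copy and list its tables.
        Class.forName("org.sqlite.JDBC");
        List<String> tableNames = new ArrayList<>();
        try (Connection connection = DriverManager.getConnection("jdbc:sqlite:" + localDiskPath);
                Statement statement = connection.createStatement();
                ResultSet resultSet = statement.executeQuery(
                        "SELECT name FROM sqlite_master WHERE type = 'table'")) {
            while (resultSet.next()) {
                tableNames.add(resultSet.getString("name"));
            }
        }
        return tableNames;
    }

The same pattern appears in processSQLiteFile() below and in the new SQLiteTableReader added to SqliteTextExtractor.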
@@ -62,7 +68,7 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer { private final SQLiteTableView selectedTableView = new SQLiteTableView(); private AbstractFile sqliteDbFile; private File tmpDbFile; - private AbstractReader sqliteReader; + private Connection connection; private int numRows; // num of rows in the selected table private int currPage = 0; // curr page of rows being displayed @@ -339,9 +345,13 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer { numEntriesField.setText(""); // close DB connection to file - if (null != sqliteReader) { - sqliteReader.close(); - sqliteReader = null; + if (null != connection) { + try { + connection.close(); + connection = null; + } catch (SQLException ex) { + logger.log(Level.SEVERE, "Failed to close DB connection to file.", ex); //NON-NLS + } } sqliteDbFile = null; @@ -358,40 +368,66 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer { "SQLiteViewer.errorMessage.failedToQueryDatabase=The database tables in the file could not be read.", "SQLiteViewer.errorMessage.failedToinitJDBCDriver=The JDBC driver for SQLite could not be loaded.", "# {0} - exception message", "SQLiteViewer.errorMessage.unexpectedError=An unexpected error occurred:\n{0).",}) - private void processSQLiteFile() { + private void processSQLiteFile() { + tablesDropdownList.removeAllItems(); + try { - sqliteReader = FileReaderFactory.createReader(sqliteDbFile, SUPPORTED_MIMETYPES[0]); - - Map dbTablesMap = sqliteReader.getTableSchemas(); - + String localDiskPath = SqliteUtil.writeAbstractFileToLocalDisk(sqliteDbFile); + SqliteUtil.findAndCopySQLiteMetaFile(sqliteDbFile); + // Load the SQLite JDBC driver, if necessary. + Class.forName("org.sqlite.JDBC"); //NON-NLS + connection = DriverManager.getConnection("jdbc:sqlite:" + localDiskPath); //NON-NLS + + Collection dbTablesMap = getTables(); if (dbTablesMap.isEmpty()) { tablesDropdownList.addItem(Bundle.SQLiteViewer_comboBox_noTableEntry()); tablesDropdownList.setEnabled(false); } else { - dbTablesMap.keySet().forEach((tableName) -> { + dbTablesMap.forEach((tableName) -> { tablesDropdownList.addItem(tableName); }); } - } catch (FileReaderException ex) { - logger.log(Level.SEVERE, String.format( - "Failed to get tables from DB file '%s' (objId=%d)", //NON-NLS - sqliteDbFile.getName(), sqliteDbFile.getId()), ex); - MessageNotifyUtil.Message.error( - Bundle.SQLiteViewer_errorMessage_failedToQueryDatabase()); - } catch (FileReaderInitException ex) { - logger.log(Level.SEVERE, String.format( - "Failed to create a SQLiteReader for file: '%s' (objId=%d)", //NON-NLS - sqliteDbFile.getName(), sqliteDbFile.getId()), ex); + } catch (ClassNotFoundException ex) { + logger.log(Level.SEVERE, String.format("Failed to initialize JDBC SQLite '%s' (objId=%d)", sqliteDbFile.getName(), sqliteDbFile.getId()), ex); //NON-NLS + MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_errorMessage_failedToinitJDBCDriver()); + } catch (SQLException ex) { + logger.log(Level.SEVERE, String.format("Failed to get tables from DB file '%s' (objId=%d)", sqliteDbFile.getName(), sqliteDbFile.getId()), ex); //NON-NLS + MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_errorMessage_failedToQueryDatabase()); + } catch (IOException | NoCurrentCaseException | TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create temp copy of DB file '%s' (objId=%d)", sqliteDbFile.getName(), sqliteDbFile.getId()), ex); //NON-NLS + 
MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_errorMessage_failedToExtractFile()); } } + /** + * Gets the table names and schemas from the SQLite database file. + * + * @return A mapping of table names to SQL CREATE TABLE statements. + */ + private Collection getTables() throws SQLException { + Collection tableNames = new LinkedList<>(); + try (Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery( + "SELECT name FROM sqlite_master " + + " WHERE type= 'table' ")){ + while (resultSet.next()) { + tableNames.add(resultSet.getString("name")); //NON-NLS + } + } + return tableNames; + } + @NbBundle.Messages({"# {0} - tableName", "SQLiteViewer.selectTable.errorText=Error getting row count for table: {0}" }) private void selectTable(String tableName) { - try { - numRows = sqliteReader.getRowCountFromTable(tableName); + + try (Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery( + "SELECT count (*) as count FROM " + "\"" + tableName + "\"")) { //NON-NLS{ + + numRows = resultSet.getInt("count"); numEntriesField.setText(numRows + " entries"); currPage = 1; @@ -410,12 +446,9 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer { selectedTableView.setupTable(Collections.emptyList()); } - } catch (FileReaderException ex) { - logger.log(Level.SEVERE, String.format( - "Failed to load table %s from DB file '%s' (objId=%d)", tableName, //NON-NLS - sqliteDbFile.getName(), sqliteDbFile.getId()), ex); - MessageNotifyUtil.Message.error( - Bundle.SQLiteViewer_selectTable_errorText(tableName)); + } catch (SQLException ex) { + logger.log(Level.SEVERE, String.format("Failed to load table %s from DB file '%s' (objId=%d)", tableName, sqliteDbFile.getName(), sqliteDbFile.getId()), ex); //NON-NLS + MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_selectTable_errorText(tableName)); } } @@ -423,108 +456,109 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer { "SQLiteViewer.readTable.errorText=Error getting rows for table: {0}"}) private void readTable(String tableName, int startRow, int numRowsToRead) { - try { - List> rows = sqliteReader.getRowsFromTable( - tableName, startRow, numRowsToRead); + try ( + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery( + "SELECT * FROM " + "\"" + tableName + "\"" + + " LIMIT " + Integer.toString(numRowsToRead) + + " OFFSET " + Integer.toString(startRow - 1))) { + + List> rows = resultSetToArrayList(resultSet); if (Objects.nonNull(rows)) { selectedTableView.setupTable(rows); } else { selectedTableView.setupTable(Collections.emptyList()); } - } catch (FileReaderException ex) { - logger.log(Level.SEVERE, String.format( - "Failed to read table %s from DB file '%s' (objId=%d)", tableName, //NON-NLS - sqliteDbFile.getName(), sqliteDbFile.getId()), ex); - MessageNotifyUtil.Message.error( - Bundle.SQLiteViewer_readTable_errorText(tableName)); + } catch (SQLException ex) { + logger.log(Level.SEVERE, String.format("Failed to read table %s from DB file '%s' (objId=%d)", tableName, sqliteDbFile.getName(), sqliteDbFile.getId()), ex); //NON-NLS + MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_readTable_errorText(tableName)); } } - - /** - * Converts a sqlite table into a CSV file. - * - * @param file - * @param tableName - * @param rowMap A list of rows in the table, where each row is represented as a column-value - * map. 
- * @throws FileNotFoundException - * @throws IOException - */ - @NbBundle.Messages({ - "SQLiteViewer.exportTableToCsv.FileName=File name: ", - "SQLiteViewer.exportTableToCsv.TableName=Table name: " - }) - public void exportTableToCSV(File file, String tableName, - List> rowMap) throws FileNotFoundException, IOException{ - - File csvFile; - String fileName = file.getName(); - if (FilenameUtils.getExtension(fileName).equalsIgnoreCase("csv")) { - csvFile = file; - } else { - csvFile = new File(file.toString() + ".csv"); - } - try (FileOutputStream out = new FileOutputStream(csvFile, false)) { - - out.write((Bundle.SQLiteViewer_exportTableToCsv_FileName() + csvFile.getName() + "\n").getBytes()); - out.write((Bundle.SQLiteViewer_exportTableToCsv_TableName() + tableName + "\n").getBytes()); - - String header = createColumnHeader(rowMap.get(0)).concat("\n"); - out.write(header.getBytes()); - - for (Map maps : rowMap) { - String row = maps.values() - .stream() - .map(Object::toString) - .collect(Collectors.joining(",")) - .concat("\n"); - out.write(row.getBytes()); + @NbBundle.Messages("SQLiteViewer.BlobNotShown.message=BLOB Data not shown") + private List> resultSetToArrayList(ResultSet resultSet) throws SQLException { + ResultSetMetaData metaData = resultSet.getMetaData(); + int columns = metaData.getColumnCount(); + ArrayList> rowlist = new ArrayList<>(); + while (resultSet.next()) { + Map row = new LinkedHashMap<>(columns); + for (int i = 1; i <= columns; ++i) { + if (resultSet.getObject(i) == null) { + row.put(metaData.getColumnName(i), ""); + } else { + if (metaData.getColumnTypeName(i).compareToIgnoreCase("blob") == 0) { + row.put(metaData.getColumnName(i), Bundle.SQLiteViewer_BlobNotShown_message()); + } else { + row.put(metaData.getColumnName(i), resultSet.getObject(i)); + } + } } + rowlist.add(row); } + + return rowlist; } - @NbBundle.Messages({ - "SQLiteViewer.exportTableToCsv.write.errText=Failed to export table content to csv file.", + @NbBundle.Messages({"SQLiteViewer.exportTableToCsv.write.errText=Failed to export table content to csv file.", + "SQLiteViewer.exportTableToCsv.FileName=File name: ", + "SQLiteViewer.exportTableToCsv.TableName=Table name: " }) private void exportTableToCsv(File file) { String tableName = (String) this.tablesDropdownList.getSelectedItem(); - try { - List> currentTableRows = - sqliteReader.getRowsFromTable(tableName); + try ( + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery("SELECT * FROM " + "\"" + tableName + "\"")) { + List> currentTableRows = resultSetToArrayList(resultSet); if (Objects.isNull(currentTableRows) || currentTableRows.isEmpty()) { - logger.log(Level.INFO, String.format( - "The table %s is empty. (objId=%d)", tableName, //NON-NLS - sqliteDbFile.getId())); + logger.log(Level.INFO, String.format("The table %s is empty. 
(objId=%d)", tableName, sqliteDbFile.getId())); //NON-NLS } else { - exportTableToCSV(file, tableName, currentTableRows); + File csvFile; + String fileName = file.getName(); + if (FilenameUtils.getExtension(fileName).equalsIgnoreCase("csv")) { + csvFile = file; + } else { + csvFile = new File(file.toString() + ".csv"); + } + + try (FileOutputStream out = new FileOutputStream(csvFile, false)) { + + out.write((Bundle.SQLiteViewer_exportTableToCsv_FileName() + csvFile.getName() + "\n").getBytes()); + out.write((Bundle.SQLiteViewer_exportTableToCsv_TableName() + tableName + "\n").getBytes()); + // Set up the column names + Map row = currentTableRows.get(0); + StringBuffer header = new StringBuffer(); + for (Map.Entry col : row.entrySet()) { + String colName = col.getKey(); + if (header.length() > 0) { + header.append(',').append(colName); + } else { + header.append(colName); + } + } + out.write(header.append('\n').toString().getBytes()); + + for (Map maps : currentTableRows) { + StringBuffer valueLine = new StringBuffer(); + maps.values().forEach((value) -> { + if (valueLine.length() > 0) { + valueLine.append(',').append(value.toString()); + } else { + valueLine.append(value.toString()); + } + }); + out.write(valueLine.append('\n').toString().getBytes()); + } + } } - } catch (FileReaderException ex) { - logger.log(Level.SEVERE, String.format( - "Failed to read table %s from DB file '%s' (objId=%d)", //NON-NLS - tableName, sqliteDbFile.getName(), sqliteDbFile.getId()), ex); - MessageNotifyUtil.Message.error( - Bundle.SQLiteViewer_readTable_errorText(tableName)); + } catch (SQLException ex) { + logger.log(Level.SEVERE, String.format("Failed to read table %s from DB file '%s' (objId=%d)", tableName, sqliteDbFile.getName(), sqliteDbFile.getId()), ex); //NON-NLS + MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_readTable_errorText(tableName)); } catch (IOException ex) { - logger.log(Level.SEVERE, String.format( - "Failed to export table %s to file '%s'", tableName, file.getName()), ex); //NON-NLS - MessageNotifyUtil.Message.error( - Bundle.SQLiteViewer_exportTableToCsv_write_errText()); + logger.log(Level.SEVERE, String.format("Failed to export table %s to file '%s'", tableName, file.getName()), ex); //NON-NLS + MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_exportTableToCsv_write_errText()); } } + - /** - * Returns a comma seperated header string from the keys of the column - * row map. - * - * @param row column header row map - * @return comma seperated header string - */ - private String createColumnHeader(Map row) { - return row.entrySet() - .stream() - .map(Map.Entry::getKey) - .collect(Collectors.joining(",")); - } } diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/SqliteUtil.java b/Core/src/org/sleuthkit/autopsy/coreutils/SqliteUtil.java new file mode 100755 index 0000000000..4250487298 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/coreutils/SqliteUtil.java @@ -0,0 +1,130 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2018-2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.coreutils;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.casemodule.services.FileManager;
+import org.sleuthkit.autopsy.casemodule.services.Services;
+import org.sleuthkit.autopsy.datamodel.ContentUtils;
+import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.TskCoreException;
+
+/**
+ * SQLite utility class. Finds and copies meta files, writes SQLite abstract
+ * files to the case temp directory, and generates unique temp directory paths.
+ */
+public final class SqliteUtil {
+
+    private SqliteUtil() {
+
+    }
+
+    /**
+     * Overloaded implementation of
+     * {@link #findAndCopySQLiteMetaFile(AbstractFile, String) findAndCopySQLiteMetaFile},
+     * which automatically tries to copy the -wal and -shm files without
+     * needing to know whether they exist.
+     *
+     * @param sqliteFile file which has -wal and -shm meta files
+     *
+     * @throws NoCurrentCaseException Case has been closed.
+     * @throws TskCoreException       fileManager cannot find AbstractFile files.
+     * @throws IOException            Issue during writing to file.
+     */
+    public static void findAndCopySQLiteMetaFile(AbstractFile sqliteFile)
+            throws NoCurrentCaseException, TskCoreException, IOException {
+
+        findAndCopySQLiteMetaFile(sqliteFile, sqliteFile.getName() + "-wal");
+        findAndCopySQLiteMetaFile(sqliteFile, sqliteFile.getName() + "-shm");
+    }
+
+    /**
+     * Searches for a meta file associated with the given SQLite database. If
+     * found, it copies this file into the temp directory of the current case.
+     *
+     * @param sqliteFile   file being processed
+     * @param metaFileName name of meta file to look for
+     *
+     * @throws NoCurrentCaseException Case has been closed.
+     * @throws TskCoreException       fileManager cannot find AbstractFile files.
+     * @throws IOException            Issue during writing to file.
+     */
+    public static void findAndCopySQLiteMetaFile(AbstractFile sqliteFile,
+            String metaFileName) throws NoCurrentCaseException, TskCoreException, IOException {
+
+        Case openCase = Case.getCurrentCaseThrows();
+        SleuthkitCase sleuthkitCase = openCase.getSleuthkitCase();
+        Services services = new Services(sleuthkitCase);
+        FileManager fileManager = services.getFileManager();
+
+        List<AbstractFile> metaFiles = fileManager.findFiles(
+                sqliteFile.getDataSource(), metaFileName,
+                sqliteFile.getParent().getName());
+
+        if (metaFiles != null) {
+            for (AbstractFile metaFile : metaFiles) {
+                writeAbstractFileToLocalDisk(metaFile);
+            }
+        }
+    }
+
+    /**
+     * Copies the file contents into a unique path in the current case temp
+     * directory.
+     *
+     * @param file AbstractFile from the data source
+     *
+     * @return The path of the file on disk
+     *
+     * @throws IOException            Exception writing file contents
+     * @throws NoCurrentCaseException Current case closed during file copying
+     */
+    public static String writeAbstractFileToLocalDisk(AbstractFile file)
+            throws IOException, NoCurrentCaseException {
+
+        String localDiskPath = getUniqueTempDirectoryPath(file);
+        File localDatabaseFile = new File(localDiskPath);
+        if (!localDatabaseFile.exists()) {
+            ContentUtils.writeToFile(file, localDatabaseFile);
+        }
+        return localDiskPath;
+    }
+
+    /**
+     * Generates a unique local disk path that resides in the temp directory of
+     * the current case.
+ * + * @param file The database abstract file + * + * @return Unique local disk path living in the temp directory of the case + * + * @throws org.sleuthkit.autopsy.casemodule.NoCurrentCaseException + */ + public static String getUniqueTempDirectoryPath(AbstractFile file) throws NoCurrentCaseException { + return Case.getCurrentCaseThrows().getTempDirectory() + + File.separator + file.getId() + file.getName(); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java deleted file mode 100755 index 69e37cd825..0000000000 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java +++ /dev/null @@ -1,185 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2018-2018 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.tabulardatareader; - -import java.io.File; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; -import org.sleuthkit.autopsy.datamodel.ContentUtils; -import org.sleuthkit.datamodel.Content; -import org.sleuthkit.datamodel.TskCoreException; - -/** - * An abstract reader interface for retrieving contents from files via a common - * API. - */ -public abstract class AbstractReader implements AutoCloseable { - - private final String localDiskPath; - - public AbstractReader(Content file) - throws FileReaderInitException { - - try { - localDiskPath = getLocalDiskPath(file); - writeDataSourceToLocalDisk(file); - } catch (FileReaderInitException ex) { - throw new FileReaderInitException(ex); - } - } - - /** - * Copies the data source file contents to local drive for processing. - * This function is common to all readers. - * - * @param file AbstractFile from the data source - * @param localDiskPath Local drive path to copy AbstractFile contents - * @throws IOException Exception writing file contents - * @throws NoCurrentCaseException Current case closed during file copying - * @throws TskCoreException Exception finding files from abstract file - */ - private void writeDataSourceToLocalDisk(Content file) - throws FileReaderInitException { - - try { - File localDatabaseFile = new File(localDiskPath); - if (!localDatabaseFile.exists()) { - ContentUtils.writeToFile(file, localDatabaseFile); - } - } catch (IOException ex) { - throw new FileReaderInitException(ex); - } - } - - public String getLocalDiskPath() { - return localDiskPath; - } - - /** - * Generates a local disk path for abstract file contents to be copied. All - * file sources must be copied to local disk to be opened by abstract - * reader. 
- * - * @param file The database abstract file - * - * @return Valid local path for copying - * @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException - * - */ - private String getLocalDiskPath(Content file) throws FileReaderInitException { - try { - return Case.getCurrentCaseThrows().getTempDirectory() - + File.separator + file.getId() + file.getName(); - } catch(NoCurrentCaseException ex) { - throw new FileReaderInitException("No current case open when trying to get temp directory", ex); - } - } - - /** - * Return the a mapping of table names to table schemas (may be in the form of - * headers or create table statements for databases). - * - * @return Mapping of table names to schemas - * @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException - */ - public abstract Map getTableSchemas() throws FileReaderException; - - /** - * Returns the row count fo the given table name. - * - * @param tableName - * @return number of rows in the current table - * @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException - */ - public abstract Integer getRowCountFromTable(String tableName) throws FileReaderException; - - /** - * Returns a collection view of the rows in a table. - * - * @param tableName - * @return List view of the rows in the table - * @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException - */ - public abstract List> getRowsFromTable(String tableName) throws FileReaderException; - - /** - * Returns a map of column names to a list of column values. - * - * @param tableName - * @return A map of column names to a list of column values - * @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException - */ - public abstract Map> getColumnsFromTable(String tableName) throws FileReaderException; - - /** - * Returns a window of rows starting at the offset and ending when the number of rows read - * equals the 'numRowsToRead' parameter or there is nothing left to read. - * - * @param tableName table name to be read from - * @param offset start index to begin reading - * @param numRowsToRead number of rows to read past offset - * @return List view of the rows in the table - * @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException - */ - public abstract List> getRowsFromTable(String tableName, - int offset, int numRowsToRead) throws FileReaderException; - - @Override - public abstract void close(); - - /** - * Checked exceptions are specific to a given implementation, so this custom - * exception allows for a common interface to accommodate all of them. Init - * exception allows for more flexibility in logging. - */ - public static class FileReaderInitException extends Exception { - public FileReaderInitException(String message, Throwable cause) { - super(message, cause); - } - - public FileReaderInitException(Throwable cause) { - super(cause); - } - - public FileReaderInitException(String message) { - super(message); - } - } - - /** - * Checked exceptions are specific to a given implementation, so this custom - * exception allows for a common interface to accommodate all of them. 
- */ - public class FileReaderException extends Exception { - public FileReaderException(String message, Throwable cause) { - super(message, cause); - } - - public FileReaderException(Throwable cause) { - super(cause); - } - - public FileReaderException(String message) { - super(message); - } - } -} \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java deleted file mode 100755 index d9b4a124f5..0000000000 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java +++ /dev/null @@ -1,313 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2018-2018 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.tabulardatareader; - -import static com.google.common.collect.Lists.newArrayList; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.logging.Level; -import org.apache.commons.lang3.StringUtils; -import org.apache.poi.hssf.usermodel.HSSFWorkbook; -import org.apache.poi.ss.usermodel.Cell; -import org.apache.poi.ss.usermodel.DateUtil; -import org.apache.poi.ss.usermodel.Row; -import org.apache.poi.ss.usermodel.Sheet; -import org.apache.poi.ss.usermodel.Workbook; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.ingest.IngestServices; -import com.monitorjbl.xlsx.StreamingReader; -import org.apache.poi.hssf.OldExcelFormatException; -import org.sleuthkit.datamodel.AbstractFile; - -/** - * Reads excel files and implements the abstract reader api for interfacing with - * the content. Supports .xls and .xlsx files. - */ -public final class ExcelReader extends AbstractReader { - - private final static IngestServices services = IngestServices.getInstance(); - private final static Logger logger = services.getLogger(ExcelReader.class.getName()); - - private Workbook workbook; - private final static String XLSX_MIME_TYPE = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"; - private final static String XLS_MIME_TYPE = "application/vnd.ms-excel"; - private final static String EMPTY_CELL_STRING = ""; - - private String LOCAL_DISK_PATH; - private String ACTIVE_MIME_TYPE; - - public ExcelReader(AbstractFile file, String mimeType) - throws FileReaderInitException { - super(file); - this.LOCAL_DISK_PATH = super.getLocalDiskPath(); - this.ACTIVE_MIME_TYPE = mimeType; - - try { - this.workbook = createWorkbook(); - } catch (IOException ex) { - throw new FileReaderInitException(ex); - } - } - - /** - * Internal factory for creating the correct workbook given the mime type. - * The file reader factory in this module passes both the XLSMimeType and - * XLSXMimeType into this constructor for the reader to handle. 
This avoided - * the need for creating an AbstractExcelReader class and two sub classes - * overriding the workbook field. Additionally, I don't forsee needing to - * support more than these two mime types. - * - * - * @return The corrent workbook instance - * - * @throws IOException Issue with input stream and opening file - * location at localDiskPath - * @throws FileReaderInitException mimetype unsupported - */ - private Workbook createWorkbook() throws - IOException, FileReaderInitException { - switch (ACTIVE_MIME_TYPE) { - case XLS_MIME_TYPE: - try { - //Apache POI only supports BIFF8 format, anything below is considered - //old excel format and is not a concern for us. - return new HSSFWorkbook(new FileInputStream(new File(LOCAL_DISK_PATH))); - } catch (OldExcelFormatException e) { - throw new FileReaderInitException(e); - } - case XLSX_MIME_TYPE: - //StreamingReader is part of the xlsx streamer dependency that creates - //a streaming version of XSSFWorkbook for reading (SXSSFWorkbook is only for writing - //large workbooks, not reading). This libary provides a workbook interface - //that is mostly identical to the poi workbook api, hence both the HSSFWorkbook - //and this can use the same functions below. - return StreamingReader.builder().rowCacheSize(500).open(new File(LOCAL_DISK_PATH)); - default: - throw new FileReaderInitException(String.format("Excel reader for mime " - + "type [%s] is not supported", ACTIVE_MIME_TYPE)); - } - } - - /** - * Returns the number of rows in a given excel table (aka sheet). - * - * @param tableName Name of table to count total rows from - * - * @return row count for requested table name - * - * @throws - * org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException - */ - @Override - public Integer getRowCountFromTable(String tableName) throws FileReaderException { - return workbook.getSheet(tableName).getLastRowNum(); - } - - /** - * Returns a collection of all the rows from a given table in an excel - * document. - * - * @param tableName Current sheet name being read - * - * @return A collection of row maps - * - * @throws - * org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException - */ - @Override - public List> getRowsFromTable(String tableName) throws FileReaderException { - //StreamingReader maintains the same pointer to a sheet rowIterator, so this - //call returns an iterator that could have already been iterated on instead - //of a fresh copy. We must cache the header value from the call to - //getTableSchemas as important information in the first row could have been - //missed. - Iterator sheetIter = workbook.getSheet(tableName).rowIterator(); - List> rowList = new ArrayList<>(); - - while (sheetIter.hasNext()) { - Row currRow = sheetIter.next(); - rowList.add(getRowMap(currRow)); - } - - //Reset the streaming reader for xlsx, so that there is a fresh iterator - //on each sheet. That way each call to this function returns all the results. - resetStreamingReader(); - - return rowList; - } - - /** - * Returns a map of column numbers to a list of column values. 
- * - * @param tableName - * - * @return - * - * @throws - * org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException - */ - @Override - public Map> getColumnsFromTable(String tableName) throws FileReaderException { - Map> columnViewOfSheet = new HashMap<>(); - - Iterator sheetIter = workbook.getSheet(tableName).rowIterator(); - - while (sheetIter.hasNext()) { - Row row = sheetIter.next(); - for (Cell cell : row) { - String index = String.valueOf(cell.getColumnIndex()); - if (columnViewOfSheet.containsKey(index)) { - columnViewOfSheet.get(index).add(getCellValue(cell)); - } else { - columnViewOfSheet.put(index, newArrayList(getCellValue(cell))); - } - } - } - - //Reset the streaming reader for xlsx, so that there is a fresh iterator - //on each sheet. That way each call to this function returns all the results. - resetStreamingReader(); - - return columnViewOfSheet; - } - - /** - * Currently not supported. Returns a window of rows starting at the offset - * and ending when the number of rows read equals the 'numRowsToRead' - * parameter or the iterator has nothing left to read. - * - * For instance: offset 1, numRowsToRead 5 would return 5 results (1-5). - * offset 0, numRowsToRead 5 would return 5 results (0-4). - * - * @param tableName Current name of sheet to be read - * @param offset start index to begin reading (documents are 0 - * indexed) - * @param numRowsToRead number of rows to read - * - * @return - * - * @throws - * org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException - */ - @Override - public List> getRowsFromTable(String tableName, - int offset, int numRowsToRead) throws FileReaderException { - throw new FileReaderException("Operation Not Supported."); - } - - private Map getRowMap(Row row) { - Map rowMap = new HashMap<>(); - for (Cell cell : row) { - Object value = getCellValue(cell); - rowMap.put(String.valueOf(cell.getColumnIndex()), value); - } - return rowMap; - } - - /** - * Returns the value of a given cell. The correct value function must be - * called on a cell depending on its type, hence the switch. - * - * @param cell Cell object containing a getter function for its value type - * - * @return A generic object pointer to the cell's value - */ - private Object getCellValue(Cell cell) { - switch (cell.getCellTypeEnum()) { - case BOOLEAN: - return cell.getBooleanCellValue(); - case STRING: - return cell.getStringCellValue(); - case NUMERIC: - if (DateUtil.isCellDateFormatted(cell)) { - return cell.getDateCellValue(); - } else { - return cell.getNumericCellValue(); - } - case FORMULA: - return cell.getCellFormula(); - default: - //Cell must be empty at this branch - return EMPTY_CELL_STRING; - } - } - - /** - * Returns a map of sheet names to headers (header is in a comma-seperated - * string). Warning: Only call this ONCE per excel file. - * - * @return A map of sheet names to header strings. - * - * @throws - * org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException - */ - @Override - public Map getTableSchemas() throws FileReaderException { - Map tableSchemas = new HashMap<>(); - for (Sheet sheet : workbook) { - Iterator iterator = sheet.rowIterator(); - if (iterator.hasNext()) { - //Consume header - Row header = iterator.next(); - String headerStringFormat = StringUtils.join(header.cellIterator(), ", "); - tableSchemas.put(sheet.getSheetName(), headerStringFormat); - } - } - - //Reset the streaming reader for xlsx, so that there is a fresh iterator - //on each sheet. 
That way each call to this function returns all the results. - resetStreamingReader(); - - return tableSchemas; - } - - /** - * Resets the streaming reader so that the iterator starts at the start of each - * sheet. Matches functionality provided by apache POI. - * - * @throws - * org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException - */ - public void resetStreamingReader() throws FileReaderException { - if (ACTIVE_MIME_TYPE.equals(XLSX_MIME_TYPE)) { - try { - this.workbook = createWorkbook(); - } catch (IOException | FileReaderInitException ex) { - throw new FileReaderException("Could not reset streaming iterator", ex); - } - } - } - - @Override - public void close() { - try { - workbook.close(); - } catch (IOException ex) { - //Non-essential exception, user has no need for the connection - //object at this stage so closing details are not important - logger.log(Level.WARNING, "Could not close excel file input stream", ex); - } - } -} diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java deleted file mode 100755 index 32625d17d9..0000000000 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2018-2018 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.tabulardatareader; - -import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException; -import org.sleuthkit.datamodel.AbstractFile; - -/** - * Factory for creating the correct reader given the mime type of a file. - */ -public final class FileReaderFactory { - - private FileReaderFactory() { - } - - /** - * Instantiates the appropriate reader given the mimeType argument. - * Currently supports SQLite files and Excel files (.xls and .xlsx). BIFF5 - * format of .xls is not supported. - * - * @param mimeType mimeType passed in from the ingest module g * @param file - * current file under inspection - * - * @param file Content file to be copied into - * - * @return The correct reader class needed to read the file contents - * - * @throws - * org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException - */ - public static AbstractReader createReader(AbstractFile file, String mimeType) throws FileReaderInitException { - switch (mimeType) { - case "application/x-sqlite3": - return new SQLiteReader(file); - case "application/vnd.ms-excel": - case "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet": - try { - return new ExcelReader(file, mimeType); - //Catches runtime exceptions being emitted from Apache - //POI (such as EncryptedDocumentException) and wraps them - //into FileReaderInitException to be caught and logged - //in the ingest module. 
- } catch (Exception poiInitException) { - throw new FileReaderInitException(poiInitException); - } - default: - throw new FileReaderInitException(String.format("Reader for mime " - + "type [%s] is not supported", mimeType)); - } - } -} diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java deleted file mode 100755 index 2acd5e1d00..0000000000 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java +++ /dev/null @@ -1,360 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2018-2018 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.tabulardatareader; - -import java.io.File; -import java.io.IOException; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; -import java.util.logging.Level; -import org.openide.util.NbBundle; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; -import org.sleuthkit.autopsy.casemodule.services.FileManager; -import org.sleuthkit.autopsy.casemodule.services.Services; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.datamodel.ContentUtils; -import org.sleuthkit.autopsy.ingest.IngestServices; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.Content; -import org.sleuthkit.datamodel.SleuthkitCase; -import org.sleuthkit.datamodel.TskCoreException; - -/** - * Reads sqlite databases and returns results in a list collection. - */ -public final class SQLiteReader extends AbstractReader { - - private final Connection connection; - private final static IngestServices ingestServices = IngestServices.getInstance(); - private final static Logger logger = ingestServices.getLogger(SQLiteReader.class.getName()); - - /** - * Writes data source file contents to local disk and opens a sqlite JDBC - * connection. - * - * @param sqliteDbFile Data source abstract file - * @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException - */ - public SQLiteReader(AbstractFile sqliteDbFile) throws FileReaderInitException { - super(sqliteDbFile); - try { - final String localDiskPath = super.getLocalDiskPath(); - // Look for any meta files associated with this DB - WAL, SHM, etc. 
- findAndCopySQLiteMetaFile(sqliteDbFile, sqliteDbFile.getName() + "-wal"); - findAndCopySQLiteMetaFile(sqliteDbFile, sqliteDbFile.getName() + "-shm"); - - connection = getDatabaseConnection(localDiskPath); - } catch (ClassNotFoundException | SQLException |IOException | - NoCurrentCaseException | TskCoreException ex) { - throw new FileReaderInitException(ex); - } - } - - /** - * Searches for a meta file associated with the give SQLite database. If - * found, copies the file to the local disk folder - * - * @param sqliteFile file being processed - * @param metaFileName name of meta file to look for - * - * @throws NoCurrentCaseException Case has been closed. - * @throws TskCoreException fileManager cannot find AbstractFile - * files. - * @throws IOException Issue during writing to file. - */ - private void findAndCopySQLiteMetaFile(Content sqliteFile, - String metaFileName) throws NoCurrentCaseException, TskCoreException, IOException { - - Case openCase = Case.getCurrentCaseThrows(); - SleuthkitCase sleuthkitCase = openCase.getSleuthkitCase(); - Services services = new Services(sleuthkitCase); - FileManager fileManager = services.getFileManager(); - - List metaFiles = fileManager.findFiles( - sqliteFile.getDataSource(), metaFileName, - sqliteFile.getParent().getName()); - - if (metaFiles != null) { - for (AbstractFile metaFile : metaFiles) { - String tmpMetafilePathName = openCase.getTempDirectory() - + File.separator + metaFile.getId() + metaFile.getName(); - File tmpMetafile = new File(tmpMetafilePathName); - ContentUtils.writeToFile(metaFile, tmpMetafile); - } - } - } - - /** - * Opens a JDBC connection to the sqlite database specified by the path - * parameter. - * - * @param databasePath Local path of sqlite database - * - * @return Connection JDBC connection, to be maintained and closed by the - * reader - * - * @throws ClassNotFoundException missing SQLite JDBC class - * @throws SQLException Exception during opening database - * connection - */ - private Connection getDatabaseConnection(String databasePath) - throws ClassNotFoundException, SQLException { - - // Load the SQLite JDBC driver, if necessary. - Class.forName("org.sqlite.JDBC"); //NON-NLS - return DriverManager.getConnection( - "jdbc:sqlite:" + databasePath); //NON-NLS - } - - /** - * Retrieves a map view of table names to table schemas (in the form of - * CREATE TABLE statments). - * - * @return A map of table names to table schemas - * - * @throws - * org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException - */ - @Override - public Map getTableSchemas() throws FileReaderException { - - Map dbTablesMap = new TreeMap<>(); - - try (Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery( - "SELECT name, sql FROM sqlite_master " //NON-NLS - + " WHERE type= 'table' " //NON-NLS - + " ORDER BY name;")) { //NON-NLS - - while (resultSet.next()) { - String tableName = resultSet.getString("name"); //NON-NLS - String tableSQL = resultSet.getString("sql"); //NON-NLS - dbTablesMap.put(tableName, tableSQL); - } - - } catch (SQLException ex) { - throw new FileReaderException(ex); - } - - return dbTablesMap; - } - - /** - * Retrieves the total number of rows from a table in the SQLite database. 
- * - * @param tableName - * - * @return Row count from tableName - * - * @throws - * org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException - */ - @Override - public Integer getRowCountFromTable(String tableName) - throws FileReaderException { - String quotedTableName = wrapTableNameStringWithQuotes(tableName); - try (Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery( - "SELECT count (*) as count FROM " + quotedTableName)) { //NON-NLS - return resultSet.getInt("count"); //NON-NLS - } catch (SQLException ex) { - throw new FileReaderException(ex); - } - } - - /** - * Retrieves all rows from a given table in the SQLite database. If only a - * subset of rows are desired, see the overloaded function below. - * - * @param tableName - * - * @return List of rows, where each row is represented as a column-value - * map. - * - * @throws - * org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException - */ - @Override - public List> getRowsFromTable(String tableName) - throws FileReaderException { - //This method does not directly call its overloaded counterpart - //since the second parameter would need to be retreived from a call to - //getTableRowCount(). - String quotedTableName = wrapTableNameStringWithQuotes(tableName); - try (Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery( - "SELECT * FROM " + quotedTableName)) { //NON-NLS - return resultSetToList(resultSet); - } catch (SQLException ex) { - throw new FileReaderException(ex); - } - } - - /** - * Retrieves a subset of the rows from a given table in the SQLite database. - * - * @param tableName - * @param offset Desired start index (rows begin at 1) - * @param numRowsToRead Number of rows past the start index - * - * @return List of rows, where each row is represented as a column-value - * map. - * - * @throws - * org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException - */ - @Override - public List> getRowsFromTable(String tableName, - int offset, int numRowsToRead) throws FileReaderException { - String quotedTableName = wrapTableNameStringWithQuotes(tableName); - try (Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery( - "SELECT * FROM " + quotedTableName //NON-NLS - + " LIMIT " + Integer.toString(numRowsToRead) //NON-NLS - + " OFFSET " + Integer.toString(offset - 1))) { //NON-NLS - return resultSetToList(resultSet); - } catch (SQLException ex) { - throw new FileReaderException(ex); - } - } - - /** - * Wraps table name with quotation marks in case table name contains spaces. - * sqliteJDBC cannot read table names with spaces in them unless surrounded - * by quotation marks. - * - * @param tableName - * - * @return Input name: Result Table -> "Result Table" - */ - private String wrapTableNameStringWithQuotes(String tableName) { - return "\"" + tableName + "\""; - } - - /** - * Converts a ResultSet (row results from a table read) into a list. - * - * @param resultSet row results from a table read - * - * @return List of rows, where each row is represented as a column-value - * map. - * - * @throws SQLException occurs if ResultSet is closed while attempting to - * access it's data. 
- */ - @NbBundle.Messages("SQLiteReader.BlobNotShown.message=BLOB Data not shown") - private List> resultSetToList(ResultSet resultSet) throws SQLException { - - ResultSetMetaData metaData = resultSet.getMetaData(); - int columns = metaData.getColumnCount(); - List> rowMap = new ArrayList<>(); - while (resultSet.next()) { - Map row = new LinkedHashMap<>(columns); - for (int i = 1; i <= columns; ++i) { - if (resultSet.getObject(i) == null) { - row.put(metaData.getColumnName(i), ""); - } else { - if (metaData.getColumnTypeName(i).compareToIgnoreCase("blob") == 0) { - row.put(metaData.getColumnName(i), Bundle.SQLiteReader_BlobNotShown_message()); - } else { - row.put(metaData.getColumnName(i), resultSet.getObject(i)); - } - } - } - rowMap.add(row); - } - - return rowMap; - } - - /** - * Returns a column view of the table. Maps the column name to a list of - * that column's values. - * - * @param tableName - * - * @return - * - * @throws - * org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException - */ - @Override - public Map> getColumnsFromTable(String tableName) - throws FileReaderException { - - String quotedTableName = wrapTableNameStringWithQuotes(tableName); - try (Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery( - "SELECT * FROM " + quotedTableName)) { //NON-NLS - - Map> columnView = new HashMap<>(); - ResultSetMetaData metaData = resultSet.getMetaData(); - int columns = metaData.getColumnCount(); - for (int i = 1; i <= columns; i++) { - columnView.put(metaData.getColumnName(i), new LinkedList<>()); - } - - while (resultSet.next()) { - for (int i = 1; i <= columns; i++) { - if (resultSet.getObject(i) == null) { - columnView.get(metaData.getColumnName(i)).add(""); - } else { - if (metaData.getColumnTypeName(i).compareToIgnoreCase("blob") == 0) { - columnView.get(metaData.getColumnName(i)).add( - Bundle.SQLiteReader_BlobNotShown_message()); - } else { - columnView.get(metaData.getColumnName(i)).add( - resultSet.getObject(i)); - } - } - } - } - - return columnView; - } catch (SQLException ex) { - throw new FileReaderException(ex); - } - } - - /** - * Closes underlying JDBC connection. 
- */ - @Override - public void close() { - try { - connection.close(); - } catch (SQLException ex) { - //Non-essential exception, user has no need for the connection - //object at this stage so closing details are not important - logger.log(Level.WARNING, "Could not close JDBC connection", ex); - } - } -} diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java index 17800d5c06..99516521f7 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java @@ -21,31 +21,30 @@ package org.sleuthkit.autopsy.keywordsearch; import com.google.common.io.CharSource; import java.io.IOException; import java.io.Reader; -import java.util.ArrayList; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; import java.util.Collection; import java.util.Iterator; import java.util.LinkedList; -import java.util.List; -import java.util.Map; import java.util.logging.Level; -import javax.swing.text.Segment; +import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.tabulardatareader.AbstractReader; -import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException; +import org.sleuthkit.autopsy.coreutils.SqliteUtil; import org.sleuthkit.datamodel.Content; -import org.apache.commons.lang3.StringUtils; -import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException; -import org.sleuthkit.autopsy.tabulardatareader.FileReaderFactory; import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.TskCoreException; /** * Dedicated SqliteTextExtractor to solve the problems associated with Tika's * Sqlite parser. 
* - * Tika problems: - * 1) Tika fails to open virtual tables - * 2) Tika fails to open tables with spaces in table name - * 3) Tika fails to include the table names in output (except for the first table it parses) + * Tika problems: 1) Tika fails to open virtual tables 2) Tika fails to open + * tables with spaces in table name 3) Tika fails to include the table names in + * output (except for the first table it parses) */ class SqliteTextExtractor extends ContentTextExtractor { @@ -93,126 +92,203 @@ class SqliteTextExtractor extends ContentTextExtractor { */ @Override public Reader getReader(Content source) throws TextExtractorException { - //Firewall for any content that is not an AbstractFile - if (!AbstractFile.class.isInstance(source)) { - try { + try { + //Firewall for any content that is not an AbstractFile + if (!AbstractFile.class.isInstance(source)) { return CharSource.wrap(EMPTY_CHARACTER_SEQUENCE).openStream(); - } catch (IOException ex) { - throw new TextExtractorException( - String.format("Encountered an issue wrapping blank string" //NON-NLS - + " with CharSource for non-abstract file with id: [%s]," //NON-NLS - + " name: [%s].", source.getId(), source.getName()), ex); //NON-NLS } - } - - try (AbstractReader reader = FileReaderFactory.createReader( - (AbstractFile) source, SQLITE_MIMETYPE)) { - final CharSequence databaseContent = getDatabaseContents(source, reader); - //CharSource will maintain unicode strings correctly - return CharSource.wrap(databaseContent).openStream(); - } catch (FileReaderInitException | IOException ex) { + return new SQLiteTableReader((AbstractFile) source); + } catch (NoCurrentCaseException | IOException | TskCoreException + | ClassNotFoundException | SQLException ex) { throw new TextExtractorException( - String.format("Encountered a FileReaderInitException" //NON-NLS - + " when trying to initialize a SQLiteReader" //NON-NLS - + " for AbstractFile with id: [%s], name: [%s].", //NON-NLS - source.getId(), source.getName()), ex); - } catch (FileReaderException ex) { - throw new TextExtractorException( - String.format("Could not get contents from database " //NON-NLS - + "tables for AbstractFile with id [%s], name: [%s].", //NON-NLS - source.getId(), source.getName()), ex); + String.format("Encountered an issue while trying to initialize " //NON-NLS + + "a sqlite table steamer for abstract file with id: [%s], name: " //NON-NLS + + "[%s].", source.getId(), source.getName()), ex); //NON-NLS } } /** - * Queries the sqlite database and adds all tables and rows to a - * TableBuilder, which formats the strings into a table view for clean - * results while searching for keywords in the application. - * - * @param reader Sqlite reader for the content source - * @param source Sqlite file source + * Wraps each table in a reader as the tables are streamed one at a time + * from the database. */ - private CharSequence getDatabaseContents(Content source, AbstractReader reader) throws FileReaderException { - Collection databaseStorage = new LinkedList<>(); + private class SQLiteTableReader extends Reader { - Integer charactersCopied = loadDatabaseIntoCollection(databaseStorage, - reader, source); + private final Iterator tableIterator; + private final Connection connection; + private Reader currentTableReader; + private final AbstractFile source; - return toCharSequence(databaseStorage, charactersCopied); - } + /** + * Creates a reader that streams each table into memory and wraps a + * reader around it. Designed to save memory for large databases. 
+ * + * @param file Sqlite database file + * + * @throws NoCurrentCaseException Current case has closed + * @throws IOException Exception copying abstract file over + * to local temp directory + * @throws TskCoreException Exception using file manager to find + * meta files + * @throws ClassNotFoundException Could not find sqlite JDBC class + * @throws SQLException Could not establish jdbc connection + */ + public SQLiteTableReader(AbstractFile file) throws NoCurrentCaseException, + IOException, TskCoreException, ClassNotFoundException, SQLException { + source = file; - /** - * Iterates all of the tables and populate the TableBuilder with all of the - * rows from the table. The table string will be added to the list of - * contents. - * - * @param databaseStorage Collection containing all of the database content - * @param tables A map of table names to table schemas - * @param reader SqliteReader for interfacing with the database - * @param source Source database file for logging - */ - private int loadDatabaseIntoCollection(Collection databaseStorage, - AbstractReader reader, Content source) throws FileReaderException { - //Will throw a FileReaderException if table schemas are unattainable - Map tables = reader.getTableSchemas(); + String localDiskPath = SqliteUtil.writeAbstractFileToLocalDisk(file); + SqliteUtil.findAndCopySQLiteMetaFile(file); + Class.forName("org.sqlite.JDBC"); //NON-NLS + connection = DriverManager.getConnection("jdbc:sqlite:" + localDiskPath); //NON-NLS + tableIterator = getTables().iterator(); + } - int charactersCopied = 0; - for (String tableName : tables.keySet()) { - TableBuilder tableBuilder = new TableBuilder(); - tableBuilder.setTableName(tableName); - - try { - //Catch any exception at a particular table, we want to ensure we grab - //content from as many tables as possible - List> rowsInTable = reader.getRowsFromTable(tableName); - if (!rowsInTable.isEmpty()) { - tableBuilder.addHeader(new ArrayList<>(rowsInTable.get(0).keySet())); - for (Map row : rowsInTable) { - tableBuilder.addRow(row.values()); - } + /** + * Gets the table names from the SQLite database file. + * + * @return Collection of table names from the database schema + */ + private Collection getTables() throws SQLException { + Collection tableNames = new LinkedList<>(); + try (Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery( + "SELECT name FROM sqlite_master " + + " WHERE type= 'table' ")) { + while (resultSet.next()) { + tableNames.add(resultSet.getString("name")); //NON-NLS } - } catch (FileReaderException ex) { + } + return tableNames; + } + + /** + * Reads from the database table and loads in the contents to a table + * builder, so that its properly formatted during indexing. 
+ * + * @param tableName Database table to be read + */ + private String getTableAsString(String tableName) { + TableBuilder table = new TableBuilder(); + table.addTableName(tableName); + String quotedTableName = "\"" + tableName + "\""; + + try (Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery( + "SELECT * FROM " + quotedTableName)) { //NON-NLS + ResultSetMetaData metaData = resultSet.getMetaData(); + int columnCount = resultSet.getMetaData().getColumnCount(); + Collection row = new LinkedList<>(); + + //Add column names once from metadata + for (int i = 1; i <= columnCount; i++) { + row.add(metaData.getColumnName(i)); + } + + table.addHeader(row); + while (resultSet.next()) { + row = new LinkedList<>(); + for (int i = 1; i <= columnCount; i++) { + Object result = resultSet.getObject(i); + String type = metaData.getColumnTypeName(i); + if (isValuableResult(result, type)) { + row.add(resultSet.getObject(i).toString()); + } + } + table.addRow(row); + } + table.addCell("\n"); + } catch (SQLException ex) { logger.log(Level.WARNING, String.format( "Error attempting to read file table: [%s]" //NON-NLS + " for file: [%s] (id=%d).", tableName, //NON-NLS source.getName(), source.getId()), ex); } - String formattedTable = tableBuilder.toString(); - charactersAdded += formattedTable.length(); - databaseStorage.add(formattedTable); + return table.toString(); } - return charactersCopied; - } - - - /** - * Copy elements from collection (which contains formatted database tables) - * into a CharSequence so that it can be wrapped and used by the Google CharSource - * lib. - * - * @param databaseStorage Collection containing database contents - * @param characterCount Number of characters needed to be allocated in the buffer - * so that all of the contents in the collection can be copied over. - * - * @return CharSource of the formatted database contents - */ - private CharSequence toCharSequence(Collection databaseStorage, - int characterCount) { - - final char[] databaseCharArray = new char[characterCount]; - - int currIndex = 0; - for (String table : databaseStorage) { - System.arraycopy(table.toCharArray(), 0, databaseCharArray, - currIndex, table.length()); - currIndex += table.length(); + + /** + * Determines if the object result from the result set is worth adding to + * the row or not. Ignores nulls and blobs for the time being. + * + * @param result Object result retrieved from resultSet + * @param type Type of object retrieved from resultSet + * @return boolean where true means valuable, false implies it can be skipped. + */ + private boolean isValuableResult(Object result, String type) { + //Ignore nulls and blobs + return result != null && type.compareToIgnoreCase("blob") != 0; + } + + /** + * Loads a database file into the character buffer until there are no + * more contents to read. The underlying implementation here only loads + * one table at a time, to conserve memory.
+ * + * @param cbuf Buffer to copy database content characters into + * @param off Offset at which to begin loading the buffer + * @param len Length of the buffer + * + * @return The number of characters read from the reader + * + * @throws IOException If there is an error with the CharSource wrapping + */ + @Override + public int read(char[] cbuf, int off, int len) throws IOException { + if (currentTableReader == null) { + String tableResults = getNextTable(); + if (tableResults == null) { + return -1; + } + currentTableReader = CharSource.wrap(tableResults).openStream(); + } + + int charactersRead = currentTableReader.read(cbuf, off, len); + while (charactersRead == -1) { + String tableResults = getNextTable(); + if (tableResults == null) { + return -1; + } + currentTableReader = CharSource.wrap(tableResults).openStream(); + charactersRead = currentTableReader.read(cbuf, off, len); + } + + return charactersRead; + } + + /** + * Grabs the next table name from the collection of all table names and + * renders that table as a string. Once there are no more tables to + * process, null is returned, which is understood to mean the end of + * parsing. + * + * @return String of the current table contents, or null if there are no + * more tables to read + */ + private String getNextTable() { + if (tableIterator.hasNext()) { + return getTableAsString(tableIterator.next()); + } else { + return null; + } + } + + /** + * Close the underlying connection to the database. + * + * @throws IOException Not applicable, we can just catch the + * SQLException + */ + @Override + public void close() throws IOException { + try { + connection.close(); + } catch (SQLException ex) { + //Non-essential exception, user has no need for the connection + //object at this stage so closing details are not important + logger.log(Level.WARNING, "Could not close JDBC connection", ex); + } } - //Segment class does not make an internal copy of the character array - //being passed in (more efficient). It also implements a CharSequences - //necessary for the CharSource class to create a compatible reader. - return new Segment(databaseCharArray, 0, characterCount); } /** @@ -221,41 +297,20 @@ class SqliteTextExtractor extends ContentTextExtractor { */ private class TableBuilder { - private final List rows = new LinkedList<>(); - private Integer charactersAdded = 0; - - //Formatters - private static final String HORIZONTAL_DELIMITER = "-"; - private static final String VERTICAL_DELIMITER = "|"; - private static final String HEADER_CORNER = "+"; + private final StringBuilder table = new StringBuilder(); private static final String TAB = "\t"; private static final String NEW_LINE = "\n"; private static final String SPACE = " "; - //Number of escape sequences in the header row - private static final int ESCAPE_SEQUENCES = 4; - - private String tableName = ""; - /** * Add the section to the top left corner of the table. This is where * the name of the table should go. * * @param tableName Table name */ - public void setTableName(String tableName) { - this.tableName = tableName + NEW_LINE + NEW_LINE; - } - - /** - * Creates a border given the length param.
- * - * @return Ex: \t+----------------------+\n - */ - private String createBorder(int length) { - return TAB + HEADER_CORNER + StringUtils.repeat( - HORIZONTAL_DELIMITER, length) + HEADER_CORNER + NEW_LINE; + public void addTableName(String tableName) { + table.append(tableName).append(NEW_LINE + NEW_LINE); } /** @@ -264,7 +319,7 @@ class SqliteTextExtractor extends ContentTextExtractor { * * @param vals */ - public void addHeader(Collection vals) { + public void addHeader(Collection vals) { addRow(vals); } @@ -274,126 +329,28 @@ class SqliteTextExtractor extends ContentTextExtractor { * * @param vals */ - public void addRow(Collection vals) { - List rowValues = new ArrayList<>(); + public void addRow(Collection vals) { + table.append(TAB); vals.forEach((val) -> { - rowValues.add(val.toString()); - charactersAdded += val.toString().length(); + table.append(val); + table.append(SPACE); }); - rows.add(rowValues.toArray( - new String[rowValues.size()])); + table.append(NEW_LINE); + } + + public void addCell(String cell) { + table.append(cell); } /** - * Gets the max width of a cell in each column and the max number of - * columns in any given row. This ensures that there are enough columns - * and enough space for even the longest entry. - * - * @return array of column widths - */ - private int[] getMaxWidthPerColumn() { - int maxNumberOfColumns = 0; - for (String[] row : rows) { - maxNumberOfColumns = Math.max( - maxNumberOfColumns, row.length); - } - - int[] widths = new int[maxNumberOfColumns]; - for (String[] row : rows) { - for (int colNum = 0; colNum < row.length; colNum++) { - widths[colNum] = Math.max( - widths[colNum], - row[colNum].length() - ); - } - } - - return widths; - } - - /** - * Returns a string version of the table, with all of the formatters and - * escape sequences necessary to print nicely in the console output. + * Returns a string version of the table, with all of the escape + * sequences necessary to print nicely in the console output. * * @return */ @Override public String toString() { - StringBuilder outputTable = new StringBuilder(charactersAdded); - int[] colMaxWidths = getMaxWidthPerColumn(); - int borderLength = 0; - - Iterator rowIterator = rows.iterator(); - if (rowIterator.hasNext()) { - //Length of the header defines the table boundaries - borderLength = appendFormattedHeader(rowIterator.next(), - colMaxWidths, outputTable); - - while (rowIterator.hasNext()) { - appendFormattedRow(rowIterator.next(), colMaxWidths, outputTable); - } - - outputTable.insert(0, tableName); - outputTable.append(createBorder(borderLength)); - outputTable.append(NEW_LINE); - } - - return outputTable.toString(); - } - - /** - * Outputs a fully formatted row in the table - * - * Example: \t| John | 12345678 | john@email.com |\n - * - * @param row Array containing unformatted row content - * @param colMaxWidths An array of column maximum widths, so that - * everything is pretty printed. 
- * @param outputTable Buffer that formatted contents are written to - */ - private void appendFormattedRow(String[] row, - int[] colMaxWidths, StringBuilder outputTable) { - outputTable.append(TAB); - for (int colNum = 0; colNum < row.length; colNum++) { - outputTable.append(VERTICAL_DELIMITER); - outputTable.append(SPACE); - outputTable.append(StringUtils.rightPad( - StringUtils.defaultString(row[colNum]), - colMaxWidths[colNum])); - outputTable.append(SPACE); - } - outputTable.append(VERTICAL_DELIMITER); - outputTable.append(NEW_LINE); - } - - /** - * Adds a fully formatted header to the table builder and returns the - * length of this header. The length of the header is needed to set the - * table boundaries - * - * Example: \t+----------------------+\n - * \t| Email | Phone | Name |\n - * \t+----------------------+\n - * - * @param row Array of contents in each column - * @param colMaxWidths Widths for each column in the table - * @param outputTable Output stringbuilder - * - * @return length of the formatted header, this length will be needed to - * correctly print the bottom table border. - */ - private int appendFormattedHeader(String[] row, int[] colMaxWidths, StringBuilder outputTable) { - appendFormattedRow(row, colMaxWidths, outputTable); - //Printable table dimensions are equal to the length of the header minus - //the number of escape sequences used to for formatting. - int borderLength = outputTable.length() - ESCAPE_SEQUENCES; - String border = createBorder(borderLength); - - //Surround the header with borders above and below. - outputTable.insert(0, border); - outputTable.append(border); - - return borderLength; + return table.toString(); } } }
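
For reviewers who want to exercise the pieces of this change outside of Autopsy, a few illustrative sketches follow; none of them are part of the patch itself. The first mirrors the sqlite_master query used by getTables() above. It assumes the xerial sqlite-jdbc driver is on the classpath, and the database path is a placeholder.

    // Minimal, standalone sketch of listing table names over JDBC (not Autopsy code).
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;
    import java.util.ArrayList;
    import java.util.List;

    public class ListSqliteTables {
        public static void main(String[] args) throws ClassNotFoundException, SQLException {
            Class.forName("org.sqlite.JDBC"); // load the JDBC driver, if necessary
            String localDiskPath = args.length > 0 ? args[0] : "example.db"; // placeholder path
            List<String> tableNames = new ArrayList<>();
            try (Connection connection = DriverManager.getConnection("jdbc:sqlite:" + localDiskPath);
                    Statement statement = connection.createStatement();
                    ResultSet resultSet = statement.executeQuery(
                            "SELECT name FROM sqlite_master WHERE type='table'")) {
                while (resultSet.next()) {
                    tableNames.add(resultSet.getString("name"));
                }
            }
            tableNames.forEach(System.out::println);
        }
    }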
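
The table-to-text conversion done by getTableAsString() and isValuableResult() boils down to the next sketch. The dumpTable() helper is hypothetical (it does not exist in the patch); it assumes the caller supplies an open Connection, and it mirrors the quoting of table names and the skipping of null and BLOB values shown above.

    // Hedged sketch of rendering one table as tab-indented text (not Autopsy code).
    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.ResultSetMetaData;
    import java.sql.SQLException;
    import java.sql.Statement;

    public class TableDumper {
        static String dumpTable(Connection connection, String tableName) throws SQLException {
            StringBuilder text = new StringBuilder(tableName).append("\n\n");
            String quotedTableName = "\"" + tableName + "\""; // quote so names with spaces still work
            try (Statement statement = connection.createStatement();
                    ResultSet resultSet = statement.executeQuery("SELECT * FROM " + quotedTableName)) {
                ResultSetMetaData metaData = resultSet.getMetaData();
                int columnCount = metaData.getColumnCount();
                text.append("\t");
                for (int i = 1; i <= columnCount; i++) { // header row comes from the metadata
                    text.append(metaData.getColumnName(i)).append(" ");
                }
                text.append("\n");
                while (resultSet.next()) {
                    text.append("\t");
                    for (int i = 1; i <= columnCount; i++) {
                        Object value = resultSet.getObject(i);
                        String type = metaData.getColumnTypeName(i);
                        if (value != null && !"blob".equalsIgnoreCase(type)) { // skip nulls and blobs
                            text.append(value.toString()).append(" ");
                        }
                    }
                    text.append("\n");
                }
            }
            return text.append("\n").toString();
        }
    }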
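
The lazy "one table at a time" behaviour of SQLiteTableReader.read() can also be seen in a generic form. In the sketch below the Iterator<String> stands in for getNextTable(): each chunk of text is only materialized (and wrapped with Guava's CharSource, as in the patch) once the previous chunk has been exhausted, so only one table's worth of text is held in memory at a time.

    // Generic sketch of the chunk-at-a-time Reader pattern (not Autopsy code).
    import com.google.common.io.CharSource;
    import java.io.IOException;
    import java.io.Reader;
    import java.util.Iterator;

    public class ChunkedReader extends Reader {

        private final Iterator<String> chunks; // e.g. one formatted table per element
        private Reader current;

        public ChunkedReader(Iterator<String> chunks) {
            this.chunks = chunks;
        }

        @Override
        public int read(char[] cbuf, int off, int len) throws IOException {
            while (true) {
                if (current == null) {
                    if (!chunks.hasNext()) {
                        return -1; // nothing left to stream
                    }
                    current = CharSource.wrap(chunks.next()).openStream();
                }
                int charactersRead = current.read(cbuf, off, len);
                if (charactersRead != -1) {
                    return charactersRead;
                }
                current = null; // current chunk exhausted, move on to the next one
            }
        }

        @Override
        public void close() throws IOException {
            if (current != null) {
                current.close();
            }
        }
    }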
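
Finally, to make the effect of the simplified TableBuilder concrete: with the ASCII borders gone, the builder now emits a flat, tab-indented block per table. The snippet below is a hypothetical usage (TableBuilder is a private inner class, so this is illustrative only), followed by the rough shape of the text it produces.

    TableBuilder table = new TableBuilder();
    table.addTableName("contacts");                  // table name followed by a blank line
    table.addHeader(Arrays.asList("name", "phone")); // the header is just another row
    table.addRow(Arrays.asList("John", "555-1234"));
    table.addCell("\n");                             // trailing newline separates tables
    String text = table.toString();
    // text is roughly:
    // contacts
    //
    // <TAB>name phone
    // <TAB>John 555-1234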