Merge branch 'develop' of https://github.com/sleuthkit/autopsy into 4114-AddHasCommentColumn

William Schaefer 2018-08-23 13:42:29 -04:00
commit 4fd868b62c
9 changed files with 632 additions and 103 deletions

View File

@@ -29,6 +29,8 @@
<dependency conf="core->default" org="org.apache.commons" name="commons-dbcp2" rev="2.1.1"/>
<dependency conf="core->default" org="org.apache.commons" name="commons-pool2" rev="2.4.2"/>
<dependency org="com.monitorjbl" name="xlsx-streamer" rev="1.2.1"/>
<dependency conf="core->default" org="org.jsoup" name="jsoup" rev="1.10.3"/>
<dependency conf="core->default" org="com.googlecode.plist" name="dd-plist" rev="1.20"/>

View File

@@ -15,7 +15,7 @@ file.reference.postgresql-9.4.1211.jre7.jar=release/modules/ext/postgresql-9.4.1
file.reference.Rejistry-1.0-SNAPSHOT.jar=release/modules/ext/Rejistry-1.0-SNAPSHOT.jar
file.reference.sevenzipjbinding-AllPlatforms.jar=release/modules/ext/sevenzipjbinding-AllPlatforms.jar
file.reference.sevenzipjbinding.jar=release/modules/ext/sevenzipjbinding.jar
file.reference.sqlite-jdbc-3.8.11.jar=release/modules/ext/sqlite-jdbc-3.8.11.jar
file.reference.sqlite-jdbc-3.8.11.jar=release\\modules\\ext\\sqlite-jdbc-3.8.11.jar
file.reference.StixLib.jar=release/modules/ext/StixLib.jar
file.reference.bcprov-jdk15on-1.54.jar=release/modules/ext/bcprov-jdk15on-1.54.jar
file.reference.jackcess-2.1.8.jar=release/modules/ext/jackcess-2.1.8.jar
@@ -35,6 +35,7 @@ file.reference.tika-parsers-1.17.jar=release/modules/ext/tika-parsers-1.17.jar
file.reference.curator-client-2.8.0.jar=release/modules/ext/curator-client-2.8.0.jar
file.reference.curator-framework-2.8.0.jar=release/modules/ext/curator-framework-2.8.0.jar
file.reference.curator-recipes-2.8.0.jar=release/modules/ext/curator-recipes-2.8.0.jar
file.reference.xlsx-streamer-1.2.1.jar=release/modules/ext/xlsx-streamer-1.2.1.jar
file.reference.xmpcore-5.1.3.jar=release/modules/ext/xmpcore-5.1.3.jar
file.reference.xz-1.6.jar=release/modules/ext/xz-1.6.jar
file.reference.zookeeper-3.4.6.jar=release/modules/ext/zookeeper-3.4.6.jar

View File

@@ -337,7 +337,7 @@
<package>org.sleuthkit.autopsy.modules.vmextractor</package>
<package>org.sleuthkit.autopsy.progress</package>
<package>org.sleuthkit.autopsy.report</package>
<package>org.sleuthkit.autopsy.sqlitereader</package>
<package>org.sleuthkit.autopsy.tabulardatareader</package>
<package>org.sleuthkit.datamodel</package>
</public-packages>
<class-path-extension>
@@ -388,6 +388,10 @@
<runtime-relative-path>ext/sevenzipjbinding.jar</runtime-relative-path>
<binary-origin>release/modules/ext/sevenzipjbinding.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/sleuthkit-postgresql-4.6.2.jar</runtime-relative-path>
<binary-origin>release/modules/ext/sleuthkit-postgresql-4.6.2.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/mchange-commons-java-0.2.9.jar</runtime-relative-path>
<binary-origin>release/modules/ext/mchange-commons-java-0.2.9.jar</binary-origin>
@@ -412,10 +416,6 @@
<runtime-relative-path>ext/metadata-extractor-2.10.1.jar</runtime-relative-path>
<binary-origin>release/modules/ext/metadata-extractor-2.10.1.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/sleuthkit-postgresql-4.6.2.jar</runtime-relative-path>
<binary-origin>release/modules/ext/sleuthkit-postgresql-4.6.2.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/tika-core-1.17.jar</runtime-relative-path>
<binary-origin>release/modules/ext/tika-core-1.17.jar</binary-origin>
@@ -442,7 +442,7 @@
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/sqlite-jdbc-3.8.11.jar</runtime-relative-path>
<binary-origin>release/modules/ext/sqlite-jdbc-3.8.11.jar</binary-origin>
<binary-origin>release\modules\ext\sqlite-jdbc-3.8.11.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/activemq-all-5.11.1.jar</runtime-relative-path>
@@ -488,6 +488,14 @@
<runtime-relative-path>ext/jdom-2.0.5-contrib.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jdom-2.0.5-contrib.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/SparseBitSet-1.1.jar</runtime-relative-path>
<binary-origin>release/modules/ext/SparseBitSet-1.1.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/xlsx-streamer-1.2.1.jar</runtime-relative-path>
<binary-origin>release/modules/ext/xlsx-streamer-1.2.1.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/pdfbox-2.0.8.jar</runtime-relative-path>
<binary-origin>release/modules/ext/pdfbox-2.0.8.jar</binary-origin>
@@ -500,10 +508,6 @@
<runtime-relative-path>ext/xmpcore-5.1.3.jar</runtime-relative-path>
<binary-origin>release/modules/ext/xmpcore-5.1.3.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/SparseBitSet-1.1.jar</runtime-relative-path>
<binary-origin>release/modules/ext/SparseBitSet-1.1.jar</binary-origin>
</class-path-extension>
</data>
</configuration>
</project>

View File

@@ -25,7 +25,6 @@ import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@@ -44,9 +43,11 @@ import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.sqlitereader.SQLiteReader;
import org.sleuthkit.autopsy.tabulardatareader.AbstractReader;
import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException;
import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException;
import org.sleuthkit.autopsy.tabulardatareader.FileReaderFactory;
/**
* A file content viewer for SQLite database files.
@@ -61,7 +62,7 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer {
private final SQLiteTableView selectedTableView = new SQLiteTableView();
private AbstractFile sqliteDbFile;
private File tmpDbFile;
private SQLiteReader sqliteReader;
private AbstractReader sqliteReader;
private int numRows; // num of rows in the selected table
private int currPage = 0; // curr page of rows being displayed
@@ -339,12 +340,8 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer {
// close DB connection to file
if (null != sqliteReader) {
try {
sqliteReader.close();
sqliteReader = null;
} catch (SQLException ex) {
logger.log(Level.SEVERE, "Failed to close DB connection to file.", ex); //NON-NLS
}
sqliteReader.close();
sqliteReader = null;
}
sqliteDbFile = null;
@@ -366,7 +363,8 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer {
try {
String localDiskPath = Case.getCurrentCaseThrows().getTempDirectory() +
File.separator + sqliteDbFile.getName();
sqliteReader = new SQLiteReader(sqliteDbFile, localDiskPath);
sqliteReader = FileReaderFactory.createReader(SUPPORTED_MIMETYPES[0], sqliteDbFile, localDiskPath);
Map<String, String> dbTablesMap = sqliteReader.getTableSchemas();
@@ -381,24 +379,16 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer {
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Current case has been closed", ex); //NON-NLS
MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_errorMessage_noCurrentCase());
} catch (IOException | TskCoreException ex) {
logger.log(Level.SEVERE, String.format(
"Failed to create temp copy of DB file '%s' (objId=%d)", //NON-NLS
sqliteDbFile.getName(), sqliteDbFile.getId()), ex);
MessageNotifyUtil.Message.error(
Bundle.SQLiteViewer_errorMessage_failedToExtractFile());
} catch (ClassNotFoundException ex) {
logger.log(Level.SEVERE, String.format(
"Failed to initialize JDBC SQLite '%s' (objId=%d)", //NON-NLS
sqliteDbFile.getName(), sqliteDbFile.getId()), ex);
MessageNotifyUtil.Message.error(
Bundle.SQLiteViewer_errorMessage_failedToinitJDBCDriver());
} catch (SQLException ex) {
} catch (FileReaderException ex) {
logger.log(Level.SEVERE, String.format(
"Failed to get tables from DB file '%s' (objId=%d)", //NON-NLS
sqliteDbFile.getName(), sqliteDbFile.getId()), ex);
MessageNotifyUtil.Message.error(
Bundle.SQLiteViewer_errorMessage_failedToQueryDatabase());
} catch (FileReaderInitException ex) {
logger.log(Level.SEVERE, String.format(
"Failed to create a SQLiteReader '%s' (objId=%d)", //NON-NLS
sqliteDbFile.getName(), sqliteDbFile.getId()), ex);
}
}
@@ -407,7 +397,7 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer {
})
private void selectTable(String tableName) {
try {
numRows = sqliteReader.getTableRowCount(tableName);
numRows = sqliteReader.getRowCountFromTable(tableName);
numEntriesField.setText(numRows + " entries");
currPage = 1;
@@ -426,7 +416,7 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer {
selectedTableView.setupTable(Collections.emptyList());
}
} catch (SQLException ex) {
} catch (FileReaderException ex) {
logger.log(Level.SEVERE, String.format(
"Failed to load table %s from DB file '%s' (objId=%d)", tableName, //NON-NLS
sqliteDbFile.getName(), sqliteDbFile.getId()), ex);
@@ -447,7 +437,7 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer {
} else {
selectedTableView.setupTable(Collections.emptyList());
}
} catch (SQLException ex) {
} catch (FileReaderException ex) {
logger.log(Level.SEVERE, String.format(
"Failed to read table %s from DB file '%s' (objId=%d)", tableName, //NON-NLS
sqliteDbFile.getName(), sqliteDbFile.getId()), ex);
@@ -461,7 +451,7 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer {
*
* @param file
* @param tableName
* @param rowMap -- A list of rows in the table, where each row is represented as a column-value
* @param rowMap A list of rows in the table, where each row is represented as a column-value
* map.
* @throws FileNotFoundException
* @throws IOException
@@ -516,7 +506,7 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer {
} else {
exportTableToCSV(file, tableName, currentTableRows);
}
} catch (SQLException ex) {
} catch (FileReaderException ex) {
logger.log(Level.SEVERE, String.format(
"Failed to read table %s from DB file '%s' (objId=%d)", //NON-NLS
tableName, sqliteDbFile.getName(), sqliteDbFile.getId()), ex);
@@ -534,8 +524,8 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer {
* Returns a comma separated header string from the keys of the column
* row map.
*
* @param row -- column header row map
* @return -- comma seperated header string
* @param row column header row map
* @return comma separated header string
*/
private String createColumnHeader(Map<String, Object> row) {
return row.entrySet()
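
The net effect of the SQLiteViewer changes above: the viewer now obtains a generic AbstractReader from FileReaderFactory and handles exactly two checked reader exceptions, instead of juggling SQLException, IOException, TskCoreException and ClassNotFoundException itself. A condensed sketch of the new flow (field names and log messages taken from the class above; this is a summary sketch, not the literal hunks):

// Set-up: the factory hides the concrete reader type behind AbstractReader.
try {
    String localDiskPath = Case.getCurrentCaseThrows().getTempDirectory()
            + File.separator + sqliteDbFile.getName();
    sqliteReader = FileReaderFactory.createReader(SUPPORTED_MIMETYPES[0],
            sqliteDbFile, localDiskPath);
    Map<String, String> dbTablesMap = sqliteReader.getTableSchemas();
    // ... populate the table drop-down from dbTablesMap.keySet() ...
} catch (NoCurrentCaseException ex) {
    logger.log(Level.SEVERE, "Current case has been closed", ex); //NON-NLS
} catch (FileReaderException ex) {
    logger.log(Level.SEVERE, "Failed to get tables from DB file", ex); //NON-NLS
} catch (FileReaderInitException ex) {
    logger.log(Level.SEVERE, "Failed to create a SQLiteReader", ex); //NON-NLS
}

// Tear-down: AbstractReader.close() no longer throws, so no try/catch is needed.
if (null != sqliteReader) {
    sqliteReader.close();
    sqliteReader = null;
}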

View File

@@ -0,0 +1,143 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.tabulardatareader;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.TskCoreException;
/**
* An abstract reader interface for retrieving contents from files via a common
* API.
*/
public abstract class AbstractReader implements AutoCloseable {
public AbstractReader(AbstractFile file, String localDiskPath)
throws FileReaderInitException {
writeDataSourceToLocalDisk(file, localDiskPath);
}
/**
* Copies the data source file contents to local drive for processing.
* This function is common to all readers.
*
* @param file AbstractFile from the data source
* @param localDiskPath Local drive path to copy AbstractFile contents
* @throws FileReaderInitException Wraps an IOException thrown while writing
* the file contents to local disk
*/
private void writeDataSourceToLocalDisk(AbstractFile file, String localDiskPath)
throws FileReaderInitException {
try {
File localDatabaseFile = new File(localDiskPath);
if (!localDatabaseFile.exists()) {
ContentUtils.writeToFile(file, localDatabaseFile);
}
} catch (IOException ex) {
throw new FileReaderInitException(ex);
}
}
/**
* Returns a mapping of table names to table schemas (these may be in the form
* of headers, or CREATE TABLE statements for databases).
*
* @return Mapping of table names to schemas
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
*/
public abstract Map<String, String> getTableSchemas() throws FileReaderException;
/**
* Returns the row count for the given table name.
*
* @param tableName
* @return number of rows in the current table
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
*/
public abstract Integer getRowCountFromTable(String tableName) throws FileReaderException;
/**
* Returns a collection view of the rows in a table.
*
* @param tableName
* @return List view of the rows in the table
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
*/
public abstract List<Map<String, Object>> getRowsFromTable(String tableName) throws FileReaderException;
/**
* Returns a window of rows starting at the offset and ending when the number of rows read
* equals the 'numRowsToRead' parameter or there is nothing left to read.
*
* @param tableName table name to be read from
* @param offset start index to begin reading
* @param numRowsToRead number of rows to read past offset
* @return List view of the rows in the table
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
*/
public abstract List<Map<String, Object>> getRowsFromTable(String tableName,
int offset, int numRowsToRead) throws FileReaderException;
@Override
public abstract void close();
/**
* Checked exceptions are specific to a given implementation, so this custom
* exception allows for a common interface to accommodate all of them. Init
* exception allows for more flexibility in logging.
*/
public static class FileReaderInitException extends Exception {
public FileReaderInitException(String message, Throwable cause) {
super(message, cause);
}
public FileReaderInitException(Throwable cause) {
super(cause);
}
public FileReaderInitException(String message) {
super(message);
}
}
/**
* Checked exceptions are specific to a given implementation, so this custom
* exception allows for a common interface to accommodate all of them.
*/
public class FileReaderException extends Exception {
public FileReaderException(String message, Throwable cause) {
super(message, cause);
}
public FileReaderException(Throwable cause) {
super(cause);
}
public FileReaderException(String message) {
super(message);
}
}
}
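
A new file type only has to extend this class and implement the five abstract members. A hypothetical minimal subclass, shown purely to illustrate the contract (CsvReader is not part of this commit and its return values are placeholders):

package org.sleuthkit.autopsy.tabulardatareader;

import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.sleuthkit.datamodel.AbstractFile;

public final class CsvReader extends AbstractReader {

    public CsvReader(AbstractFile file, String localDiskPath)
            throws FileReaderInitException {
        // The super constructor copies the file contents to localDiskPath.
        super(file, localDiskPath);
        // Open localDiskPath here and wrap any failure in FileReaderInitException.
    }

    @Override
    public Map<String, String> getTableSchemas() throws FileReaderException {
        // A CSV file is a single "table" whose schema is its header line.
        return Collections.singletonMap("csv", "col1, col2");
    }

    @Override
    public Integer getRowCountFromTable(String tableName) throws FileReaderException {
        return 0;
    }

    @Override
    public List<Map<String, Object>> getRowsFromTable(String tableName)
            throws FileReaderException {
        return Collections.emptyList();
    }

    @Override
    public List<Map<String, Object>> getRowsFromTable(String tableName,
            int offset, int numRowsToRead) throws FileReaderException {
        return Collections.emptyList();
    }

    @Override
    public void close() {
        // Release any open streams; close() deliberately does not throw.
    }
}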

View File

@@ -0,0 +1,282 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.tabulardatareader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import org.apache.commons.lang3.StringUtils;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.DateUtil;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.AbstractFile;
import com.monitorjbl.xlsx.StreamingReader;
import org.apache.poi.hssf.OldExcelFormatException;
/**
* Reads Excel files and implements the abstract reader API for interfacing with
* their content. Supports .xls and .xlsx files.
*/
public final class ExcelReader extends AbstractReader {
/* Boilerplate code */
private final static IngestServices services = IngestServices.getInstance();
private final static Logger logger = services.getLogger(ExcelReader.class.getName());
private Workbook workbook;
private final static String XLSX_MIME_TYPE = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet";
private final static String XLS_MIME_TYPE = "application/vnd.ms-excel";
private final static String EMPTY_CELL_STRING = "";
private Map<String, Row> headerCache;
public ExcelReader(AbstractFile file, String localDiskPath, String mimeType)
throws FileReaderInitException {
super(file, localDiskPath);
try {
this.workbook = createWorkbook(localDiskPath, mimeType);
headerCache = new HashMap<>();
} catch (IOException ex) {
throw new FileReaderInitException(ex);
}
}
/**
* Internal factory for creating the correct workbook given the mime type. The
* file reader factory in this module passes both the XLSMimeType and XLSXMimeType
* into this constructor for the reader to handle. This avoids the need to create
* an AbstractExcelReader class and two subclasses overriding the workbook field.
* Additionally, support for more than these two mime types is not foreseen.
*
* @param localDiskPath To open an input stream for poi to read from
* @param mimeType The mimeType passed to the constructor
* @return The correct workbook instance
* @throws IOException Issue with input stream and opening file location at
* localDiskPath
* @throws FileReaderInitException mimetype unsupported
*/
private Workbook createWorkbook(String localDiskPath, String mimeType) throws
IOException, FileReaderInitException {
switch (mimeType) {
case XLS_MIME_TYPE:
try {
//Apache POI only supports BIFF8 format, anything below is considered
//old excel format and is not a concern for us.
return new HSSFWorkbook(new FileInputStream(new File(localDiskPath)));
} catch (OldExcelFormatException e) {
throw new FileReaderInitException(e);
}
case XLSX_MIME_TYPE:
//StreamingReader is part of the xlsx streamer dependency that creates
//a streaming version of XSSFWorkbook for reading (SXSSFWorkbook is only for writing
//large workbooks, not reading). This library provides a workbook interface
//that is mostly identical to the poi workbook api, hence both the HSSFWorkbook
//and this can use the same functions below.
return StreamingReader.builder().rowCacheSize(500).open(new File(localDiskPath));
default:
throw new FileReaderInitException(String.format("Excel reader for mime " +
"type [%s] is not supported", mimeType));
}
}
/**
* Returns the number of rows in a given excel table (aka sheet).
*
* @param tableName Name of table to count total rows from
* @return row count for requested table name
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
*/
@Override
public Integer getRowCountFromTable(String tableName) throws FileReaderException {
return workbook.getSheet(tableName).getLastRowNum();
}
/**
* Returns a collection of all the rows from a given table in an excel document.
*
* @param tableName Current sheet name being read
* @return A collection of row maps
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
*/
@Override
public List<Map<String, Object>> getRowsFromTable(String tableName) throws FileReaderException {
//Pad with + 1 because rows are zero-indexed, thus a LastRowNum() (in getRowCountFromTable()) of 1
//indicates that there are records in rows 0 and 1, and so a total row count of
//2. This also implies there is no way to determine if a workbook is empty,
//since a last row num of 0 doesn't differentiate between a record in row 0 or
//nothing in the workbook. Such is HSSF.
return getRowsFromTable(tableName, 0, getRowCountFromTable(tableName));
}
/**
* Returns a window of rows starting at the offset and ending when the number of rows read
* equals the 'numRowsToRead' parameter or the iterator has nothing left to read.
*
* For instance: offset 1, numRowsToRead 5 would return 5 results (1-5).
* offset 0, numRowsToRead 5 would return 5 results (0-4).
*
* @param tableName Current name of sheet to be read
* @param offset start index to begin reading (documents are 0 indexed)
* @param numRowsToRead number of rows to read
* @return List view of the rows in the requested window
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
*/
@Override
public List<Map<String, Object>> getRowsFromTable(String tableName,
int offset, int numRowsToRead) throws FileReaderException {
//StreamingReader maintains the same pointer to a sheet rowIterator, so this
//call returns an iterator that could have already been iterated on instead
//of a fresh copy. We must cache the header value from the call to
//getTableSchemas as important information in the first row could have been
//missed.
Iterator<Row> sheetIter = workbook.getSheet(tableName).rowIterator();
List<Map<String, Object>> rowList = new ArrayList<>();
//Read the header value as the header may be a row of data in the
//excel sheet
if(headerCache.containsKey(tableName)) {
Row header = headerCache.get(tableName);
if(header.getRowNum() >= offset
&& header.getRowNum() < (offset + numRowsToRead)) {
rowList.add(getRowMap(tableName, header));
}
}
while(sheetIter.hasNext()) {
Row currRow = sheetIter.next();
//If the current row number is within the window of our row capture
if(currRow.getRowNum() >= offset
&& currRow.getRowNum() < (offset + numRowsToRead)) {
rowList.add(getRowMap(tableName, currRow));
}
//if the current row number is at or past our upper bound
//of rows requested to be read.
if(currRow.getRowNum() >= (offset + numRowsToRead)) {
break;
}
}
return rowList;
}
private Map<String, Object> getRowMap(String tableName, Row row) {
Map<String, Object> rowMap = new HashMap<>();
for(Cell cell : row) {
String columnName = getColumnName(cell, tableName);
Object value = getCellValue(cell);
rowMap.put(columnName, value);
}
return rowMap;
}
/**
* Returns the value of a given cell. The correct value function must be
* called on a cell depending on its type, hence the switch.
*
* @param cell Cell object containing a getter function for its value type
* @return A generic object pointer to the cell's value
*/
private Object getCellValue(Cell cell){
switch (cell.getCellTypeEnum()) {
case BOOLEAN:
return cell.getBooleanCellValue();
case STRING:
return cell.getRichStringCellValue().getString();
case NUMERIC:
if (DateUtil.isCellDateFormatted(cell)) {
return cell.getDateCellValue();
} else {
return cell.getNumericCellValue();
}
case FORMULA:
return cell.getCellFormula();
default:
//Cell must be empty at this branch
return EMPTY_CELL_STRING;
}
}
/**
* Returns the name of the column that the cell currently lives in.
* For example, cell value 6784022342 -> header name: Phone Number
*
* @param cell current cell being read
* @param tableName current sheet name being read
* @return the name of the column the current cell lives in
*/
private String getColumnName(Cell cell, String tableName) {
if(headerCache.containsKey(tableName)) {
Row header = headerCache.get(tableName);
Cell columnHeaderCell = header.getCell(cell.getRowIndex());
if(columnHeaderCell == null) {
return EMPTY_CELL_STRING;
}
Object columnHeaderValue = getCellValue(columnHeaderCell);
return columnHeaderValue.toString();
}
//No header present
return EMPTY_CELL_STRING;
}
/**
* Returns a map of sheet names to headers (header is in a comma-separated string).
* Warning: Only call this ONCE per excel file.
*
* @return A map of sheet names to header strings.
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
*/
@Override
public Map<String, String> getTableSchemas() throws FileReaderException {
Map<String, String> tableSchemas = new HashMap<>();
for(Sheet sheet : workbook) {
Iterator<Row> iterator = sheet.rowIterator();
if(iterator.hasNext()) {
//Consume header
Row header = iterator.next();
headerCache.put(sheet.getSheetName(), header);
String headerStringFormat = StringUtils.join(header.cellIterator(), ", ");
tableSchemas.put(sheet.getSheetName(), headerStringFormat);
}
}
return tableSchemas;
}
@Override
public void close() {
try {
workbook.close();
} catch (IOException ex) {
//Non-essential exception, user has no need for the workbook
//at this stage so closing details are not important
logger.log(Level.WARNING, "Could not close excel file input stream", ex);
}
}
}
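
Putting ExcelReader together with the factory, the intended calling order is getTableSchemas() first (it primes the per-sheet header cache used by getColumnName()), then row reads per sheet. A short sketch, not commit code; xlsxFile, localDiskPath and logger are placeholders supplied by the caller:

// Sketch only; assumes the same imports ExcelReader uses above, plus
// AbstractReader and FileReaderFactory from this package.
private void dumpWorkbook(AbstractFile xlsxFile, String localDiskPath) {
    AbstractReader reader = null;
    try {
        reader = FileReaderFactory.createReader(
                "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
                xlsxFile, localDiskPath);
        // Call this first: it caches each sheet's header row for getColumnName().
        Map<String, String> sheets = reader.getTableSchemas();
        for (String sheetName : sheets.keySet()) {
            List<Map<String, Object>> rows = reader.getRowsFromTable(sheetName);
            // Each row maps a header cell value (or "" when no header was cached)
            // to the corresponding cell value.
            rows.forEach(row -> logger.log(Level.INFO, row.toString()));
        }
    } catch (AbstractReader.FileReaderInitException | AbstractReader.FileReaderException ex) {
        logger.log(Level.WARNING, "Could not read workbook", ex);
    } finally {
        if (reader != null) {
            reader.close();
        }
    }
}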

View File

@@ -0,0 +1,63 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.tabulardatareader;
import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException;
import org.sleuthkit.datamodel.AbstractFile;
/**
* Factory for creating the correct reader given the mime type of a file.
*/
public final class FileReaderFactory {
private FileReaderFactory() {
}
/**
* Instantiates the appropriate reader given the mimeType argument. Currently
* supports SQLite files and Excel files (.xls and .xlsx). BIFF5 format of .xls
* is not supported.
*
* @param mimeType mimeType passed in from the ingest module
* @param file current file under inspection
* @param localDiskPath path for abstract file contents to be written
* @return The correct reader class needed to read the file contents
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException
*/
public static AbstractReader createReader(String mimeType, AbstractFile file,
String localDiskPath) throws FileReaderInitException {
switch (mimeType) {
case "application/x-sqlite3":
return new SQLiteReader(file, localDiskPath);
case "application/vnd.ms-excel":
case "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":
try {
return new ExcelReader(file, localDiskPath, mimeType);
//Catches runtime exceptions being emitted from Apache
//POI (such as EncryptedDocumentException) and wraps them
//into FileReaderInitException to be caught and logged
//in the ingest module.
} catch(Exception poiInitException) {
throw new FileReaderInitException(poiInitException);
}
default:
throw new FileReaderInitException(String.format("Reader for mime "
+ "type [%s] is not supported", mimeType));
}
}
}
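
In an ingest module or viewer, the factory call typically follows the same temp-copy pattern SQLiteViewer uses above. A sketch, not commit code; file, mimeType and logger are whatever the calling module supplies:

// Sketch only; assumes the imports SQLiteViewer uses above.
private void readWithFactory(AbstractFile file, String mimeType) {
    try {
        String localDiskPath = Case.getCurrentCaseThrows().getTempDirectory()
                + File.separator + file.getName();
        AbstractReader reader = FileReaderFactory.createReader(mimeType, file, localDiskPath);
        try {
            // ... pull schemas and rows through the AbstractReader API ...
        } finally {
            reader.close();
        }
    } catch (NoCurrentCaseException | AbstractReader.FileReaderInitException ex) {
        // Covers an unsupported mime type, a wrapped POI runtime exception
        // (e.g. an encrypted workbook), or a failed JDBC/file-copy initialization.
        logger.log(Level.WARNING, "Could not create a tabular data reader", ex);
    }
}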

View File

@@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.sqlitereader;
package org.sleuthkit.autopsy.tabulardatareader;
import java.io.File;
import java.io.IOException;
@@ -31,22 +31,27 @@ import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.casemodule.services.Services;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Reads rows from SQLite tables and returns results in a list collection.
* Reads SQLite databases and returns results in a list collection.
*/
public class SQLiteReader implements AutoCloseable {
public final class SQLiteReader extends AbstractReader {
private final Connection connection;
private final static IngestServices services = IngestServices.getInstance();
private final static Logger logger = services.getLogger(SQLiteReader.class.getName());
/**
* Writes data source file contents to local disk and opens a sqlite JDBC
@@ -54,38 +59,19 @@ public class SQLiteReader implements AutoCloseable {
*
* @param sqliteDbFile Data source abstract file
* @param localDiskPath Location for database contents to be copied to
* @throws ClassNotFoundException missing SQLite JDBC class
* @throws SQLException Exception opening JDBC connection
* @throws IOException Exception writing file contents
* @throws NoCurrentCaseException Current case closed during file copying
* @throws TskCoreException Exception finding files from abstract file
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException
*/
public SQLiteReader(AbstractFile sqliteDbFile, String localDiskPath) throws ClassNotFoundException,
SQLException, IOException, NoCurrentCaseException, TskCoreException{
writeDataSourceToLocalDisk(sqliteDbFile, localDiskPath);
connection = getDatabaseConnection(localDiskPath);
}
/**
* Copies the data source file contents to local drive for processing.
*
* @param file AbstractFile from the data source
* @param localDiskPath Local drive path to copy AbstractFile contents
* @throws IOException Exception writing file contents
* @throws NoCurrentCaseException Current case closed during file copying
* @throws TskCoreException Exception finding files from abstract file
*/
private void writeDataSourceToLocalDisk(AbstractFile file, String localDiskPath)
throws IOException, NoCurrentCaseException, TskCoreException {
File localDatabaseFile = new File(localDiskPath);
if (!localDatabaseFile.exists()) {
ContentUtils.writeToFile(file, localDatabaseFile);
public SQLiteReader(AbstractFile sqliteDbFile, String localDiskPath) throws FileReaderInitException {
super(sqliteDbFile, localDiskPath);
try {
// Look for any meta files associated with this DB - WAL, SHM, etc.
findAndCopySQLiteMetaFile(file, file.getName() + "-wal");
findAndCopySQLiteMetaFile(file, file.getName() + "-shm");
findAndCopySQLiteMetaFile(sqliteDbFile, sqliteDbFile.getName() + "-wal");
findAndCopySQLiteMetaFile(sqliteDbFile, sqliteDbFile.getName() + "-shm");
connection = getDatabaseConnection(localDiskPath);
} catch (ClassNotFoundException | SQLException |IOException |
NoCurrentCaseException | TskCoreException ex) {
throw new FileReaderInitException(ex);
}
}
@@ -93,7 +79,7 @@ public class SQLiteReader implements AutoCloseable {
* Searches for a meta file associated with the given SQLite database. If found,
* copies the file to the local disk folder.
*
* @param sqliteFile SQLIte db file being processed
* @param sqliteFile file being processed
* @param metaFileName name of meta file to look for
* @throws NoCurrentCaseException Case has been closed.
* @throws TskCoreException fileManager cannot find AbstractFile files.
@@ -145,10 +131,10 @@ public class SQLiteReader implements AutoCloseable {
* CREATE TABLE statements).
*
* @return A map of table names to table schemas
* @throws SQLException
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
*/
public Map<String, String> getTableSchemas()
throws SQLException {
@Override
public Map<String, String> getTableSchemas() throws FileReaderException {
Map<String, String> dbTablesMap = new TreeMap<>();
@@ -158,11 +144,14 @@ public class SQLiteReader implements AutoCloseable {
+ " WHERE type= 'table' " //NON-NLS
+ " ORDER BY name;")){ //NON-NLS
while (resultSet.next()) {
String tableName = resultSet.getString("name"); //NON-NLS
String tableSQL = resultSet.getString("sql"); //NON-NLS
dbTablesMap.put(tableName, tableSQL);
}
while (resultSet.next()) {
String tableName = resultSet.getString("name"); //NON-NLS
String tableSQL = resultSet.getString("sql"); //NON-NLS
dbTablesMap.put(tableName, tableSQL);
}
} catch (SQLException ex) {
throw new FileReaderException(ex);
}
return dbTablesMap;
@@ -173,13 +162,18 @@ public class SQLiteReader implements AutoCloseable {
*
* @param tableName
* @return Row count from tableName
* @throws SQLException
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
*/
public Integer getTableRowCount(String tableName) throws SQLException {
@Override
public Integer getRowCountFromTable(String tableName)
throws FileReaderException {
String quotedTableName = wrapTableNameStringWithQuotes(tableName);
try (Statement statement = connection.createStatement();
ResultSet resultSet = statement.executeQuery(
"SELECT count (*) as count FROM " + tableName)){ //NON-NLS
"SELECT count (*) as count FROM " + quotedTableName)){ //NON-NLS
return resultSet.getInt("count"); //NON-NLS
} catch (SQLException ex) {
throw new FileReaderException(ex);
}
}
@@ -190,17 +184,21 @@ public class SQLiteReader implements AutoCloseable {
* @param tableName
* @return List of rows, where each row is
* represented as a column-value map.
* @throws SQLException
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
*/
public List<Map<String, Object>> getRowsFromTable(String tableName) throws SQLException {
@Override
public List<Map<String, Object>> getRowsFromTable(String tableName)
throws FileReaderException {
//This method does not directly call its overloaded counterpart
//since the second parameter would need to be retrieved from a call to
//getTableRowCount().
String quotedTableName = wrapTableNameStringWithQuotes(tableName);
try(Statement statement = connection.createStatement();
ResultSet resultSet = statement.executeQuery(
"SELECT * FROM " + tableName)) { //NON-NLS
"SELECT * FROM " + quotedTableName)) { //NON-NLS
return resultSetToList(resultSet);
} catch (SQLException ex) {
throw new FileReaderException(ex);
}
}
@@ -208,24 +206,39 @@ public class SQLiteReader implements AutoCloseable {
* Retrieves a subset of the rows from a given table in the SQLite database.
*
* @param tableName
* @param startRow Desired start index (rows begin at 1)
* @param offset Desired start index (rows begin at 1)
* @param numRowsToRead Number of rows past the start index
* @return List of rows, where each row is
* represented as a column-value map.
* @throws SQLException
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
*/
@Override
public List<Map<String, Object>> getRowsFromTable(String tableName,
int startRow, int numRowsToRead) throws SQLException{
int offset, int numRowsToRead) throws FileReaderException{
String quotedTableName = wrapTableNameStringWithQuotes(tableName);
try(Statement statement = connection.createStatement();
ResultSet resultSet = statement.executeQuery(
"SELECT * FROM " + tableName //NON-NLS
"SELECT * FROM " + quotedTableName //NON-NLS
+ " LIMIT " + Integer.toString(numRowsToRead) //NON-NLS
+ " OFFSET " + Integer.toString(startRow - 1))) { //NON-NLS
+ " OFFSET " + Integer.toString(offset - 1))) { //NON-NLS
return resultSetToList(resultSet);
} catch (SQLException ex) {
throw new FileReaderException(ex);
}
}
/**
* Wraps table name with quotation marks in case table name contains spaces.
* sqliteJDBC cannot read table names with spaces in them unless surrounded
* by quotation marks.
*
* @param tableName
* @return Input name: Result Table -> "Result Table"
*/
private String wrapTableNameStringWithQuotes(String tableName) {
return "\"" + tableName +"\"";
}
/**
* Converts a ResultSet (row results from a table read) into a list.
*
@@ -260,13 +273,18 @@ public class SQLiteReader implements AutoCloseable {
return rowMap;
}
/**
* Closes underlying JDBC connection.
*
* @throws SQLException
*/
@Override
public void close() throws SQLException {
connection.close();
public void close() {
try {
connection.close();
} catch (SQLException ex) {
//Non-essential exception, user has no need for the connection
//object at this stage so closing details are not important
logger.log(Level.WARNING, "Could not close JDBC connection", ex);
}
}
}
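
Two behavioral details of the reworked SQLiteReader are easy to miss in the hunks above: table names are quoted internally, so callers pass names containing spaces unescaped, and the windowed read keeps its 1-based start index (the SQL OFFSET is offset - 1). A short sketch, not commit code; sqliteDbFile, localDiskPath and logger are placeholders:

SQLiteReader reader = null;
try {
    reader = new SQLiteReader(sqliteDbFile, localDiskPath);
    // "Result Table" contains a space; the reader wraps it in quotes itself.
    int rowCount = reader.getRowCountFromTable("Result Table");
    // Rows 1-100 of the table (the start index is 1-based for this reader).
    List<Map<String, Object>> firstPage =
            reader.getRowsFromTable("Result Table", 1, 100);
    // ... hand rowCount and firstPage to the UI ...
} catch (AbstractReader.FileReaderInitException ex) {
    logger.log(Level.WARNING, "Could not open SQLite database", ex);
} catch (AbstractReader.FileReaderException ex) {
    // Wraps the SQLException thrown by the underlying JDBC call.
    logger.log(Level.WARNING, "Could not query SQLite database", ex);
} finally {
    if (reader != null) {
        reader.close();
    }
}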

View File

@@ -0,0 +1,26 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.sleuthkit.autopsy.testutils;
//import junit.framework.Test;
//import org.netbeans.junit.NbModuleSuite;
/**
*
* @author dsmyda
*/
public final class SuiteUtils {
/*
public static Test createSuite(Class cls) {
NbModuleSuite.Configuration conf = NbModuleSuite.createConfiguration(cls).
clusters(".*").
enableModules(".*");
return conf.suite();
}
*/
}
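
If the commented-out helper above is ever enabled, a functional test would hook into it through the conventional static suite() method. A hypothetical example (the test class name is made up; nothing below is part of this commit):

import junit.framework.Test;
import org.netbeans.junit.NbTestCase;
import org.sleuthkit.autopsy.testutils.SuiteUtils;

public class TabularDataReaderFunctionalTest extends NbTestCase {

    public TabularDataReaderFunctionalTest(String name) {
        super(name);
    }

    // The NetBeans/JUnit 3 runner picks the suite up from this factory method.
    public static Test suite() {
        return SuiteUtils.createSuite(TabularDataReaderFunctionalTest.class);
    }

    public void testReadersAgainstTestImage() {
        // ... exercise the tabulardatareader classes here ...
    }
}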