Merge branch 'develop' of https://github.com/sleuthkit/autopsy into develop
@@ -26,7 +26,6 @@
 
 <dependency conf="core->default" org="org.apache.commons" name="commons-dbcp2" rev="2.1.1"/>
 <dependency conf="core->default" org="org.apache.commons" name="commons-pool2" rev="2.4.2"/>
-<dependency org="com.monitorjbl" name="xlsx-streamer" rev="1.2.1"/>
 
 <dependency conf="core->default" org="org.jsoup" name="jsoup" rev="1.10.3"/>
 <dependency conf="core->default" org="com.googlecode.plist" name="dd-plist" rev="1.20"/>
@@ -35,7 +35,6 @@ file.reference.tika-parsers-1.17.jar=release/modules/ext/tika-parsers-1.17.jar
 file.reference.curator-client-2.8.0.jar=release/modules/ext/curator-client-2.8.0.jar
 file.reference.curator-framework-2.8.0.jar=release/modules/ext/curator-framework-2.8.0.jar
 file.reference.curator-recipes-2.8.0.jar=release/modules/ext/curator-recipes-2.8.0.jar
-file.reference.xlsx-streamer-1.2.1.jar=release/modules/ext/xlsx-streamer-1.2.1.jar
 file.reference.xmpcore-5.1.3.jar=release/modules/ext/xmpcore-5.1.3.jar
 file.reference.xz-1.6.jar=release/modules/ext/xz-1.6.jar
 file.reference.zookeeper-3.4.6.jar=release/modules/ext/zookeeper-3.4.6.jar
@@ -497,10 +497,6 @@
 <runtime-relative-path>ext/SparseBitSet-1.1.jar</runtime-relative-path>
 <binary-origin>release/modules/ext/SparseBitSet-1.1.jar</binary-origin>
 </class-path-extension>
-<class-path-extension>
-<runtime-relative-path>ext/xlsx-streamer-1.2.1.jar</runtime-relative-path>
-<binary-origin>release/modules/ext/xlsx-streamer-1.2.1.jar</binary-origin>
-</class-path-extension>
 <class-path-extension>
 <runtime-relative-path>ext/pdfbox-2.0.8.jar</runtime-relative-path>
 <binary-origin>release/modules/ext/pdfbox-2.0.8.jar</binary-origin>
@@ -850,7 +850,7 @@ public class SingleUserCaseConverter {
 // content_tags
 biggestPK = 0;
 inputStatement = sqliteConnection.createStatement();
-inputResultSet = inputStatement.executeQuery("SELECT * FROM content_tags"); //NON-NLS
+inputResultSet = inputStatement.executeQuery("SELECT * FROM content_tags LEFT OUTER JOIN tsk_examiners ON content_tags.examiner_id = tsk_examiners.examiner_id"); //NON-NLS
 
 while (inputResultSet.next()) {
 outputStatement = postgreSQLConnection.createStatement();
@@ -859,14 +859,14 @@ public class SingleUserCaseConverter {
 if (value > biggestPK) {
 biggestPK = value;
 }
-outputStatement.executeUpdate("INSERT INTO content_tags (tag_id, obj_id, tag_name_id, comment, begin_byte_offset, end_byte_offset, user_name) VALUES (" //NON-NLS
+outputStatement.executeUpdate("INSERT INTO content_tags (tag_id, obj_id, tag_name_id, comment, begin_byte_offset, end_byte_offset, examiner_id) VALUES (" //NON-NLS
 + value + ","
 + inputResultSet.getLong(2) + ","
 + inputResultSet.getLong(3) + ",'"
 + inputResultSet.getString(4) + "',"
 + inputResultSet.getLong(5) + ","
-+ inputResultSet.getLong(6) + ",'"
-+ inputResultSet.getString(7)+ "')"); //NON-NLS
++ inputResultSet.getLong(6) + ","
++ inputResultSet.getInt(7) + ")"); //NON-NLS
 
 } catch (SQLException ex) {
 if (ex.getErrorCode() != 0) { // 0 if the entry already exists
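
For readers tracking the schema change above: the multi-user conversion now writes an examiner_id (resolved through tsk_examiners) where it previously wrote a user_name string. The following is a minimal, self-contained sketch of that copy loop using a PreparedStatement; the SELECT/INSERT text and column order come from the hunk, while the class name, method name, and the prepared-statement style are illustrative and not the converter's actual code.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

class ContentTagsMigrationSketch {

    // Copies content_tags rows from SQLite to PostgreSQL, resolving examiner_id through tsk_examiners.
    static void migrateContentTags(Connection sqliteConnection, Connection postgreSQLConnection) throws SQLException {
        String select = "SELECT * FROM content_tags LEFT OUTER JOIN tsk_examiners "
                + "ON content_tags.examiner_id = tsk_examiners.examiner_id";
        String insert = "INSERT INTO content_tags "
                + "(tag_id, obj_id, tag_name_id, comment, begin_byte_offset, end_byte_offset, examiner_id) "
                + "VALUES (?, ?, ?, ?, ?, ?, ?)";
        try (Statement inputStatement = sqliteConnection.createStatement();
                ResultSet rs = inputStatement.executeQuery(select);
                PreparedStatement out = postgreSQLConnection.prepareStatement(insert)) {
            while (rs.next()) {
                out.setLong(1, rs.getLong(1));     // tag_id
                out.setLong(2, rs.getLong(2));     // obj_id
                out.setLong(3, rs.getLong(3));     // tag_name_id
                out.setString(4, rs.getString(4)); // comment
                out.setLong(5, rs.getLong(5));     // begin_byte_offset
                out.setLong(6, rs.getLong(6));     // end_byte_offset
                out.setInt(7, rs.getInt(7));       // examiner_id resolved by the join
                out.executeUpdate();
            }
        }
    }
}

Binding parameters this way also avoids the quoting problems that string concatenation runs into when a comment contains an apostrophe.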
@@ -21,6 +21,8 @@ package org.sleuthkit.autopsy.centralrepository;
 import java.awt.event.ActionEvent;
 import java.util.logging.Level;
 import javax.swing.AbstractAction;
+import javax.swing.Action;
+import org.apache.commons.lang.StringUtils;
 import org.openide.DialogDisplayer;
 import org.openide.NotifyDescriptor;
 import org.openide.util.NbBundle.Messages;
@@ -37,9 +39,9 @@ import org.sleuthkit.datamodel.AbstractFile;
 * An AbstractAction to manage adding and modifying a Central Repository file
 * instance comment.
 */
-@Messages({"AddEditCentralRepoCommentAction.menuItemText.addEditCentralRepoComment=Add/Edit Central Repository Comment"})
+@Messages({"AddEditCentralRepoCommentAction.menuItemText.addEditCentralRepoCommentEmptyFile=Add/Edit Central Repository Comment (Empty File)",
+"AddEditCentralRepoCommentAction.menuItemText.addEditCentralRepoCommentNoMD5=Add/Edit Central Repository Comment (No MD5 Hash)",
+"AddEditCentralRepoCommentAction.menuItemText.addEditCentralRepoComment=Add/Edit Central Repository Comment"})
 public final class AddEditCentralRepoCommentAction extends AbstractAction {
 
 private static final Logger logger = Logger.getLogger(AddEditCentralRepoCommentAction.class.getName());
@@ -58,14 +60,19 @@ public final class AddEditCentralRepoCommentAction extends AbstractAction {
 *
 */
 public AddEditCentralRepoCommentAction(AbstractFile file) {
-super(Bundle.AddEditCentralRepoCommentAction_menuItemText_addEditCentralRepoComment());
 fileId = file.getId();
 correlationAttributeInstance = EamArtifactUtil.getInstanceFromContent(file);
 if (correlationAttributeInstance == null) {
 addToDatabase = true;
 correlationAttributeInstance = EamArtifactUtil.makeInstanceFromContent(file);
 }
+if (file.getSize() == 0) {
+putValue(Action.NAME, Bundle.AddEditCentralRepoCommentAction_menuItemText_addEditCentralRepoCommentEmptyFile());
+} else if (StringUtils.isBlank(file.getMd5Hash())) {
+putValue(Action.NAME, Bundle.AddEditCentralRepoCommentAction_menuItemText_addEditCentralRepoCommentNoMD5());
+} else {
+putValue(Action.NAME, Bundle.AddEditCentralRepoCommentAction_menuItemText_addEditCentralRepoComment());
+}
 }
 
 /**

Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.java (246 changed lines; Normal file → Executable file)
@@ -22,16 +22,24 @@ import java.awt.BorderLayout;
 import java.awt.Component;
 import java.awt.Cursor;
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.logging.Level;
-import java.util.stream.Collectors;
 import javax.swing.JComboBox;
 import javax.swing.JFileChooser;
 import javax.swing.JOptionPane;
@@ -43,11 +51,9 @@ import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.TskCoreException;
 import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
-import org.sleuthkit.autopsy.tabulardatareader.AbstractReader;
-import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException;
-import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException;
-import org.sleuthkit.autopsy.tabulardatareader.FileReaderFactory;
+import org.sleuthkit.autopsy.coreutils.SqliteUtil;
 
 /**
 * A file content viewer for SQLite database files.
@@ -62,7 +68,7 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer {
 private final SQLiteTableView selectedTableView = new SQLiteTableView();
 private AbstractFile sqliteDbFile;
 private File tmpDbFile;
-private AbstractReader sqliteReader;
+private Connection connection;
 private int numRows; // num of rows in the selected table
 private int currPage = 0; // curr page of rows being displayed
 
@@ -339,9 +345,13 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer {
 numEntriesField.setText("");
 
 // close DB connection to file
-if (null != sqliteReader) {
-sqliteReader.close();
-sqliteReader = null;
+if (null != connection) {
+try {
+connection.close();
+connection = null;
+} catch (SQLException ex) {
+logger.log(Level.SEVERE, "Failed to close DB connection to file.", ex); //NON-NLS
+}
 }
 
 sqliteDbFile = null;
@@ -359,39 +369,65 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer {
 "SQLiteViewer.errorMessage.failedToinitJDBCDriver=The JDBC driver for SQLite could not be loaded.",
 "# {0} - exception message", "SQLiteViewer.errorMessage.unexpectedError=An unexpected error occurred:\n{0).",})
 private void processSQLiteFile() {
 
 tablesDropdownList.removeAllItems();
 
 try {
-sqliteReader = FileReaderFactory.createReader(sqliteDbFile, SUPPORTED_MIMETYPES[0]);
-Map<String, String> dbTablesMap = sqliteReader.getTableSchemas();
+String localDiskPath = SqliteUtil.writeAbstractFileToLocalDisk(sqliteDbFile);
+SqliteUtil.findAndCopySQLiteMetaFile(sqliteDbFile);
+// Load the SQLite JDBC driver, if necessary.
+Class.forName("org.sqlite.JDBC"); //NON-NLS
+connection = DriverManager.getConnection("jdbc:sqlite:" + localDiskPath); //NON-NLS
+
+Collection<String> dbTablesMap = getTables();
 if (dbTablesMap.isEmpty()) {
 tablesDropdownList.addItem(Bundle.SQLiteViewer_comboBox_noTableEntry());
 tablesDropdownList.setEnabled(false);
 } else {
-dbTablesMap.keySet().forEach((tableName) -> {
+dbTablesMap.forEach((tableName) -> {
 tablesDropdownList.addItem(tableName);
 });
 }
-} catch (FileReaderException ex) {
-logger.log(Level.SEVERE, String.format(
-"Failed to get tables from DB file '%s' (objId=%d)", //NON-NLS
-sqliteDbFile.getName(), sqliteDbFile.getId()), ex);
-MessageNotifyUtil.Message.error(
-Bundle.SQLiteViewer_errorMessage_failedToQueryDatabase());
-} catch (FileReaderInitException ex) {
-logger.log(Level.SEVERE, String.format(
-"Failed to create a SQLiteReader for file: '%s' (objId=%d)", //NON-NLS
-sqliteDbFile.getName(), sqliteDbFile.getId()), ex);
+} catch (ClassNotFoundException ex) {
+logger.log(Level.SEVERE, String.format("Failed to initialize JDBC SQLite '%s' (objId=%d)", sqliteDbFile.getName(), sqliteDbFile.getId()), ex); //NON-NLS
+MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_errorMessage_failedToinitJDBCDriver());
+} catch (SQLException ex) {
+logger.log(Level.SEVERE, String.format("Failed to get tables from DB file '%s' (objId=%d)", sqliteDbFile.getName(), sqliteDbFile.getId()), ex); //NON-NLS
+MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_errorMessage_failedToQueryDatabase());
+} catch (IOException | NoCurrentCaseException | TskCoreException ex) {
+logger.log(Level.SEVERE, String.format("Failed to create temp copy of DB file '%s' (objId=%d)", sqliteDbFile.getName(), sqliteDbFile.getId()), ex); //NON-NLS
+MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_errorMessage_failedToExtractFile());
 }
 }
 
+/**
+ * Gets a collection of table names from the SQLite database file.
+ *
+ * @return A collection of table names
+ */
+private Collection<String> getTables() throws SQLException {
+Collection<String> tableNames = new LinkedList<>();
+try (Statement statement = connection.createStatement();
+ResultSet resultSet = statement.executeQuery(
+"SELECT name FROM sqlite_master "
++ " WHERE type= 'table' ")){
+while (resultSet.next()) {
+tableNames.add(resultSet.getString("name")); //NON-NLS
+}
+}
+return tableNames;
+}
+
 @NbBundle.Messages({"# {0} - tableName",
 "SQLiteViewer.selectTable.errorText=Error getting row count for table: {0}"
 })
 private void selectTable(String tableName) {
-try {
-numRows = sqliteReader.getRowCountFromTable(tableName);
+try (Statement statement = connection.createStatement();
+ResultSet resultSet = statement.executeQuery(
+"SELECT count (*) as count FROM " + "\"" + tableName + "\"")) { //NON-NLS{
+
+numRows = resultSet.getInt("count");
 numEntriesField.setText(numRows + " entries");
 
 currPage = 1;
@@ -410,12 +446,9 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer {
 selectedTableView.setupTable(Collections.emptyList());
 }
 
-} catch (FileReaderException ex) {
-logger.log(Level.SEVERE, String.format(
-"Failed to load table %s from DB file '%s' (objId=%d)", tableName, //NON-NLS
-sqliteDbFile.getName(), sqliteDbFile.getId()), ex);
-MessageNotifyUtil.Message.error(
-Bundle.SQLiteViewer_selectTable_errorText(tableName));
+} catch (SQLException ex) {
+logger.log(Level.SEVERE, String.format("Failed to load table %s from DB file '%s' (objId=%d)", tableName, sqliteDbFile.getName(), sqliteDbFile.getId()), ex); //NON-NLS
+MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_selectTable_errorText(tableName));
 }
 }
 
@@ -423,40 +456,63 @@
 "SQLiteViewer.readTable.errorText=Error getting rows for table: {0}"})
 private void readTable(String tableName, int startRow, int numRowsToRead) {
 
-try {
-List<Map<String, Object>> rows = sqliteReader.getRowsFromTable(
-tableName, startRow, numRowsToRead);
+try (
+Statement statement = connection.createStatement();
+ResultSet resultSet = statement.executeQuery(
+"SELECT * FROM " + "\"" + tableName + "\""
++ " LIMIT " + Integer.toString(numRowsToRead)
++ " OFFSET " + Integer.toString(startRow - 1))) {
+
+List<Map<String, Object>> rows = resultSetToArrayList(resultSet);
 if (Objects.nonNull(rows)) {
 selectedTableView.setupTable(rows);
 } else {
 selectedTableView.setupTable(Collections.emptyList());
 }
-} catch (FileReaderException ex) {
-logger.log(Level.SEVERE, String.format(
-"Failed to read table %s from DB file '%s' (objId=%d)", tableName, //NON-NLS
-sqliteDbFile.getName(), sqliteDbFile.getId()), ex);
-MessageNotifyUtil.Message.error(
-Bundle.SQLiteViewer_readTable_errorText(tableName));
+} catch (SQLException ex) {
+logger.log(Level.SEVERE, String.format("Failed to read table %s from DB file '%s' (objId=%d)", tableName, sqliteDbFile.getName(), sqliteDbFile.getId()), ex); //NON-NLS
+MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_readTable_errorText(tableName));
 }
 }
 
-/**
- * Converts a sqlite table into a CSV file.
- *
- * @param file
- * @param tableName
- * @param rowMap A list of rows in the table, where each row is represented as a column-value
- * map.
- * @throws FileNotFoundException
- * @throws IOException
- */
-@NbBundle.Messages({
+@NbBundle.Messages("SQLiteViewer.BlobNotShown.message=BLOB Data not shown")
+private List<Map<String, Object>> resultSetToArrayList(ResultSet resultSet) throws SQLException {
+ResultSetMetaData metaData = resultSet.getMetaData();
+int columns = metaData.getColumnCount();
+ArrayList<Map<String, Object>> rowlist = new ArrayList<>();
+while (resultSet.next()) {
+Map<String, Object> row = new LinkedHashMap<>(columns);
+for (int i = 1; i <= columns; ++i) {
+if (resultSet.getObject(i) == null) {
+row.put(metaData.getColumnName(i), "");
+} else {
+if (metaData.getColumnTypeName(i).compareToIgnoreCase("blob") == 0) {
+row.put(metaData.getColumnName(i), Bundle.SQLiteViewer_BlobNotShown_message());
+} else {
+row.put(metaData.getColumnName(i), resultSet.getObject(i));
+}
+}
+}
+rowlist.add(row);
+}
+
+return rowlist;
+}
+
+@NbBundle.Messages({"SQLiteViewer.exportTableToCsv.write.errText=Failed to export table content to csv file.",
 "SQLiteViewer.exportTableToCsv.FileName=File name: ",
 "SQLiteViewer.exportTableToCsv.TableName=Table name: "
 })
-public void exportTableToCSV(File file, String tableName,
-List<Map<String, Object>> rowMap) throws FileNotFoundException, IOException{
+private void exportTableToCsv(File file) {
+String tableName = (String) this.tablesDropdownList.getSelectedItem();
+try (
+Statement statement = connection.createStatement();
+ResultSet resultSet = statement.executeQuery("SELECT * FROM " + "\"" + tableName + "\"")) {
+List<Map<String, Object>> currentTableRows = resultSetToArrayList(resultSet);
+
+if (Objects.isNull(currentTableRows) || currentTableRows.isEmpty()) {
+logger.log(Level.INFO, String.format("The table %s is empty. (objId=%d)", tableName, sqliteDbFile.getId())); //NON-NLS
+} else {
 File csvFile;
 String fileName = file.getName();
 if (FilenameUtils.getExtension(fileName).equalsIgnoreCase("csv")) {
@@ -469,62 +525,40 @@
 
 out.write((Bundle.SQLiteViewer_exportTableToCsv_FileName() + csvFile.getName() + "\n").getBytes());
 out.write((Bundle.SQLiteViewer_exportTableToCsv_TableName() + tableName + "\n").getBytes());
-String header = createColumnHeader(rowMap.get(0)).concat("\n");
-out.write(header.getBytes());
-
-for (Map<String, Object> maps : rowMap) {
-String row = maps.values()
-.stream()
-.map(Object::toString)
-.collect(Collectors.joining(","))
-.concat("\n");
-out.write(row.getBytes());
-}
-}
-}
-
-@NbBundle.Messages({
-"SQLiteViewer.exportTableToCsv.write.errText=Failed to export table content to csv file.",
-})
-private void exportTableToCsv(File file) {
-String tableName = (String) this.tablesDropdownList.getSelectedItem();
-try {
-List<Map<String, Object>> currentTableRows =
-sqliteReader.getRowsFromTable(tableName);
-
-if (Objects.isNull(currentTableRows) || currentTableRows.isEmpty()) {
-logger.log(Level.INFO, String.format(
-"The table %s is empty. (objId=%d)", tableName, //NON-NLS
-sqliteDbFile.getId()));
+// Set up the column names
+Map<String, Object> row = currentTableRows.get(0);
+StringBuffer header = new StringBuffer();
+for (Map.Entry<String, Object> col : row.entrySet()) {
+String colName = col.getKey();
+if (header.length() > 0) {
+header.append(',').append(colName);
 } else {
-exportTableToCSV(file, tableName, currentTableRows);
+header.append(colName);
 }
-} catch (FileReaderException ex) {
-logger.log(Level.SEVERE, String.format(
-"Failed to read table %s from DB file '%s' (objId=%d)", //NON-NLS
-tableName, sqliteDbFile.getName(), sqliteDbFile.getId()), ex);
-MessageNotifyUtil.Message.error(
-Bundle.SQLiteViewer_readTable_errorText(tableName));
+}
+out.write(header.append('\n').toString().getBytes());
+
+for (Map<String, Object> maps : currentTableRows) {
+StringBuffer valueLine = new StringBuffer();
+maps.values().forEach((value) -> {
+if (valueLine.length() > 0) {
+valueLine.append(',').append(value.toString());
+} else {
+valueLine.append(value.toString());
+}
+});
+out.write(valueLine.append('\n').toString().getBytes());
+}
+}
+}
+} catch (SQLException ex) {
+logger.log(Level.SEVERE, String.format("Failed to read table %s from DB file '%s' (objId=%d)", tableName, sqliteDbFile.getName(), sqliteDbFile.getId()), ex); //NON-NLS
+MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_readTable_errorText(tableName));
 } catch (IOException ex) {
-logger.log(Level.SEVERE, String.format(
-"Failed to export table %s to file '%s'", tableName, file.getName()), ex); //NON-NLS
-MessageNotifyUtil.Message.error(
-Bundle.SQLiteViewer_exportTableToCsv_write_errText());
+logger.log(Level.SEVERE, String.format("Failed to export table %s to file '%s'", tableName, file.getName()), ex); //NON-NLS
+MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_exportTableToCsv_write_errText());
 }
 }
 
-/**
- * Returns a comma seperated header string from the keys of the column
- * row map.
- *
- * @param row column header row map
- * @return comma seperated header string
- */
-private String createColumnHeader(Map<String, Object> row) {
-return row.entrySet()
-.stream()
-.map(Map.Entry::getKey)
-.collect(Collectors.joining(","));
-}
 }
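
Taken together, the SQLiteViewer hunks above replace the tabulardatareader-based AbstractReader with a plain JDBC workflow: copy the database to local disk, open a connection through the org.sqlite.JDBC driver, list tables from sqlite_master, and page rows with LIMIT/OFFSET. The sketch below restates that flow outside the Swing panel so it can be read end to end; it assumes the sqlite-jdbc driver is on the classpath and takes an already-copied local file path as input, and the class and method names are illustrative only.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

class SqliteJdbcSketch {

    public static void main(String[] args) throws ClassNotFoundException, SQLException {
        String localDiskPath = args[0]; // path to a locally copied SQLite database
        Class.forName("org.sqlite.JDBC"); // load the driver, as processSQLiteFile() does
        try (Connection connection = DriverManager.getConnection("jdbc:sqlite:" + localDiskPath)) {
            for (String table : getTables(connection)) {
                System.out.println(table + ": " + readPage(connection, table, 1, 100) + " rows in first page");
            }
        }
    }

    // Mirrors getTables(): table names come from sqlite_master.
    static List<String> getTables(Connection connection) throws SQLException {
        List<String> tableNames = new ArrayList<>();
        try (Statement statement = connection.createStatement();
                ResultSet resultSet = statement.executeQuery(
                        "SELECT name FROM sqlite_master WHERE type = 'table'")) {
            while (resultSet.next()) {
                tableNames.add(resultSet.getString("name"));
            }
        }
        return tableNames;
    }

    // Mirrors readTable(): pages rows with LIMIT/OFFSET; startRow is 1-based.
    static int readPage(Connection connection, String tableName, int startRow, int numRowsToRead) throws SQLException {
        int rows = 0;
        try (Statement statement = connection.createStatement();
                ResultSet resultSet = statement.executeQuery(
                        "SELECT * FROM \"" + tableName + "\""
                        + " LIMIT " + numRowsToRead
                        + " OFFSET " + (startRow - 1))) {
            while (resultSet.next()) {
                rows++;
            }
        }
        return rows;
    }
}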
@@ -188,6 +188,6 @@ ViewPreferencesPanel.dataSourcesHideSlackCheckbox.text=Data Sources area (the di
 ViewPreferencesPanel.viewsHideSlackCheckbox.text=Views area
 ViewPreferencesPanel.currentSessionSettingsPanel.border.title=Current Session Settings
 ViewPreferencesPanel.hideRejectedResultsCheckbox.text=Hide rejected results
-ViewPreferencesPanel.hideOtherUsersTagsLabel.text=Hide other user's tags in the:
+ViewPreferencesPanel.hideOtherUsersTagsLabel.text=Hide other users' tags in the:
 ViewPreferencesPanel.centralRepoLabel.text=Do not use Central Repository for:
 ViewPreferencesPanel.commentsOccurencesColumnsCheckbox.text=C(omments) and O(ccurences) columns to reduce loading times

Core/src/org/sleuthkit/autopsy/coreutils/SqliteUtil.java (new Executable file, 130 lines)
@@ -0,0 +1,130 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2018-2018 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.coreutils;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.casemodule.services.FileManager;
+import org.sleuthkit.autopsy.casemodule.services.Services;
+import org.sleuthkit.autopsy.datamodel.ContentUtils;
+import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.TskCoreException;
+
+/**
+ * Sqlite utility class. Find and copy metafiles, write sqlite abstract files to
+ * temp directory, and generate unique temp directory paths.
+ */
+public final class SqliteUtil {
+
+private SqliteUtil() {
+
+}
+
+/**
+ * Overloaded implementation of
+ * {@link #findAndCopySQLiteMetaFile(AbstractFile, String) findAndCopySQLiteMetaFile}
+ * , automatically tries to copy -wal and -shm files without needing to know
+ * their existence.
+ *
+ * @param sqliteFile file which has -wal and -shm meta files
+ *
+ * @throws NoCurrentCaseException Case has been closed.
+ * @throws TskCoreException fileManager cannot find AbstractFile
+ * files.
+ * @throws IOException Issue during writing to file.
+ */
+public static void findAndCopySQLiteMetaFile(AbstractFile sqliteFile)
+throws NoCurrentCaseException, TskCoreException, IOException {
+
+findAndCopySQLiteMetaFile(sqliteFile, sqliteFile.getName() + "-wal");
+findAndCopySQLiteMetaFile(sqliteFile, sqliteFile.getName() + "-shm");
+}
+
+/**
+ * Searches for a meta file associated with the give SQLite database. If
+ * found, it copies this file into the temp directory of the current case.
+ *
+ * @param sqliteFile file being processed
+ * @param metaFileName name of meta file to look for
+ *
+ * @throws NoCurrentCaseException Case has been closed.
+ * @throws TskCoreException fileManager cannot find AbstractFile
+ * files.
+ * @throws IOException Issue during writing to file.
+ */
+public static void findAndCopySQLiteMetaFile(AbstractFile sqliteFile,
+String metaFileName) throws NoCurrentCaseException, TskCoreException, IOException {
+
+Case openCase = Case.getCurrentCaseThrows();
+SleuthkitCase sleuthkitCase = openCase.getSleuthkitCase();
+Services services = new Services(sleuthkitCase);
+FileManager fileManager = services.getFileManager();
+
+List<AbstractFile> metaFiles = fileManager.findFiles(
+sqliteFile.getDataSource(), metaFileName,
+sqliteFile.getParent().getName());
+
+if (metaFiles != null) {
+for (AbstractFile metaFile : metaFiles) {
+writeAbstractFileToLocalDisk(metaFile);
+}
+}
+}
+
+/**
+ * Copies the file contents into a unique path in the current case temp
+ * directory.
+ *
+ * @param file AbstractFile from the data source
+ *
+ * @return The path of the file on disk
+ *
+ * @throws IOException Exception writing file contents
+ * @throws NoCurrentCaseException Current case closed during file copying
+ */
+public static String writeAbstractFileToLocalDisk(AbstractFile file)
+throws IOException, NoCurrentCaseException {
+
+String localDiskPath = getUniqueTempDirectoryPath(file);
+File localDatabaseFile = new File(localDiskPath);
+if (!localDatabaseFile.exists()) {
+ContentUtils.writeToFile(file, localDatabaseFile);
+}
+return localDiskPath;
+}
+
+/**
+ * Generates a unique local disk path that resides in the temp directory of
+ * the current case.
+ *
+ * @param file The database abstract file
+ *
+ * @return Unique local disk path living in the temp directory of the case
+ *
+ * @throws org.sleuthkit.autopsy.casemodule.NoCurrentCaseException
+ */
+public static String getUniqueTempDirectoryPath(AbstractFile file) throws NoCurrentCaseException {
+return Case.getCurrentCaseThrows().getTempDirectory()
++ File.separator + file.getId() + file.getName();
+}
+}
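
A brief usage sketch for the new utility, mirroring how SQLiteViewer.processSQLiteFile() calls it above: write the database (and any -wal/-shm side files) into the case temp directory, then open a JDBC connection on the local copy. The wrapper class and method below are illustrative; only the SqliteUtil calls, the exception list, and the driver name come from the diff.

import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.SqliteUtil;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.TskCoreException;

class SqliteUtilUsageSketch {

    // Copies the database and its meta files into the case temp directory, then opens the local copy over JDBC.
    static Connection openLocalCopy(AbstractFile sqliteDbFile)
            throws IOException, NoCurrentCaseException, TskCoreException, SQLException, ClassNotFoundException {
        String localDiskPath = SqliteUtil.writeAbstractFileToLocalDisk(sqliteDbFile);
        SqliteUtil.findAndCopySQLiteMetaFile(sqliteDbFile); // picks up -wal and -shm if they exist
        Class.forName("org.sqlite.JDBC");
        return DriverManager.getConnection("jdbc:sqlite:" + localDiskPath);
    }
}

Copying the -wal/-shm files alongside the database matters because a write-ahead log that is missing on disk can leave the local copy without recently committed rows.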
@@ -61,6 +61,7 @@ import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
 import static org.sleuthkit.autopsy.datamodel.DisplayableItemNode.findLinked;
 import org.sleuthkit.autopsy.corecomponents.DataResultViewerTable.HasCommentStatus;
+import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager;
 import org.sleuthkit.autopsy.timeline.actions.ViewArtifactInTimelineAction;
 import org.sleuthkit.autopsy.timeline.actions.ViewFileInTimelineAction;
 import org.sleuthkit.datamodel.AbstractFile;
@@ -637,6 +638,22 @@ public class BlackboardArtifactNode extends AbstractContentNode<BlackboardArtifa
 description = Bundle.BlackboardArtifactNode_createSheet_notableFile_description();
 }
 }
+//if the artifact being viewed is a hashhit check if the hashset is notable
+if ((score == Score.NO_SCORE || score == Score.INTERESTING_SCORE) && content.getArtifactTypeID() == ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID()) {
+try {
+BlackboardAttribute attr = content.getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_SET_NAME));
+List<HashDbManager.HashDb> notableHashsets = HashDbManager.getInstance().getKnownBadFileHashSets();
+for (HashDbManager.HashDb hashDb : notableHashsets) {
+if (hashDb.getHashSetName().equals(attr.getValueString())) {
+score = Score.NOTABLE_SCORE;
+break;
+}
+}
+} catch (TskCoreException ex) {
+//unable to get the attribute so we can not update the status based on the attribute
+logger.log(Level.WARNING, "Unable to get TSK_SET_NAME attribute for artifact of type TSK_HASHSET_HIT with artifact ID " + content.getArtifactID(), ex);
+}
+}
 try {
 if (score == Score.NO_SCORE && !content.getArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT).isEmpty()) {
 score = Score.INTERESTING_SCORE;
@@ -679,8 +696,7 @@ public class BlackboardArtifactNode extends AbstractContentNode<BlackboardArtifa
 }
 } catch (EamDbException ex) {
 logger.log(Level.WARNING, "Error getting count of datasources with correlation attribute", ex);
-}
-catch (CorrelationAttributeNormalizationException ex) {
+} catch (CorrelationAttributeNormalizationException ex) {
 logger.log(Level.WARNING, "Unable to normalize data to get count of datasources with correlation attribute", ex);
 }
 sheetSet.put(
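
The new scoring logic above amounts to a predicate: a TSK_HASHSET_HIT artifact is treated as notable when its TSK_SET_NAME attribute names one of the configured known-bad hash sets. Below is a small sketch of that check pulled out as a standalone helper; the helper class and the null guard are illustrative additions, while the HashDbManager and attribute calls are the ones used in the hunk.

import java.util.List;
import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.TskCoreException;

class HashSetNotabilitySketch {

    // Returns true when the TSK_HASHSET_HIT artifact references a known-bad (notable) hash set.
    static boolean isNotableHashSetHit(BlackboardArtifact artifact) throws TskCoreException {
        BlackboardAttribute attr = artifact.getAttribute(
                new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME));
        if (attr == null) {
            return false;
        }
        List<HashDbManager.HashDb> notableHashsets = HashDbManager.getInstance().getKnownBadFileHashSets();
        for (HashDbManager.HashDb hashDb : notableHashsets) {
            if (hashDb.getHashSetName().equals(attr.getValueString())) {
                return true;
            }
        }
        return false;
    }
}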
@@ -26,26 +26,27 @@
 <Group type="103" groupAlignment="0" attributes="0">
 <Component id="treeView" max="32767" attributes="0"/>
 <Group type="102" attributes="0">
+<EmptySpace max="-2" attributes="0"/>
 <Component id="backButton" min="-2" max="-2" attributes="0"/>
 <EmptySpace max="-2" attributes="0"/>
 <Component id="forwardButton" min="-2" max="-2" attributes="0"/>
-<EmptySpace pref="264" max="32767" attributes="0"/>
+<EmptySpace pref="140" max="32767" attributes="0"/>
 <Component id="openViewPreferencesButton" min="-2" max="-2" attributes="0"/>
-<EmptySpace min="-2" max="-2" attributes="0"/>
+<EmptySpace max="-2" attributes="0"/>
 </Group>
 </Group>
 </DimensionLayout>
 <DimensionLayout dim="1">
 <Group type="103" groupAlignment="0" attributes="0">
 <Group type="102" alignment="0" attributes="0">
-<EmptySpace max="-2" attributes="0"/>
+<EmptySpace min="0" pref="0" max="-2" attributes="0"/>
 <Group type="103" groupAlignment="0" attributes="0">
+<Component id="openViewPreferencesButton" min="-2" max="-2" attributes="0"/>
 <Component id="backButton" min="-2" max="-2" attributes="1"/>
 <Component id="forwardButton" min="-2" max="-2" attributes="1"/>
-<Component id="openViewPreferencesButton" min="-2" pref="31" max="-2" attributes="0"/>
 </Group>
 <EmptySpace max="-2" attributes="0"/>
-<Component id="treeView" pref="900" max="32767" attributes="0"/>
+<Component id="treeView" pref="919" max="32767" attributes="0"/>
 </Group>
 </Group>
 </DimensionLayout>
@@ -66,7 +67,7 @@
 <Component class="javax.swing.JButton" name="backButton">
 <Properties>
 <Property name="icon" type="javax.swing.Icon" editor="org.netbeans.modules.form.editors2.IconEditor">
-<Image iconType="3" name="/org/sleuthkit/autopsy/directorytree/btn_step_back_large.png"/>
+<Image iconType="3" name="/org/sleuthkit/autopsy/directorytree/btn_step_back.png"/>
 </Property>
 <Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
 <ResourceString bundle="org/sleuthkit/autopsy/directorytree/Bundle.properties" key="DirectoryTreeTopComponent.backButton.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, "{key}")"/>
@@ -74,7 +75,7 @@
 <Property name="borderPainted" type="boolean" value="false"/>
 <Property name="contentAreaFilled" type="boolean" value="false"/>
 <Property name="disabledIcon" type="javax.swing.Icon" editor="org.netbeans.modules.form.editors2.IconEditor">
-<Image iconType="3" name="/org/sleuthkit/autopsy/directorytree/btn_step_back_disabled_large.png"/>
+<Image iconType="3" name="/org/sleuthkit/autopsy/directorytree/btn_step_back_disabled.png"/>
 </Property>
 <Property name="margin" type="java.awt.Insets" editor="org.netbeans.beaninfo.editors.InsetsEditor">
 <Insets value="[2, 0, 2, 0]"/>
@@ -86,10 +87,10 @@
 <Dimension value="[5, 5]"/>
 </Property>
 <Property name="preferredSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
-<Dimension value="[32, 32]"/>
+<Dimension value="[24, 24]"/>
 </Property>
 <Property name="rolloverIcon" type="javax.swing.Icon" editor="org.netbeans.modules.form.editors2.IconEditor">
-<Image iconType="3" name="/org/sleuthkit/autopsy/directorytree/btn_step_back_hover_large.png"/>
+<Image iconType="3" name="/org/sleuthkit/autopsy/directorytree/btn_step_back_hover.png"/>
 </Property>
 </Properties>
 <Events>
@@ -99,7 +100,7 @@
 <Component class="javax.swing.JButton" name="forwardButton">
 <Properties>
 <Property name="icon" type="javax.swing.Icon" editor="org.netbeans.modules.form.editors2.IconEditor">
-<Image iconType="3" name="/org/sleuthkit/autopsy/directorytree/btn_step_forward_large.png"/>
+<Image iconType="3" name="/org/sleuthkit/autopsy/directorytree/btn_step_forward.png"/>
 </Property>
 <Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
 <ResourceString bundle="org/sleuthkit/autopsy/directorytree/Bundle.properties" key="DirectoryTreeTopComponent.forwardButton.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, "{key}")"/>
@@ -107,7 +108,7 @@
 <Property name="borderPainted" type="boolean" value="false"/>
 <Property name="contentAreaFilled" type="boolean" value="false"/>
 <Property name="disabledIcon" type="javax.swing.Icon" editor="org.netbeans.modules.form.editors2.IconEditor">
-<Image iconType="3" name="/org/sleuthkit/autopsy/directorytree/btn_step_forward_disabled_large.png"/>
+<Image iconType="3" name="/org/sleuthkit/autopsy/directorytree/btn_step_forward_disabled.png"/>
 </Property>
 <Property name="margin" type="java.awt.Insets" editor="org.netbeans.beaninfo.editors.InsetsEditor">
 <Insets value="[2, 0, 2, 0]"/>
@@ -119,10 +120,10 @@
 <Dimension value="[5, 5]"/>
 </Property>
 <Property name="preferredSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
-<Dimension value="[32, 32]"/>
+<Dimension value="[24, 24]"/>
 </Property>
 <Property name="rolloverIcon" type="javax.swing.Icon" editor="org.netbeans.modules.form.editors2.IconEditor">
-<Image iconType="3" name="/org/sleuthkit/autopsy/directorytree/btn_step_forward_hover_large.png"/>
+<Image iconType="3" name="/org/sleuthkit/autopsy/directorytree/btn_step_forward_hover.png"/>
 </Property>
 </Properties>
 <Events>
@@ -132,18 +133,26 @@
 <Component class="javax.swing.JButton" name="openViewPreferencesButton">
 <Properties>
 <Property name="icon" type="javax.swing.Icon" editor="org.netbeans.modules.form.editors2.IconEditor">
-<Image iconType="3" name="/org/sleuthkit/autopsy/directorytree/view-preferences-24.png"/>
+<Image iconType="3" name="/org/sleuthkit/autopsy/directorytree/view-preferences-23.png"/>
 </Property>
 <Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
 <ResourceString bundle="org/sleuthkit/autopsy/directorytree/Bundle.properties" key="DirectoryTreeTopComponent.openViewPreferencesButton.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, "{key}")"/>
 </Property>
 <Property name="border" type="javax.swing.border.Border" editor="org.netbeans.modules.form.editors2.BorderEditor">
-<Border info="org.netbeans.modules.form.compat2.border.SoftBevelBorderInfo">
-<BevelBorder/>
+<Border info="org.netbeans.modules.form.compat2.border.EmptyBorderInfo">
+<EmptyBorder/>
 </Border>
 </Property>
+<Property name="borderPainted" type="boolean" value="false"/>
+<Property name="contentAreaFilled" type="boolean" value="false"/>
+<Property name="maximumSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
+<Dimension value="[24, 24]"/>
+</Property>
+<Property name="minimumSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
+<Dimension value="[24, 24]"/>
+</Property>
 <Property name="preferredSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
-<Dimension value="[30, 30]"/>
+<Dimension value="[24, 24]"/>
 </Property>
 </Properties>
 <Events>
@@ -166,9 +166,11 @@ public final class DirectoryTreeTopComponent extends TopComponent implements Dat
 @Override
 public void preferenceChange(PreferenceChangeEvent evt) {
 switch (evt.getKey()) {
+case UserPreferences.DISPLAY_TIMES_IN_LOCAL_TIME:
 case UserPreferences.HIDE_KNOWN_FILES_IN_DATA_SRCS_TREE:
 case UserPreferences.HIDE_SLACK_FILES_IN_DATA_SRCS_TREE:
 case UserPreferences.HIDE_CENTRAL_REPO_COMMENTS_AND_OCCURRENCES:
+case UserPreferences.KEEP_PREFERRED_VIEWER:
 refreshContentTreeSafe();
 break;
 case UserPreferences.SHOW_ONLY_CURRENT_USER_TAGS:
@@ -176,7 +178,8 @@ public final class DirectoryTreeTopComponent extends TopComponent implements Dat
 break;
 case UserPreferences.HIDE_KNOWN_FILES_IN_VIEWS_TREE:
 case UserPreferences.HIDE_SLACK_FILES_IN_VIEWS_TREE:
-// TODO: Need a way to refresh the Views subtree
+// TODO: Need a way to refresh the Views subtree alone.
+refreshContentTreeSafe();
 break;
 }
 }
@@ -238,42 +241,46 @@ public final class DirectoryTreeTopComponent extends TopComponent implements Dat
 
 treeView.setBorder(null);
 
-backButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/directorytree/btn_step_back_large.png"))); // NOI18N
+backButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/directorytree/btn_step_back.png"))); // NOI18N
 org.openide.awt.Mnemonics.setLocalizedText(backButton, org.openide.util.NbBundle.getMessage(DirectoryTreeTopComponent.class, "DirectoryTreeTopComponent.backButton.text")); // NOI18N
 backButton.setBorderPainted(false);
 backButton.setContentAreaFilled(false);
-backButton.setDisabledIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/directorytree/btn_step_back_disabled_large.png"))); // NOI18N
+backButton.setDisabledIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/directorytree/btn_step_back_disabled.png"))); // NOI18N
 backButton.setMargin(new java.awt.Insets(2, 0, 2, 0));
 backButton.setMaximumSize(new java.awt.Dimension(55, 100));
 backButton.setMinimumSize(new java.awt.Dimension(5, 5));
-backButton.setPreferredSize(new java.awt.Dimension(32, 32));
-backButton.setRolloverIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/directorytree/btn_step_back_hover_large.png"))); // NOI18N
+backButton.setPreferredSize(new java.awt.Dimension(24, 24));
+backButton.setRolloverIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/directorytree/btn_step_back_hover.png"))); // NOI18N
 backButton.addActionListener(new java.awt.event.ActionListener() {
 public void actionPerformed(java.awt.event.ActionEvent evt) {
 backButtonActionPerformed(evt);
 }
 });
 
-forwardButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/directorytree/btn_step_forward_large.png"))); // NOI18N
+forwardButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/directorytree/btn_step_forward.png"))); // NOI18N
 org.openide.awt.Mnemonics.setLocalizedText(forwardButton, org.openide.util.NbBundle.getMessage(DirectoryTreeTopComponent.class, "DirectoryTreeTopComponent.forwardButton.text")); // NOI18N
 forwardButton.setBorderPainted(false);
 forwardButton.setContentAreaFilled(false);
-forwardButton.setDisabledIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/directorytree/btn_step_forward_disabled_large.png"))); // NOI18N
+forwardButton.setDisabledIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/directorytree/btn_step_forward_disabled.png"))); // NOI18N
 forwardButton.setMargin(new java.awt.Insets(2, 0, 2, 0));
 forwardButton.setMaximumSize(new java.awt.Dimension(55, 100));
 forwardButton.setMinimumSize(new java.awt.Dimension(5, 5));
-forwardButton.setPreferredSize(new java.awt.Dimension(32, 32));
-forwardButton.setRolloverIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/directorytree/btn_step_forward_hover_large.png"))); // NOI18N
+forwardButton.setPreferredSize(new java.awt.Dimension(24, 24));
+forwardButton.setRolloverIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/directorytree/btn_step_forward_hover.png"))); // NOI18N
 forwardButton.addActionListener(new java.awt.event.ActionListener() {
 public void actionPerformed(java.awt.event.ActionEvent evt) {
 forwardButtonActionPerformed(evt);
 }
 });
 
-openViewPreferencesButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/directorytree/view-preferences-24.png"))); // NOI18N
+openViewPreferencesButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/directorytree/view-preferences-23.png"))); // NOI18N
 org.openide.awt.Mnemonics.setLocalizedText(openViewPreferencesButton, org.openide.util.NbBundle.getMessage(DirectoryTreeTopComponent.class, "DirectoryTreeTopComponent.openViewPreferencesButton.text")); // NOI18N
-openViewPreferencesButton.setBorder(new javax.swing.border.SoftBevelBorder(javax.swing.border.BevelBorder.RAISED));
-openViewPreferencesButton.setPreferredSize(new java.awt.Dimension(30, 30));
+openViewPreferencesButton.setBorder(javax.swing.BorderFactory.createEmptyBorder(1, 1, 1, 1));
+openViewPreferencesButton.setBorderPainted(false);
+openViewPreferencesButton.setContentAreaFilled(false);
+openViewPreferencesButton.setMaximumSize(new java.awt.Dimension(24, 24));
+openViewPreferencesButton.setMinimumSize(new java.awt.Dimension(24, 24));
+openViewPreferencesButton.setPreferredSize(new java.awt.Dimension(24, 24));
 openViewPreferencesButton.addActionListener(new java.awt.event.ActionListener() {
 public void actionPerformed(java.awt.event.ActionEvent evt) {
 openViewPreferencesButtonActionPerformed(evt);
@@ -286,23 +293,24 @@ public final class DirectoryTreeTopComponent extends TopComponent implements Dat
 layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
 .addComponent(treeView)
 .addGroup(layout.createSequentialGroup()
+.addContainerGap()
 .addComponent(backButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
 .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
 .addComponent(forwardButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
-.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 264, Short.MAX_VALUE)
+.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 140, Short.MAX_VALUE)
 .addComponent(openViewPreferencesButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||||
.addContainerGap())
|
.addContainerGap())
|
||||||
);
|
);
|
||||||
layout.setVerticalGroup(
|
layout.setVerticalGroup(
|
||||||
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||||
.addGroup(layout.createSequentialGroup()
|
.addGroup(layout.createSequentialGroup()
|
||||||
.addContainerGap()
|
.addGap(0, 0, 0)
|
||||||
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||||
|
.addComponent(openViewPreferencesButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||||
.addComponent(backButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
.addComponent(backButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||||
.addComponent(forwardButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
.addComponent(forwardButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
|
||||||
.addComponent(openViewPreferencesButton, javax.swing.GroupLayout.PREFERRED_SIZE, 31, javax.swing.GroupLayout.PREFERRED_SIZE))
|
|
||||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
|
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
|
||||||
.addComponent(treeView, javax.swing.GroupLayout.DEFAULT_SIZE, 900, Short.MAX_VALUE))
|
.addComponent(treeView, javax.swing.GroupLayout.DEFAULT_SIZE, 919, Short.MAX_VALUE))
|
||||||
);
|
);
|
||||||
}// </editor-fold>//GEN-END:initComponents
|
}// </editor-fold>//GEN-END:initComponents
|
||||||
|
|
||||||
|
Before Width: | Height: | Size: 1.6 KiB After Width: | Height: | Size: 749 B |
Before Width: | Height: | Size: 1.5 KiB After Width: | Height: | Size: 611 B |
Before Width: | Height: | Size: 1.1 KiB |
Before Width: | Height: | Size: 1.7 KiB After Width: | Height: | Size: 761 B |
Before Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 1.6 KiB After Width: | Height: | Size: 756 B |
Before Width: | Height: | Size: 1.5 KiB After Width: | Height: | Size: 639 B |
Before Width: | Height: | Size: 1.0 KiB |
Before Width: | Height: | Size: 1.6 KiB After Width: | Height: | Size: 769 B |
Before Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 1.4 KiB |
After Width: | Height: | Size: 1.0 KiB |
Before Width: | Height: | Size: 786 B |
@ -27,6 +27,7 @@ import javax.swing.AbstractAction;
|
|||||||
import javax.swing.JMenu;
|
import javax.swing.JMenu;
|
||||||
import javax.swing.JMenuItem;
|
import javax.swing.JMenuItem;
|
||||||
import javax.swing.JOptionPane;
|
import javax.swing.JOptionPane;
|
||||||
|
import org.apache.commons.lang3.StringUtils;
|
||||||
import org.openide.util.NbBundle;
|
import org.openide.util.NbBundle;
|
||||||
import org.openide.util.Utilities;
|
import org.openide.util.Utilities;
|
||||||
import org.openide.util.actions.Presenter;
|
import org.openide.util.actions.Presenter;
|
||||||
@ -44,11 +45,28 @@ import org.sleuthkit.datamodel.TskCoreException;
|
|||||||
final class AddContentToHashDbAction extends AbstractAction implements Presenter.Popup {
|
final class AddContentToHashDbAction extends AbstractAction implements Presenter.Popup {
|
||||||
|
|
||||||
private static AddContentToHashDbAction instance;
|
private static AddContentToHashDbAction instance;
|
||||||
|
|
||||||
private final static String SINGLE_SELECTION_NAME = NbBundle.getMessage(AddContentToHashDbAction.class,
|
private final static String SINGLE_SELECTION_NAME = NbBundle.getMessage(AddContentToHashDbAction.class,
|
||||||
"AddContentToHashDbAction.singleSelectionName");
|
"AddContentToHashDbAction.singleSelectionName");
|
||||||
private final static String MULTIPLE_SELECTION_NAME = NbBundle.getMessage(AddContentToHashDbAction.class,
|
private final static String MULTI_SELECTION_NAME = NbBundle.getMessage(AddContentToHashDbAction.class,
|
||||||
"AddContentToHashDbAction.multipleSelectionName");
|
"AddContentToHashDbAction.multipleSelectionName");
|
||||||
|
|
||||||
|
//During ingest display strings. This text will be greyed out and unclickable
|
||||||
|
private final static String SINGLE_SELECTION_NAME_DURING_INGEST = NbBundle.getMessage(AddContentToHashDbAction.class,
|
||||||
|
"AddContentToHashDbAction.singleSelectionNameDuringIngest");
|
||||||
|
private final static String MULTI_SELECTION_NAME_DURING_INGEST = NbBundle.getMessage(AddContentToHashDbAction.class,
|
||||||
|
"AddContentToHashDbAction.multipleSelectionNameDuringIngest");
|
||||||
|
|
||||||
|
//No MD5 Hash and Empty File display strings. This text will be greyed out and unclickable
|
||||||
|
private final static String SINGLE_SELECTION_NAME_EMPTY_FILE = NbBundle.getMessage(AddContentToHashDbAction.class,
|
||||||
|
"AddContentToHashDbAction.singleSelectionNameEmpty");
|
||||||
|
private final static String MULTI_SELECTION_NAME_EMPTY_FILE = NbBundle.getMessage(AddContentToHashDbAction.class,
|
||||||
|
"AddContentToHashDbAction.multipleSelectionNameEmpty");
|
||||||
|
private final static String SINGLE_SELECTION_NAME_NO_MD5 = NbBundle.getMessage(AddContentToHashDbAction.class,
|
||||||
|
"AddContentToHashDbAction.singleSelectionNameNoMD5");
|
||||||
|
private final static String MULTI_SELECTION_NAME_NO_MD5 = NbBundle.getMessage(AddContentToHashDbAction.class,
|
||||||
|
"AddContentToHashDbAction.multipleSelectionNameNoMD5");
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* AddContentToHashDbAction is a singleton to support multi-selection of
|
* AddContentToHashDbAction is a singleton to support multi-selection of
|
||||||
* nodes, since org.openide.nodes.NodeOp.findActions(Node[] nodes) will only
|
* nodes, since org.openide.nodes.NodeOp.findActions(Node[] nodes) will only
|
||||||
@ -80,26 +98,42 @@ final class AddContentToHashDbAction extends AbstractAction implements Presenter
|
|||||||
|
|
||||||
AddContentToHashDbMenu() {
|
AddContentToHashDbMenu() {
|
||||||
super(SINGLE_SELECTION_NAME);
|
super(SINGLE_SELECTION_NAME);
|
||||||
|
// Get any AbstractFile objects from the lookup of the currently focused top component.
|
||||||
|
final Collection<? extends AbstractFile> selectedFiles = Utilities.actionsGlobalContext().lookupAll(AbstractFile.class);
|
||||||
|
int numberOfFilesSelected = selectedFiles.size();
|
||||||
|
|
||||||
// Disable the menu if file ingest is in progress.
|
// Disable the menu if file ingest is in progress.
|
||||||
if (IngestManager.getInstance().isIngestRunning()) {
|
if (IngestManager.getInstance().isIngestRunning()) {
|
||||||
setEnabled(false);
|
setEnabled(false);
|
||||||
|
setTextBasedOnNumberOfSelections(numberOfFilesSelected,
|
||||||
|
SINGLE_SELECTION_NAME_DURING_INGEST,
|
||||||
|
MULTI_SELECTION_NAME_DURING_INGEST);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get any AbstractFile objects from the lookup of the currently focused top component.
|
|
||||||
final Collection<? extends AbstractFile> selectedFiles = Utilities.actionsGlobalContext().lookupAll(AbstractFile.class);
|
|
||||||
if (selectedFiles.isEmpty()) {
|
if (selectedFiles.isEmpty()) {
|
||||||
setEnabled(false);
|
setEnabled(false);
|
||||||
return;
|
return;
|
||||||
} else if (selectedFiles.size() > 1) {
|
} else {
|
||||||
setText(MULTIPLE_SELECTION_NAME);
|
setTextBasedOnNumberOfSelections(numberOfFilesSelected,
|
||||||
|
SINGLE_SELECTION_NAME,
|
||||||
|
MULTI_SELECTION_NAME);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Disable the menu if hashes have not been calculated.
|
// Disable the menu if md5 have not been computed or if the file size
|
||||||
|
// is empty. Display the appropriate reason to the user.
|
||||||
for (AbstractFile file : selectedFiles) {
|
for (AbstractFile file : selectedFiles) {
|
||||||
if (null == file.getMd5Hash()) {
|
if (file.getSize() == 0) {
|
||||||
setEnabled(false);
|
setEnabled(false);
|
||||||
|
setTextBasedOnNumberOfSelections(numberOfFilesSelected,
|
||||||
|
SINGLE_SELECTION_NAME_EMPTY_FILE,
|
||||||
|
MULTI_SELECTION_NAME_EMPTY_FILE);
|
||||||
|
return;
|
||||||
|
} else if (null == file.getMd5Hash() || StringUtils.isBlank(file.getMd5Hash())) {
|
||||||
|
setEnabled(false);
|
||||||
|
setTextBasedOnNumberOfSelections(numberOfFilesSelected,
|
||||||
|
SINGLE_SELECTION_NAME_NO_MD5,
|
||||||
|
MULTI_SELECTION_NAME_NO_MD5);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -144,6 +178,23 @@ final class AddContentToHashDbAction extends AbstractAction implements Presenter
|
|||||||
add(newHashSetItem);
|
add(newHashSetItem);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Determines which (2) display text should be set given the number of
|
||||||
|
* files selected.
|
||||||
|
*
|
||||||
|
* @param numberOfFilesSelected Number of currently selected files
|
||||||
|
* @param multiSelection Text to display with multiple selections
|
||||||
|
* @param singleSelection Text to display with single selection
|
||||||
|
*/
|
||||||
|
private void setTextBasedOnNumberOfSelections(int numberOfFilesSelected,
|
||||||
|
String singleSelection, String multiSelection) {
|
||||||
|
if (numberOfFilesSelected > 1) {
|
||||||
|
setText(multiSelection);
|
||||||
|
} else {
|
||||||
|
setText(singleSelection);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
private void addFilesToHashSet(final Collection<? extends AbstractFile> files, HashDb hashSet) {
|
private void addFilesToHashSet(final Collection<? extends AbstractFile> files, HashDb hashSet) {
|
||||||
for (AbstractFile file : files) {
|
for (AbstractFile file : files) {
|
||||||
String md5Hash = file.getMd5Hash();
|
String md5Hash = file.getMd5Hash();
|
||||||
|
@ -151,8 +151,14 @@ HashDbManager.fileNameExtensionFilter.title=Hash Set File
|
|||||||
HashDbSearchAction.dlgMsg.title=File Search by MD5 Hash
|
HashDbSearchAction.dlgMsg.title=File Search by MD5 Hash
|
||||||
HashDbSearchAction.getName.text=Hash Search
|
HashDbSearchAction.getName.text=Hash Search
|
||||||
HashDbSearchPanel.dlgMsg.title=File Search by MD5 Hash
|
HashDbSearchPanel.dlgMsg.title=File Search by MD5 Hash
|
||||||
AddContentToHashDbAction.singleSelectionName=Add file to hash set
|
AddContentToHashDbAction.singleSelectionName=Add File to Hash Set
|
||||||
AddContentToHashDbAction.multipleSelectionName=Add files to hash set
|
AddContentToHashDbAction.multipleSelectionName=Add Files to Hash Set
|
||||||
|
AddContentToHashDbAction.singleSelectionNameDuringIngest=Add File to Hash Set (Ingest is running)
|
||||||
|
AddContentToHashDbAction.multipleSelectionNameDuringIngest=Add Files to Hash Set (Ingest is running)
|
||||||
|
AddContentToHashDbAction.singleSelectionNameNoMD5=Add File to Hash Set (No MD5 Hash)
|
||||||
|
AddContentToHashDbAction.multipleSelectionNameNoMD5=Add Files to Hash Set (No MD5 Hash)
|
||||||
|
AddContentToHashDbAction.singleSelectionNameEmpty=Add File to Hash Set (Empty File)
|
||||||
|
AddContentToHashDbAction.multipleSelectionNameEmpty=Add Files to Hash Set (Empty File)
|
||||||
HashDbManager.ingestRunningExceptionMsg=Ingest is ongoing; this service will be unavailable until it finishes.
|
HashDbManager.ingestRunningExceptionMsg=Ingest is ongoing; this service will be unavailable until it finishes.
|
||||||
HashDbManager.saveErrorExceptionMsg=Error saving hash configuration
|
HashDbManager.saveErrorExceptionMsg=Error saving hash configuration
|
||||||
HashLookupSettingsPanel.jButton3.text=Import Hash Set
|
HashLookupSettingsPanel.jButton3.text=Import Hash Set
|
||||||
|
@ -1,184 +0,0 @@
|
|||||||
/*
|
|
||||||
* Autopsy Forensic Browser
|
|
||||||
*
|
|
||||||
* Copyright 2018-2018 Basis Technology Corp.
|
|
||||||
* Contact: carrier <at> sleuthkit <dot> org
|
|
||||||
*
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
*/
|
|
||||||
package org.sleuthkit.autopsy.tabulardatareader;
|
|
||||||
|
|
||||||
import java.io.File;
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
import org.sleuthkit.autopsy.casemodule.Case;
|
|
||||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
|
||||||
import org.sleuthkit.autopsy.datamodel.ContentUtils;
|
|
||||||
import org.sleuthkit.datamodel.Content;
|
|
||||||
import org.sleuthkit.datamodel.TskCoreException;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* An abstract reader interface for retrieving contents from files via a common
|
|
||||||
* API.
|
|
||||||
*/
|
|
||||||
public abstract class AbstractReader implements AutoCloseable {
|
|
||||||
|
|
||||||
private final String localDiskPath;
|
|
||||||
|
|
||||||
public AbstractReader(Content file)
|
|
||||||
throws FileReaderInitException {
|
|
||||||
|
|
||||||
try {
|
|
||||||
localDiskPath = getLocalDiskPath(file);
|
|
||||||
writeDataSourceToLocalDisk(file);
|
|
||||||
} catch (FileReaderInitException ex) {
|
|
||||||
throw new FileReaderInitException(ex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Copies the data source file contents to local drive for processing.
|
|
||||||
* This function is common to all readers.
|
|
||||||
*
|
|
||||||
* @param file AbstractFile from the data source
|
|
||||||
* @throws IOException Exception writing file contents
|
|
||||||
* @throws NoCurrentCaseException Current case closed during file copying
|
|
||||||
* @throws TskCoreException Exception finding files from abstract file
|
|
||||||
*/
|
|
||||||
private void writeDataSourceToLocalDisk(Content file)
|
|
||||||
throws FileReaderInitException {
|
|
||||||
|
|
||||||
try {
|
|
||||||
File localDatabaseFile = new File(localDiskPath);
|
|
||||||
if (!localDatabaseFile.exists()) {
|
|
||||||
ContentUtils.writeToFile(file, localDatabaseFile);
|
|
||||||
}
|
|
||||||
} catch (IOException ex) {
|
|
||||||
throw new FileReaderInitException(ex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public String getLocalDiskPath() {
|
|
||||||
return localDiskPath;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Generates a local disk path for abstract file contents to be copied. All
|
|
||||||
* file sources must be copied to local disk to be opened by abstract
|
|
||||||
* reader.
|
|
||||||
*
|
|
||||||
* @param file The database abstract file
|
|
||||||
*
|
|
||||||
* @return Valid local path for copying
|
|
||||||
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
private String getLocalDiskPath(Content file) throws FileReaderInitException {
|
|
||||||
try {
|
|
||||||
return Case.getCurrentCaseThrows().getTempDirectory()
|
|
||||||
+ File.separator + file.getId() + file.getName();
|
|
||||||
} catch(NoCurrentCaseException ex) {
|
|
||||||
throw new FileReaderInitException("No current case open when trying to get temp directory", ex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Return the a mapping of table names to table schemas (may be in the form of
|
|
||||||
* headers or create table statements for databases).
|
|
||||||
*
|
|
||||||
* @return Mapping of table names to schemas
|
|
||||||
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
|
|
||||||
*/
|
|
||||||
public abstract Map<String, String> getTableSchemas() throws FileReaderException;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the row count fo the given table name.
|
|
||||||
*
|
|
||||||
* @param tableName
|
|
||||||
* @return number of rows in the current table
|
|
||||||
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
|
|
||||||
*/
|
|
||||||
public abstract Integer getRowCountFromTable(String tableName) throws FileReaderException;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns a collection view of the rows in a table.
|
|
||||||
*
|
|
||||||
* @param tableName
|
|
||||||
* @return List view of the rows in the table
|
|
||||||
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
|
|
||||||
*/
|
|
||||||
public abstract List<Map<String, Object>> getRowsFromTable(String tableName) throws FileReaderException;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns a map of column names to a list of column values.
|
|
||||||
*
|
|
||||||
* @param tableName
|
|
||||||
* @return A map of column names to a list of column values
|
|
||||||
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
|
|
||||||
*/
|
|
||||||
public abstract Map<String, List<Object>> getColumnsFromTable(String tableName) throws FileReaderException;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns a window of rows starting at the offset and ending when the number of rows read
|
|
||||||
* equals the 'numRowsToRead' parameter or there is nothing left to read.
|
|
||||||
*
|
|
||||||
* @param tableName table name to be read from
|
|
||||||
* @param offset start index to begin reading
|
|
||||||
* @param numRowsToRead number of rows to read past offset
|
|
||||||
* @return List view of the rows in the table
|
|
||||||
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
|
|
||||||
*/
|
|
||||||
public abstract List<Map<String, Object>> getRowsFromTable(String tableName,
|
|
||||||
int offset, int numRowsToRead) throws FileReaderException;
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public abstract void close();
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Checked exceptions are specific to a given implementation, so this custom
|
|
||||||
* exception allows for a common interface to accommodate all of them. Init
|
|
||||||
* exception allows for more flexibility in logging.
|
|
||||||
*/
|
|
||||||
public static class FileReaderInitException extends Exception {
|
|
||||||
public FileReaderInitException(String message, Throwable cause) {
|
|
||||||
super(message, cause);
|
|
||||||
}
|
|
||||||
|
|
||||||
public FileReaderInitException(Throwable cause) {
|
|
||||||
super(cause);
|
|
||||||
}
|
|
||||||
|
|
||||||
public FileReaderInitException(String message) {
|
|
||||||
super(message);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Checked exceptions are specific to a given implementation, so this custom
|
|
||||||
* exception allows for a common interface to accommodate all of them.
|
|
||||||
*/
|
|
||||||
public class FileReaderException extends Exception {
|
|
||||||
public FileReaderException(String message, Throwable cause) {
|
|
||||||
super(message, cause);
|
|
||||||
}
|
|
||||||
|
|
||||||
public FileReaderException(Throwable cause) {
|
|
||||||
super(cause);
|
|
||||||
}
|
|
||||||
|
|
||||||
public FileReaderException(String message) {
|
|
||||||
super(message);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,313 +0,0 @@
|
|||||||
/*
|
|
||||||
* Autopsy Forensic Browser
|
|
||||||
*
|
|
||||||
* Copyright 2018-2018 Basis Technology Corp.
|
|
||||||
* Contact: carrier <at> sleuthkit <dot> org
|
|
||||||
*
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
*/
|
|
||||||
package org.sleuthkit.autopsy.tabulardatareader;
|
|
||||||
|
|
||||||
import static com.google.common.collect.Lists.newArrayList;
|
|
||||||
import java.io.File;
|
|
||||||
import java.io.FileInputStream;
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.Iterator;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
import java.util.logging.Level;
|
|
||||||
import org.apache.commons.lang3.StringUtils;
|
|
||||||
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
|
|
||||||
import org.apache.poi.ss.usermodel.Cell;
|
|
||||||
import org.apache.poi.ss.usermodel.DateUtil;
|
|
||||||
import org.apache.poi.ss.usermodel.Row;
|
|
||||||
import org.apache.poi.ss.usermodel.Sheet;
|
|
||||||
import org.apache.poi.ss.usermodel.Workbook;
|
|
||||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
|
||||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
|
||||||
import com.monitorjbl.xlsx.StreamingReader;
|
|
||||||
import org.apache.poi.hssf.OldExcelFormatException;
|
|
||||||
import org.sleuthkit.datamodel.AbstractFile;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Reads excel files and implements the abstract reader api for interfacing with
|
|
||||||
* the content. Supports .xls and .xlsx files.
|
|
||||||
*/
|
|
||||||
public final class ExcelReader extends AbstractReader {
|
|
||||||
|
|
||||||
private final static IngestServices services = IngestServices.getInstance();
|
|
||||||
private final static Logger logger = services.getLogger(ExcelReader.class.getName());
|
|
||||||
|
|
||||||
private Workbook workbook;
|
|
||||||
private final static String XLSX_MIME_TYPE = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet";
|
|
||||||
private final static String XLS_MIME_TYPE = "application/vnd.ms-excel";
|
|
||||||
private final static String EMPTY_CELL_STRING = "";
|
|
||||||
|
|
||||||
private String LOCAL_DISK_PATH;
|
|
||||||
private String ACTIVE_MIME_TYPE;
|
|
||||||
|
|
||||||
public ExcelReader(AbstractFile file, String mimeType)
|
|
||||||
throws FileReaderInitException {
|
|
||||||
super(file);
|
|
||||||
this.LOCAL_DISK_PATH = super.getLocalDiskPath();
|
|
||||||
this.ACTIVE_MIME_TYPE = mimeType;
|
|
||||||
|
|
||||||
try {
|
|
||||||
this.workbook = createWorkbook();
|
|
||||||
} catch (IOException ex) {
|
|
||||||
throw new FileReaderInitException(ex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Internal factory for creating the correct workbook given the mime type.
|
|
||||||
* The file reader factory in this module passes both the XLSMimeType and
|
|
||||||
* XLSXMimeType into this constructor for the reader to handle. This avoided
|
|
||||||
* the need for creating an AbstractExcelReader class and two sub classes
|
|
||||||
* overriding the workbook field. Additionally, I don't forsee needing to
|
|
||||||
* support more than these two mime types.
|
|
||||||
*
|
|
||||||
*
|
|
||||||
* @return The corrent workbook instance
|
|
||||||
*
|
|
||||||
* @throws IOException Issue with input stream and opening file
|
|
||||||
* location at localDiskPath
|
|
||||||
* @throws FileReaderInitException mimetype unsupported
|
|
||||||
*/
|
|
||||||
private Workbook createWorkbook() throws
|
|
||||||
IOException, FileReaderInitException {
|
|
||||||
switch (ACTIVE_MIME_TYPE) {
|
|
||||||
case XLS_MIME_TYPE:
|
|
||||||
try {
|
|
||||||
//Apache POI only supports BIFF8 format, anything below is considered
|
|
||||||
//old excel format and is not a concern for us.
|
|
||||||
return new HSSFWorkbook(new FileInputStream(new File(LOCAL_DISK_PATH)));
|
|
||||||
} catch (OldExcelFormatException e) {
|
|
||||||
throw new FileReaderInitException(e);
|
|
||||||
}
|
|
||||||
case XLSX_MIME_TYPE:
|
|
||||||
//StreamingReader is part of the xlsx streamer dependency that creates
|
|
||||||
//a streaming version of XSSFWorkbook for reading (SXSSFWorkbook is only for writing
|
|
||||||
//large workbooks, not reading). This libary provides a workbook interface
|
|
||||||
//that is mostly identical to the poi workbook api, hence both the HSSFWorkbook
|
|
||||||
//and this can use the same functions below.
|
|
||||||
return StreamingReader.builder().rowCacheSize(500).open(new File(LOCAL_DISK_PATH));
|
|
||||||
default:
|
|
||||||
throw new FileReaderInitException(String.format("Excel reader for mime "
|
|
||||||
+ "type [%s] is not supported", ACTIVE_MIME_TYPE));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the number of rows in a given excel table (aka sheet).
|
|
||||||
*
|
|
||||||
* @param tableName Name of table to count total rows from
|
|
||||||
*
|
|
||||||
* @return row count for requested table name
|
|
||||||
*
|
|
||||||
* @throws
|
|
||||||
* org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
|
|
||||||
*/
|
|
||||||
@Override
|
|
||||||
public Integer getRowCountFromTable(String tableName) throws FileReaderException {
|
|
||||||
return workbook.getSheet(tableName).getLastRowNum();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns a collection of all the rows from a given table in an excel
|
|
||||||
* document.
|
|
||||||
*
|
|
||||||
* @param tableName Current sheet name being read
|
|
||||||
*
|
|
||||||
* @return A collection of row maps
|
|
||||||
*
|
|
||||||
* @throws
|
|
||||||
* org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
|
|
||||||
*/
|
|
||||||
@Override
|
|
||||||
public List<Map<String, Object>> getRowsFromTable(String tableName) throws FileReaderException {
|
|
||||||
//StreamingReader maintains the same pointer to a sheet rowIterator, so this
|
|
||||||
//call returns an iterator that could have already been iterated on instead
|
|
||||||
//of a fresh copy. We must cache the header value from the call to
|
|
||||||
//getTableSchemas as important information in the first row could have been
|
|
||||||
//missed.
|
|
||||||
Iterator<Row> sheetIter = workbook.getSheet(tableName).rowIterator();
|
|
||||||
List<Map<String, Object>> rowList = new ArrayList<>();
|
|
||||||
|
|
||||||
while (sheetIter.hasNext()) {
|
|
||||||
Row currRow = sheetIter.next();
|
|
||||||
rowList.add(getRowMap(currRow));
|
|
||||||
}
|
|
||||||
|
|
||||||
//Reset the streaming reader for xlsx, so that there is a fresh iterator
|
|
||||||
//on each sheet. That way each call to this function returns all the results.
|
|
||||||
resetStreamingReader();
|
|
||||||
|
|
||||||
return rowList;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns a map of column numbers to a list of column values.
|
|
||||||
*
|
|
||||||
* @param tableName
|
|
||||||
*
|
|
||||||
* @return
|
|
||||||
*
|
|
||||||
* @throws
|
|
||||||
* org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
|
|
||||||
*/
|
|
||||||
@Override
|
|
||||||
public Map<String, List<Object>> getColumnsFromTable(String tableName) throws FileReaderException {
|
|
||||||
Map<String, List<Object>> columnViewOfSheet = new HashMap<>();
|
|
||||||
|
|
||||||
Iterator<Row> sheetIter = workbook.getSheet(tableName).rowIterator();
|
|
||||||
|
|
||||||
while (sheetIter.hasNext()) {
|
|
||||||
Row row = sheetIter.next();
|
|
||||||
for (Cell cell : row) {
|
|
||||||
String index = String.valueOf(cell.getColumnIndex());
|
|
||||||
if (columnViewOfSheet.containsKey(index)) {
|
|
||||||
columnViewOfSheet.get(index).add(getCellValue(cell));
|
|
||||||
} else {
|
|
||||||
columnViewOfSheet.put(index, newArrayList(getCellValue(cell)));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//Reset the streaming reader for xlsx, so that there is a fresh iterator
|
|
||||||
//on each sheet. That way each call to this function returns all the results.
|
|
||||||
resetStreamingReader();
|
|
||||||
|
|
||||||
return columnViewOfSheet;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Currently not supported. Returns a window of rows starting at the offset
|
|
||||||
* and ending when the number of rows read equals the 'numRowsToRead'
|
|
||||||
* parameter or the iterator has nothing left to read.
|
|
||||||
*
|
|
||||||
* For instance: offset 1, numRowsToRead 5 would return 5 results (1-5).
|
|
||||||
* offset 0, numRowsToRead 5 would return 5 results (0-4).
|
|
||||||
*
|
|
||||||
* @param tableName Current name of sheet to be read
|
|
||||||
* @param offset start index to begin reading (documents are 0
|
|
||||||
* indexed)
|
|
||||||
* @param numRowsToRead number of rows to read
|
|
||||||
*
|
|
||||||
* @return
|
|
||||||
*
|
|
||||||
* @throws
|
|
||||||
* org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
|
|
||||||
*/
|
|
||||||
@Override
|
|
||||||
public List<Map<String, Object>> getRowsFromTable(String tableName,
|
|
||||||
int offset, int numRowsToRead) throws FileReaderException {
|
|
||||||
throw new FileReaderException("Operation Not Supported.");
|
|
||||||
}
|
|
||||||
|
|
||||||
private Map<String, Object> getRowMap(Row row) {
|
|
||||||
Map<String, Object> rowMap = new HashMap<>();
|
|
||||||
for (Cell cell : row) {
|
|
||||||
Object value = getCellValue(cell);
|
|
||||||
rowMap.put(String.valueOf(cell.getColumnIndex()), value);
|
|
||||||
}
|
|
||||||
return rowMap;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the value of a given cell. The correct value function must be
|
|
||||||
* called on a cell depending on its type, hence the switch.
|
|
||||||
*
|
|
||||||
* @param cell Cell object containing a getter function for its value type
|
|
||||||
*
|
|
||||||
* @return A generic object pointer to the cell's value
|
|
||||||
*/
|
|
||||||
private Object getCellValue(Cell cell) {
|
|
||||||
switch (cell.getCellTypeEnum()) {
|
|
||||||
case BOOLEAN:
|
|
||||||
return cell.getBooleanCellValue();
|
|
||||||
case STRING:
|
|
||||||
return cell.getStringCellValue();
|
|
||||||
case NUMERIC:
|
|
||||||
if (DateUtil.isCellDateFormatted(cell)) {
|
|
||||||
return cell.getDateCellValue();
|
|
||||||
} else {
|
|
||||||
return cell.getNumericCellValue();
|
|
||||||
}
|
|
||||||
case FORMULA:
|
|
||||||
return cell.getCellFormula();
|
|
||||||
default:
|
|
||||||
//Cell must be empty at this branch
|
|
||||||
return EMPTY_CELL_STRING;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns a map of sheet names to headers (header is in a comma-seperated
|
|
||||||
* string). Warning: Only call this ONCE per excel file.
|
|
||||||
*
|
|
||||||
* @return A map of sheet names to header strings.
|
|
||||||
*
|
|
||||||
* @throws
|
|
||||||
* org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
|
|
||||||
*/
|
|
||||||
@Override
|
|
||||||
public Map<String, String> getTableSchemas() throws FileReaderException {
|
|
||||||
Map<String, String> tableSchemas = new HashMap<>();
|
|
||||||
for (Sheet sheet : workbook) {
|
|
||||||
Iterator<Row> iterator = sheet.rowIterator();
|
|
||||||
if (iterator.hasNext()) {
|
|
||||||
//Consume header
|
|
||||||
Row header = iterator.next();
|
|
||||||
String headerStringFormat = StringUtils.join(header.cellIterator(), ", ");
|
|
||||||
tableSchemas.put(sheet.getSheetName(), headerStringFormat);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//Reset the streaming reader for xlsx, so that there is a fresh iterator
|
|
||||||
//on each sheet. That way each call to this function returns all the results.
|
|
||||||
resetStreamingReader();
|
|
||||||
|
|
||||||
return tableSchemas;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Resets the streaming reader so that the iterator starts at the start of each
|
|
||||||
* sheet. Matches functionality provided by apache POI.
|
|
||||||
*
|
|
||||||
* @throws
|
|
||||||
* org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
|
|
||||||
*/
|
|
||||||
public void resetStreamingReader() throws FileReaderException {
|
|
||||||
if (ACTIVE_MIME_TYPE.equals(XLSX_MIME_TYPE)) {
|
|
||||||
try {
|
|
||||||
this.workbook = createWorkbook();
|
|
||||||
} catch (IOException | FileReaderInitException ex) {
|
|
||||||
throw new FileReaderException("Could not reset streaming iterator", ex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void close() {
|
|
||||||
try {
|
|
||||||
workbook.close();
|
|
||||||
} catch (IOException ex) {
|
|
||||||
//Non-essential exception, user has no need for the connection
|
|
||||||
//object at this stage so closing details are not important
|
|
||||||
logger.log(Level.WARNING, "Could not close excel file input stream", ex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,67 +0,0 @@
|
|||||||
/*
|
|
||||||
* Autopsy Forensic Browser
|
|
||||||
*
|
|
||||||
* Copyright 2018-2018 Basis Technology Corp.
|
|
||||||
* Contact: carrier <at> sleuthkit <dot> org
|
|
||||||
*
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
*/
|
|
||||||
package org.sleuthkit.autopsy.tabulardatareader;
|
|
||||||
|
|
||||||
import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException;
|
|
||||||
import org.sleuthkit.datamodel.AbstractFile;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Factory for creating the correct reader given the mime type of a file.
|
|
||||||
*/
|
|
||||||
public final class FileReaderFactory {
|
|
||||||
|
|
||||||
private FileReaderFactory() {
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Instantiates the appropriate reader given the mimeType argument.
|
|
||||||
* Currently supports SQLite files and Excel files (.xls and .xlsx). BIFF5
|
|
||||||
* format of .xls is not supported.
|
|
||||||
*
|
|
||||||
* @param mimeType mimeType passed in from the ingest module g * @param file
|
|
||||||
* current file under inspection
|
|
||||||
*
|
|
||||||
* @param file Content file to be copied into
|
|
||||||
*
|
|
||||||
* @return The correct reader class needed to read the file contents
|
|
||||||
*
|
|
||||||
* @throws
|
|
||||||
* org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException
|
|
||||||
*/
|
|
||||||
public static AbstractReader createReader(AbstractFile file, String mimeType) throws FileReaderInitException {
|
|
||||||
switch (mimeType) {
|
|
||||||
case "application/x-sqlite3":
|
|
||||||
return new SQLiteReader(file);
|
|
||||||
case "application/vnd.ms-excel":
|
|
||||||
case "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":
|
|
||||||
try {
|
|
||||||
return new ExcelReader(file, mimeType);
|
|
||||||
//Catches runtime exceptions being emitted from Apache
|
|
||||||
//POI (such as EncryptedDocumentException) and wraps them
|
|
||||||
//into FileReaderInitException to be caught and logged
|
|
||||||
//in the ingest module.
|
|
||||||
} catch (Exception poiInitException) {
|
|
||||||
throw new FileReaderInitException(poiInitException);
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
throw new FileReaderInitException(String.format("Reader for mime "
|
|
||||||
+ "type [%s] is not supported", mimeType));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,360 +0,0 @@
|
|||||||
/*
|
|
||||||
* Autopsy Forensic Browser
|
|
||||||
*
|
|
||||||
* Copyright 2018-2018 Basis Technology Corp.
|
|
||||||
* Contact: carrier <at> sleuthkit <dot> org
|
|
||||||
*
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
*/
|
|
||||||
package org.sleuthkit.autopsy.tabulardatareader;
|
|
||||||
|
|
||||||
import java.io.File;
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.sql.Connection;
|
|
||||||
import java.sql.DriverManager;
|
|
||||||
import java.sql.ResultSet;
|
|
||||||
import java.sql.ResultSetMetaData;
|
|
||||||
import java.sql.SQLException;
|
|
||||||
import java.sql.Statement;
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.LinkedHashMap;
|
|
||||||
import java.util.LinkedList;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
import java.util.TreeMap;
|
|
||||||
import java.util.logging.Level;
|
|
||||||
import org.openide.util.NbBundle;
|
|
||||||
import org.sleuthkit.autopsy.casemodule.Case;
|
|
||||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
|
||||||
import org.sleuthkit.autopsy.casemodule.services.FileManager;
|
|
||||||
import org.sleuthkit.autopsy.casemodule.services.Services;
|
|
||||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
|
||||||
import org.sleuthkit.autopsy.datamodel.ContentUtils;
|
|
||||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
|
||||||
import org.sleuthkit.datamodel.AbstractFile;
|
|
||||||
import org.sleuthkit.datamodel.Content;
|
|
||||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
|
||||||
import org.sleuthkit.datamodel.TskCoreException;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Reads sqlite databases and returns results in a list collection.
|
|
||||||
*/
|
|
||||||
public final class SQLiteReader extends AbstractReader {
|
|
||||||
|
|
||||||
private final Connection connection;
|
|
||||||
private final static IngestServices ingestServices = IngestServices.getInstance();
|
|
||||||
private final static Logger logger = ingestServices.getLogger(SQLiteReader.class.getName());
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Writes data source file contents to local disk and opens a sqlite JDBC
|
|
||||||
* connection.
|
|
||||||
*
|
|
||||||
* @param sqliteDbFile Data source abstract file
|
|
||||||
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException
|
|
||||||
*/
|
|
||||||
public SQLiteReader(AbstractFile sqliteDbFile) throws FileReaderInitException {
|
|
||||||
super(sqliteDbFile);
|
|
||||||
try {
|
|
||||||
final String localDiskPath = super.getLocalDiskPath();
|
|
||||||
// Look for any meta files associated with this DB - WAL, SHM, etc.
|
|
||||||
findAndCopySQLiteMetaFile(sqliteDbFile, sqliteDbFile.getName() + "-wal");
|
|
||||||
findAndCopySQLiteMetaFile(sqliteDbFile, sqliteDbFile.getName() + "-shm");
|
|
||||||
|
|
||||||
connection = getDatabaseConnection(localDiskPath);
|
|
||||||
} catch (ClassNotFoundException | SQLException |IOException |
|
|
||||||
NoCurrentCaseException | TskCoreException ex) {
|
|
||||||
throw new FileReaderInitException(ex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Searches for a meta file associated with the give SQLite database. If
|
|
||||||
* found, copies the file to the local disk folder
|
|
||||||
*
|
|
||||||
* @param sqliteFile file being processed
|
|
||||||
* @param metaFileName name of meta file to look for
|
|
||||||
*
|
|
||||||
* @throws NoCurrentCaseException Case has been closed.
|
|
||||||
* @throws TskCoreException fileManager cannot find AbstractFile
|
|
||||||
* files.
|
|
||||||
* @throws IOException Issue during writing to file.
|
|
||||||
*/
|
|
||||||
private void findAndCopySQLiteMetaFile(Content sqliteFile,
|
|
||||||
String metaFileName) throws NoCurrentCaseException, TskCoreException, IOException {
|
|
||||||
|
|
||||||
Case openCase = Case.getCurrentCaseThrows();
|
|
||||||
SleuthkitCase sleuthkitCase = openCase.getSleuthkitCase();
|
|
||||||
Services services = new Services(sleuthkitCase);
|
|
||||||
FileManager fileManager = services.getFileManager();
|
|
||||||
|
|
||||||
List<AbstractFile> metaFiles = fileManager.findFiles(
|
|
||||||
sqliteFile.getDataSource(), metaFileName,
|
|
||||||
sqliteFile.getParent().getName());
|
|
||||||
|
|
||||||
if (metaFiles != null) {
|
|
||||||
for (AbstractFile metaFile : metaFiles) {
|
|
||||||
String tmpMetafilePathName = openCase.getTempDirectory()
|
|
||||||
+ File.separator + metaFile.getId() + metaFile.getName();
|
|
||||||
File tmpMetafile = new File(tmpMetafilePathName);
|
|
||||||
ContentUtils.writeToFile(metaFile, tmpMetafile);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Opens a JDBC connection to the sqlite database specified by the path
|
|
||||||
* parameter.
|
|
||||||
*
|
|
||||||
* @param databasePath Local path of sqlite database
|
|
||||||
*
|
|
||||||
* @return Connection JDBC connection, to be maintained and closed by the
|
|
||||||
* reader
|
|
||||||
*
|
|
||||||
* @throws ClassNotFoundException missing SQLite JDBC class
|
|
||||||
* @throws SQLException Exception during opening database
|
|
||||||
* connection
|
|
||||||
*/
|
|
||||||
private Connection getDatabaseConnection(String databasePath)
|
|
||||||
throws ClassNotFoundException, SQLException {
|
|
||||||
|
|
||||||
// Load the SQLite JDBC driver, if necessary.
|
|
||||||
Class.forName("org.sqlite.JDBC"); //NON-NLS
|
|
||||||
return DriverManager.getConnection(
|
|
||||||
"jdbc:sqlite:" + databasePath); //NON-NLS
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Retrieves a map view of table names to table schemas (in the form of
|
|
||||||
* CREATE TABLE statments).
|
|
||||||
*
|
|
||||||
* @return A map of table names to table schemas
|
|
||||||
*
|
|
||||||
* @throws
|
|
||||||
* org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
|
|
||||||
*/
|
|
||||||
@Override
|
|
||||||
public Map<String, String> getTableSchemas() throws FileReaderException {
|
|
||||||
|
|
||||||
Map<String, String> dbTablesMap = new TreeMap<>();
|
|
||||||
|
|
||||||
try (Statement statement = connection.createStatement();
|
|
||||||
ResultSet resultSet = statement.executeQuery(
|
|
||||||
"SELECT name, sql FROM sqlite_master " //NON-NLS
|
|
||||||
+ " WHERE type= 'table' " //NON-NLS
|
|
||||||
+ " ORDER BY name;")) { //NON-NLS
|
|
||||||
|
|
||||||
while (resultSet.next()) {
|
|
||||||
String tableName = resultSet.getString("name"); //NON-NLS
|
|
||||||
String tableSQL = resultSet.getString("sql"); //NON-NLS
|
|
||||||
dbTablesMap.put(tableName, tableSQL);
|
|
||||||
}
|
|
||||||
|
|
||||||
} catch (SQLException ex) {
|
|
||||||
throw new FileReaderException(ex);
|
|
||||||
}
|
|
||||||
|
|
||||||
return dbTablesMap;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Retrieves the total number of rows from a table in the SQLite database.
|
|
||||||
*
|
|
||||||
* @param tableName
|
|
||||||
*
|
|
||||||
* @return Row count from tableName
|
|
||||||
*
|
|
||||||
* @throws
|
|
||||||
* org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
|
|
||||||
*/
|
|
||||||
@Override
|
|
||||||
public Integer getRowCountFromTable(String tableName)
|
|
||||||
throws FileReaderException {
|
|
||||||
String quotedTableName = wrapTableNameStringWithQuotes(tableName);
|
|
||||||
try (Statement statement = connection.createStatement();
|
|
||||||
ResultSet resultSet = statement.executeQuery(
|
|
||||||
"SELECT count (*) as count FROM " + quotedTableName)) { //NON-NLS
|
|
||||||
return resultSet.getInt("count"); //NON-NLS
|
|
||||||
} catch (SQLException ex) {
|
|
||||||
throw new FileReaderException(ex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Retrieves all rows from a given table in the SQLite database. If only a
|
|
||||||
* subset of rows are desired, see the overloaded function below.
|
|
||||||
*
|
|
||||||
* @param tableName
|
|
||||||
*
|
|
||||||
* @return List of rows, where each row is represented as a column-value
|
|
||||||
* map.
|
|
||||||
*
|
|
||||||
* @throws
|
|
||||||
* org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
|
|
||||||
*/
|
|
||||||
@Override
|
|
||||||
public List<Map<String, Object>> getRowsFromTable(String tableName)
|
|
||||||
throws FileReaderException {
|
|
||||||
//This method does not directly call its overloaded counterpart
|
|
||||||
//since the second parameter would need to be retreived from a call to
|
|
||||||
//getTableRowCount().
|
|
||||||
String quotedTableName = wrapTableNameStringWithQuotes(tableName);
|
|
||||||
try (Statement statement = connection.createStatement();
|
|
||||||
ResultSet resultSet = statement.executeQuery(
|
|
||||||
"SELECT * FROM " + quotedTableName)) { //NON-NLS
|
|
||||||
return resultSetToList(resultSet);
|
|
||||||
} catch (SQLException ex) {
|
|
||||||
throw new FileReaderException(ex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Retrieves a subset of the rows from a given table in the SQLite database.
|
|
||||||
*
|
|
||||||
* @param tableName
|
|
||||||
* @param offset Desired start index (rows begin at 1)
|
|
||||||
* @param numRowsToRead Number of rows past the start index
|
|
||||||
*
|
|
||||||
* @return List of rows, where each row is represented as a column-value
|
|
||||||
* map.
|
|
||||||
*
|
|
||||||
* @throws
|
|
||||||
* org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
|
|
||||||
*/
|
|
||||||
@Override
|
|
||||||
public List<Map<String, Object>> getRowsFromTable(String tableName,
|
|
||||||
int offset, int numRowsToRead) throws FileReaderException {
|
|
||||||
String quotedTableName = wrapTableNameStringWithQuotes(tableName);
|
|
||||||
try (Statement statement = connection.createStatement();
|
|
||||||
ResultSet resultSet = statement.executeQuery(
|
|
||||||
"SELECT * FROM " + quotedTableName //NON-NLS
|
|
||||||
+ " LIMIT " + Integer.toString(numRowsToRead) //NON-NLS
|
|
||||||
+ " OFFSET " + Integer.toString(offset - 1))) { //NON-NLS
|
|
||||||
return resultSetToList(resultSet);
|
|
||||||
} catch (SQLException ex) {
|
|
||||||
throw new FileReaderException(ex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Wraps table name with quotation marks in case table name contains spaces.
|
|
||||||
* sqliteJDBC cannot read table names with spaces in them unless surrounded
|
|
||||||
* by quotation marks.
|
|
||||||
*
|
|
||||||
* @param tableName
|
|
||||||
*
|
|
||||||
* @return Input name: Result Table -> "Result Table"
|
|
||||||
*/
|
|
||||||
private String wrapTableNameStringWithQuotes(String tableName) {
|
|
||||||
return "\"" + tableName + "\"";
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Converts a ResultSet (row results from a table read) into a list.
|
|
||||||
*
|
|
||||||
* @param resultSet row results from a table read
|
|
||||||
*
|
|
||||||
* @return List of rows, where each row is represented as a column-value
|
|
||||||
* map.
|
|
||||||
*
|
|
||||||
* @throws SQLException occurs if ResultSet is closed while attempting to
|
|
||||||
* access it's data.
|
|
||||||
*/
|
|
||||||
@NbBundle.Messages("SQLiteReader.BlobNotShown.message=BLOB Data not shown")
|
|
||||||
private List<Map<String, Object>> resultSetToList(ResultSet resultSet) throws SQLException {
|
|
||||||
|
|
||||||
ResultSetMetaData metaData = resultSet.getMetaData();
|
|
||||||
int columns = metaData.getColumnCount();
|
|
||||||
List<Map<String, Object>> rowMap = new ArrayList<>();
|
|
||||||
while (resultSet.next()) {
|
|
||||||
Map<String, Object> row = new LinkedHashMap<>(columns);
|
|
||||||
for (int i = 1; i <= columns; ++i) {
|
|
||||||
if (resultSet.getObject(i) == null) {
|
|
||||||
row.put(metaData.getColumnName(i), "");
|
|
||||||
} else {
|
|
||||||
if (metaData.getColumnTypeName(i).compareToIgnoreCase("blob") == 0) {
|
|
||||||
row.put(metaData.getColumnName(i), Bundle.SQLiteReader_BlobNotShown_message());
|
|
||||||
} else {
|
|
||||||
row.put(metaData.getColumnName(i), resultSet.getObject(i));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
rowMap.add(row);
|
|
||||||
}
|
|
||||||
|
|
||||||
return rowMap;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns a column view of the table. Maps the column name to a list of
|
|
||||||
* that column's values.
|
|
||||||
*
|
|
||||||
* @param tableName
|
|
||||||
*
|
|
||||||
* @return
|
|
||||||
*
|
|
||||||
* @throws
|
|
||||||
* org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
|
|
||||||
*/
|
|
||||||
@Override
|
|
||||||
public Map<String, List<Object>> getColumnsFromTable(String tableName)
|
|
||||||
throws FileReaderException {
|
|
||||||
|
|
||||||
String quotedTableName = wrapTableNameStringWithQuotes(tableName);
|
|
||||||
try (Statement statement = connection.createStatement();
|
|
||||||
ResultSet resultSet = statement.executeQuery(
|
|
||||||
"SELECT * FROM " + quotedTableName)) { //NON-NLS
|
|
||||||
|
|
||||||
Map<String, List<Object>> columnView = new HashMap<>();
|
|
||||||
ResultSetMetaData metaData = resultSet.getMetaData();
|
|
||||||
int columns = metaData.getColumnCount();
|
|
||||||
for (int i = 1; i <= columns; i++) {
|
|
||||||
columnView.put(metaData.getColumnName(i), new LinkedList<>());
|
|
||||||
}
|
|
||||||
|
|
||||||
while (resultSet.next()) {
|
|
||||||
for (int i = 1; i <= columns; i++) {
|
|
||||||
if (resultSet.getObject(i) == null) {
|
|
||||||
columnView.get(metaData.getColumnName(i)).add("");
|
|
||||||
} else {
|
|
||||||
if (metaData.getColumnTypeName(i).compareToIgnoreCase("blob") == 0) {
|
|
||||||
columnView.get(metaData.getColumnName(i)).add(
|
|
||||||
Bundle.SQLiteReader_BlobNotShown_message());
|
|
||||||
} else {
|
|
||||||
columnView.get(metaData.getColumnName(i)).add(
|
|
||||||
resultSet.getObject(i));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return columnView;
|
|
||||||
} catch (SQLException ex) {
|
|
||||||
throw new FileReaderException(ex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Closes underlying JDBC connection.
|
|
||||||
*/
|
|
||||||
@Override
|
|
||||||
public void close() {
|
|
||||||
try {
|
|
||||||
connection.close();
|
|
||||||
} catch (SQLException ex) {
|
|
||||||
//Non-essential exception, user has no need for the connection
|
|
||||||
//object at this stage so closing details are not important
|
|
||||||
logger.log(Level.WARNING, "Could not close JDBC connection", ex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@@ -50,10 +50,12 @@ import javax.annotation.Nonnull;
 import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
 import org.netbeans.api.progress.ProgressHandle;
 import org.openide.util.Cancellable;
+import org.openide.util.Exceptions;
 import org.openide.util.NbBundle;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.casemodule.Case.CaseType;
 import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.casemodule.services.TagsManager;
 import org.sleuthkit.autopsy.coreutils.History;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
@@ -70,6 +72,7 @@ import org.sleuthkit.autopsy.imagegallery.datamodel.grouping.GroupViewState;
 import org.sleuthkit.autopsy.ingest.IngestManager;
 import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
 import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.ContentTag;
 import org.sleuthkit.datamodel.DataSource;
 import org.sleuthkit.datamodel.SleuthkitCase;
 import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
@@ -602,6 +605,7 @@ public final class ImageGalleryController {
 
         DRAWABLE_QUERY
                 = DATASOURCE_CLAUSE
+                + " AND ( meta_type = " + TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG.getValue() + ")"
                 + " AND ( "
                 + //grab files with supported extension
                 FILE_EXTENSION_CLAUSE
@@ -635,7 +639,7 @@ public final class ImageGalleryController {
         public void run() {
             progressHandle = getInitialProgressHandle();
             progressHandle.start();
-            updateMessage(Bundle.CopyAnalyzedFiles_populatingDb_status());
+            updateMessage(Bundle.CopyAnalyzedFiles_populatingDb_status() + " (Data Source " + dataSourceObjId + ")" );
 
             DrawableDB.DrawableTransaction drawableDbTransaction = null;
             CaseDbTransaction caseDbTransaction = null;
@@ -650,6 +654,7 @@ public final class ImageGalleryController {
             taskCompletionStatus = true;
             int workDone = 0;
 
+            // Cycle through all of the files returned and call processFile on each
             //do in transaction
             drawableDbTransaction = taskDB.beginTransaction();
             caseDbTransaction = tskCase.beginTransaction();
@@ -672,11 +677,12 @@ public final class ImageGalleryController {
 
                 progressHandle.finish();
                 progressHandle = ProgressHandle.createHandle(Bundle.BulkTask_committingDb_status());
-                updateMessage(Bundle.BulkTask_committingDb_status());
+                updateMessage(Bundle.BulkTask_committingDb_status() + " (Data Source " + dataSourceObjId + ")" );
                 updateProgress(1.0);
 
                 progressHandle.start();
                 caseDbTransaction.commit();
+                // pass true so that groupmanager is notified of the changes
                 taskDB.commitTransaction(drawableDbTransaction, true);
 
             } catch (TskCoreException ex) {
@@ -728,10 +734,12 @@ public final class ImageGalleryController {
 
         CopyAnalyzedFiles(long dataSourceObjId, ImageGalleryController controller) {
             super(dataSourceObjId, controller);
+            taskDB.buildFileMetaDataCache();
         }
 
         @Override
         protected void cleanup(boolean success) {
+            taskDB.freeFileMetaDataCache();
             // at the end of the task, set the stale status based on the
             // cumulative status of all data sources
            controller.setStale(controller.isDataSourcesTableStale());
@@ -744,20 +752,19 @@ public final class ImageGalleryController {
            if (known) {
                taskDB.removeFile(f.getId(), tr); //remove known files
            } else {
 
                try {
-                   //supported mimetype => analyzed
-                   if (null != f.getMIMEType() && FileTypeUtils.hasDrawableMIMEType(f)) {
-                       taskDB.updateFile(DrawableFile.create(f, true, false), tr, caseDbTransaction);
-                   } else {
                    // if mimetype of the file hasn't been ascertained, ingest might not have completed yet.
                    if (null == f.getMIMEType()) {
                        // set to false to force the DB to be marked as stale
                        this.setTaskCompletionStatus(false);
-                   } else {
-                       //unsupported mimtype => analyzed but shouldn't include
-                       taskDB.removeFile(f.getId(), tr);
                    }
+                   //supported mimetype => analyzed
+                   else if (FileTypeUtils.hasDrawableMIMEType(f)) {
+                       taskDB.updateFile(DrawableFile.create(f, true, false), tr, caseDbTransaction);
+                   }
+                   //unsupported mimtype => analyzed but shouldn't include
+                   else {
+                       taskDB.removeFile(f.getId(), tr);
                    }
                } catch (FileTypeDetector.FileTypeDetectorInitException ex) {
                    throw new TskCoreException("Failed to initialize FileTypeDetector.", ex);
@@ -27,6 +27,7 @@ import javafx.application.Platform;
 import javax.swing.JOptionPane;
 import javax.swing.SwingUtilities;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
+import org.openide.util.Exceptions;
 import org.openide.util.NbBundle;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
@@ -39,9 +40,13 @@ import org.sleuthkit.autopsy.events.AutopsyEvent;
 import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableDB;
 import org.sleuthkit.autopsy.ingest.IngestManager;
 import org.sleuthkit.autopsy.ingest.IngestManager.IngestJobEvent;
+import static org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.DATA_ADDED;
 import static org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.FILE_DONE;
+import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
 import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
 import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.BlackboardArtifact;
+import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
 import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.datamodel.TskCoreException;
 import org.sleuthkit.datamodel.TskData;
@@ -154,14 +159,6 @@ public class ImageGalleryModule {
                 return;
             }
 
-            if (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName()) != FILE_DONE) {
-                return;
-            }
-            // getOldValue has fileID getNewValue has Abstractfile
-            AbstractFile file = (AbstractFile) evt.getNewValue();
-            if (false == file.isFile()) {
-                return;
-            }
             /* only process individual files in realtime on the node that is
              * running the ingest. on a remote node, image files are processed
              * enblock when ingest is complete */
@@ -169,21 +166,39 @@ public class ImageGalleryModule {
                 return;
             }
 
+            // Bail out if the case is closed
+            try {
+                if (controller == null || Case.getCurrentCaseThrows() == null) {
+                    return;
+                }
+            } catch (NoCurrentCaseException ex) {
+                return;
+            }
+
+            if (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName()) == FILE_DONE) {
+
+                // getOldValue has fileID getNewValue has Abstractfile
+                AbstractFile file = (AbstractFile) evt.getNewValue();
+                if (false == file.isFile()) {
+                    return;
+                }
 
                 try {
                     ImageGalleryController con = getController();
                     if (con.isListeningEnabled()) {
                         try {
+                            // Update the entry if it is a picture and not in NSRL
                            if (isDrawableAndNotKnown(file)) {
-                               //this file should be included and we don't already know about it from hash sets (NSRL)
                                con.queueDBTask(new ImageGalleryController.UpdateFileTask(file, controller.getDatabase()));
-                           } else if (FileTypeUtils.getAllSupportedExtensions().contains(file.getNameExtension())) {
+                           }
+                           // Remove it from the DB if it is no longer relevant, but had the correct extension
+                           else if (FileTypeUtils.getAllSupportedExtensions().contains(file.getNameExtension())) {
                                /* Doing this check results in fewer tasks queued
                                 * up, and faster completion of db update. This file
                                 * would have gotten scooped up in initial grab, but
                                 * actually we don't need it */
                                con.queueDBTask(new ImageGalleryController.RemoveFileTask(file, controller.getDatabase()));
                            }
 
                        } catch (FileTypeDetector.FileTypeDetectorInitException ex) {
                            logger.log(Level.SEVERE, "Unable to determine if file is drawable and not known. Not making any changes to DB", ex); //NON-NLS
                            MessageNotifyUtil.Notify.error("Image Gallery Error",
@@ -194,6 +209,23 @@ public class ImageGalleryModule {
                    logger.log(Level.SEVERE, "Attempted to access ImageGallery with no case open.", ex); //NON-NLS
                }
            }
+           else if (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName()) == DATA_ADDED) {
+               ModuleDataEvent mde = (ModuleDataEvent)evt.getOldValue();
+
+               if (mde.getBlackboardArtifactType().getTypeID() == ARTIFACT_TYPE.TSK_METADATA_EXIF.getTypeID()) {
+                   DrawableDB drawableDB = controller.getDatabase();
+                   for (BlackboardArtifact art : mde.getArtifacts()) {
+                       drawableDB.addExifCache(art.getObjectID());
+                   }
+               }
+               else if (mde.getBlackboardArtifactType().getTypeID() == ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID()) {
+                   DrawableDB drawableDB = controller.getDatabase();
+                   for (BlackboardArtifact art : mde.getArtifacts()) {
+                       drawableDB.addHashSetCache(art.getObjectID());
+                   }
+               }
+           }
+       }
        }
 
        /**
@@ -251,7 +283,14 @@ public class ImageGalleryModule {
                    break;
                case CONTENT_TAG_ADDED:
                    final ContentTagAddedEvent tagAddedEvent = (ContentTagAddedEvent) evt;
-                   if (con.getDatabase().isInDB(tagAddedEvent.getAddedTag().getContent().getId())) {
+                   long objId = tagAddedEvent.getAddedTag().getContent().getId();
+
+                   // update the cache
+                   DrawableDB drawableDB = controller.getDatabase();
+                   drawableDB.addTagCache(objId);
+
+                   if (con.getDatabase().isInDB(objId)) {
                        con.getTagsManager().fireTagAddedEvent(tagAddedEvent);
                    }
                    break;
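The reworked handler above dispatches on the ingest module event type: the existing FILE_DONE per-file path is kept, and a DATA_ADDED path is added that records which object IDs gained EXIF or hash-set artifacts in the new DrawableDB caches. The following is a stripped-down sketch of that dispatch only; the event names come from the hunk, everything else (class and method names, the string comparison) is illustrative rather than the actual Autopsy API.

    import java.beans.PropertyChangeEvent;
    import java.beans.PropertyChangeListener;

    // Illustrative sketch: route ingest module events to two handlers,
    // mirroring the FILE_DONE / DATA_ADDED split in the hunk above.
    class IngestModuleEventRouter implements PropertyChangeListener {

        @Override
        public void propertyChange(PropertyChangeEvent evt) {
            String eventName = evt.getPropertyName();
            if ("FILE_DONE".equals(eventName)) {
                // per-file path: decide whether to queue an update or removal task
                handleFileDone(evt.getNewValue());
            } else if ("DATA_ADDED".equals(eventName)) {
                // artifact path: remember which object IDs now have EXIF / hash-set hits
                handleDataAdded(evt.getOldValue());
            }
        }

        private void handleFileDone(Object newValue) { /* queue DB task */ }

        private void handleDataAdded(Object oldValue) { /* update caches */ }
    }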
@@ -119,6 +119,8 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl
     private Node infoOverlay;
     private final Region infoOverLayBackground = new TranslucentRegion();
 
+
+
     /**
      * Returns whether the ImageGallery window is open or not.
      *
@@ -142,6 +144,11 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl
         return WindowManager.getDefault().findTopComponent(PREFERRED_ID);
     }
 
+    /**
+     * NOTE: This usually gets called on the EDT
+     *
+     * @throws NoCurrentCaseException
+     */
     @Messages({
         "ImageGalleryTopComponent.openTopCommponent.chooseDataSourceDialog.headerText=Choose a data source to view.",
         "ImageGalleryTopComponent.openTopCommponent.chooseDataSourceDialog.contentText=Data source:",
@@ -149,24 +156,35 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl
         "ImageGalleryTopComponent.openTopCommponent.chooseDataSourceDialog.titleText=Image Gallery",})
     public static void openTopComponent() throws NoCurrentCaseException {
 
+        // This creates the top component and adds the UI widgets if it has not yet been opened
         final TopComponent topComponent = WindowManager.getDefault().findTopComponent(PREFERRED_ID);
         if (topComponent == null) {
             return;
         }
-        topComponentInitialized = true;
         if (topComponent.isOpened()) {
             showTopComponent(topComponent);
             return;
         }
 
-        List<DataSource> dataSources = Collections.emptyList();
+        // Wait until the FX UI has been created. This way, we can always
+        // show the gray progress screen
+        // TODO: do this in a more elegant way.
+        while (topComponentInitialized == false) {}
+
         ImageGalleryController controller = ImageGalleryModule.getController();
         ((ImageGalleryTopComponent) topComponent).setController(controller);
+
+        // Display the UI so taht they can see the progress screen
+        showTopComponent(topComponent);
+
+        List<DataSource> dataSources = Collections.emptyList();
         try {
             dataSources = controller.getSleuthKitCase().getDataSources();
         } catch (TskCoreException tskCoreException) {
             logger.log(Level.SEVERE, "Unable to get data sourcecs.", tskCoreException);
         }
 
         GroupManager groupManager = controller.getGroupManager();
         synchronized (groupManager) {
             if (dataSources.size() <= 1
@@ -175,15 +193,13 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl
                  * set to something other than path , don't both to ask for
                  * datasource */
                 groupManager.regroup(null, groupManager.getGroupBy(), groupManager.getSortBy(), groupManager.getSortOrder(), true);
 
-                showTopComponent(topComponent);
                 return;
             }
         }
 
         Map<String, DataSource> dataSourceNames = new HashMap<>();
         dataSourceNames.put("All", null);
-        dataSources.forEach(dataSource -> dataSourceNames.put(dataSource.getName(), dataSource));
+        dataSources.forEach(dataSource -> dataSourceNames.put(dataSource.getName() + " (ID: " + dataSource.getId() + ")", dataSource));
 
         Platform.runLater(() -> {
             ChoiceDialog<String> datasourceDialog = new ChoiceDialog<>(null, dataSourceNames.keySet());
@@ -198,7 +214,6 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl
             synchronized (groupManager) {
                 groupManager.regroup(dataSource, groupManager.getGroupBy(), groupManager.getSortBy(), groupManager.getSortOrder(), true);
             }
-            SwingUtilities.invokeLater(() -> showTopComponent(topComponent));
         });
     }
 
@@ -266,6 +281,9 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl
                 controller.regroupDisabledProperty().addListener((Observable observable) -> checkForGroups());
                 controller.getGroupManager().getAnalyzedGroups().addListener((Observable observable) -> Platform.runLater(() -> checkForGroups()));
 
+                topComponentInitialized = true;
+
+                // This will cause the UI to show the progress dialog
                 Platform.runLater(() -> checkForGroups());
             }
         });
@@ -329,6 +347,8 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl
      * Check if there are any fully analyzed groups available from the
      * GroupManager and remove blocking progress spinners if there are. If there
      * aren't, add a blocking progress spinner with appropriate message.
+     *
+     * This gets called when any group becomes analyzed and when started.
      */
     @ThreadConfined(type = ThreadConfined.ThreadType.JFX)
     @NbBundle.Messages({
@@ -345,11 +365,14 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl
     private void checkForGroups() {
         GroupManager groupManager = controller.getGroupManager();
 
+        // if there are groups to display, then display them
+        // @@@ Need to check timing on this and make sure we have only groups for the selected DS. Seems like rebuild can cause groups to be created for a DS that is not later selected...
         if (isNotEmpty(groupManager.getAnalyzedGroups())) {
             clearNotification();
             return;
         }
 
+        // display a message based on if ingest is running and/or listening
         if (IngestManager.getInstance().isIngestRunning()) {
             if (controller.isListeningEnabled()) {
                 replaceNotification(centralStack,
@@ -361,12 +384,17 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl
             }
             return;
         }
 
+        // display a message about stuff still being in the queue
         if (controller.getDBTasksQueueSizeProperty().get() > 0) {
             replaceNotification(fullUIStack,
                     new NoGroupsDialog(Bundle.ImageGalleryController_noGroupsDlg_msg3(),
                             new ProgressIndicator()));
             return;
         }
 
+
+        // are there are files in the DB?
         try {
             if (controller.getDatabase().countAllFiles() <= 0) {
                 // there are no files in db
@@ -72,6 +72,7 @@ public class NextUnseenGroup extends Action {
         Optional.ofNullable(controller.getViewState())
                 .flatMap(GroupViewState::getGroup)
                 .ifPresent(group -> {
+                    setDisabled(true);
                     groupManager.markGroupSeen(group, true)
                             .addListener(this::advanceToNextUnseenGroup, MoreExecutors.newDirectExecutorService());
                 });
@@ -55,6 +55,11 @@ class SwingMenuAdapter extends Menu {
     SwingMenuAdapter(final JMenu jMenu) {
         super(jMenu.getText());
         this.jMenu = jMenu;
+        if(!jMenu.isEnabled()) {
+            //Grey out text if the JMenu that this Menu is wrapping is
+            //not enabled.
+            setDisable(true);
+        }
         buildChildren(jMenu);
 
     }
@@ -18,6 +18,8 @@
  */
 package org.sleuthkit.autopsy.imagegallery.datamodel;
 
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
 import com.google.common.collect.HashMultimap;
 import com.google.common.collect.Multimap;
 import java.io.IOException;
@@ -42,6 +44,7 @@ import java.util.Set;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 import java.util.logging.Level;
@@ -147,6 +150,16 @@ public final class DrawableDB {
 
     private final Lock DBLock = rwLock.writeLock(); //using exclusing lock for all db ops for now
 
+    // caches to make inserts / updates faster
+    private Cache<String, Boolean> groupCache = CacheBuilder.newBuilder().expireAfterWrite(5, TimeUnit.MINUTES).build();
+    private final Object cacheLock = new Object(); // protects access to the below cache-related objects
+    private boolean areCachesLoaded = false; // if true, the below caches contain valid data
+    private Set<Long> hasTagCache = new HashSet<>(); // contains obj id of files with tags
+    private Set<Long> hasHashCache = new HashSet<>(); // obj id of files with hash set hits
+    private Set<Long> hasExifCache = new HashSet<>(); // obj id of files with EXIF (make/model)
+    private int cacheBuildCount = 0; // number of tasks taht requested the caches be built
+
+
     static {//make sure sqlite driver is loaded // possibly redundant
         try {
             Class.forName("org.sqlite.JDBC");
|
|||||||
insertOrUpdateFile(f, tr, updateFileStmt, caseDbTransaction);
|
insertOrUpdateFile(f, tr, updateFileStmt, caseDbTransaction);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Populate caches based on current state of Case DB
|
||||||
|
*/
|
||||||
|
public void buildFileMetaDataCache() {
|
||||||
|
|
||||||
|
synchronized (cacheLock) {
|
||||||
|
cacheBuildCount++;
|
||||||
|
if (areCachesLoaded == true)
|
||||||
|
return;
|
||||||
|
|
||||||
|
try {
|
||||||
|
// get tags
|
||||||
|
try (SleuthkitCase.CaseDbQuery dbQuery = tskCase.executeQuery("SELECT obj_id FROM content_tags")) {
|
||||||
|
ResultSet rs = dbQuery.getResultSet();
|
||||||
|
while (rs.next()) {
|
||||||
|
long id = rs.getLong("obj_id");
|
||||||
|
hasTagCache.add(id);
|
||||||
|
}
|
||||||
|
} catch (SQLException ex) {
|
||||||
|
logger.log(Level.SEVERE, "Error getting tags from DB", ex); //NON-NLS
|
||||||
|
}
|
||||||
|
} catch (TskCoreException ex) {
|
||||||
|
logger.log(Level.SEVERE, "Error executing query to get tags", ex); //NON-NLS
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// hash sets
|
||||||
|
try (SleuthkitCase.CaseDbQuery dbQuery = tskCase.executeQuery("SELECT obj_id FROM blackboard_artifacts WHERE artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID())) {
|
||||||
|
ResultSet rs = dbQuery.getResultSet();
|
||||||
|
while (rs.next()) {
|
||||||
|
long id = rs.getLong("obj_id");
|
||||||
|
hasHashCache.add(id);
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (SQLException ex) {
|
||||||
|
logger.log(Level.SEVERE, "Error getting hashsets from DB", ex); //NON-NLS
|
||||||
|
}
|
||||||
|
} catch (TskCoreException ex) {
|
||||||
|
logger.log(Level.SEVERE, "Error executing query to get hashsets", ex); //NON-NLS
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// EXIF
|
||||||
|
try (SleuthkitCase.CaseDbQuery dbQuery = tskCase.executeQuery("SELECT obj_id FROM blackboard_artifacts WHERE artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF.getTypeID())) {
|
||||||
|
ResultSet rs = dbQuery.getResultSet();
|
||||||
|
while (rs.next()) {
|
||||||
|
long id = rs.getLong("obj_id");
|
||||||
|
hasExifCache.add(id);
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (SQLException ex) {
|
||||||
|
logger.log(Level.SEVERE, "Error getting EXIF from DB", ex); //NON-NLS
|
||||||
|
}
|
||||||
|
} catch (TskCoreException ex) {
|
||||||
|
logger.log(Level.SEVERE, "Error executing query to get EXIF", ex); //NON-NLS
|
||||||
|
}
|
||||||
|
|
||||||
|
areCachesLoaded = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a file to cache of files that have EXIF data
|
||||||
|
* @param objectID ObjId of file with EXIF
|
||||||
|
*/
|
||||||
|
public void addExifCache(long objectID) {
|
||||||
|
synchronized (cacheLock) {
|
||||||
|
// bail out if we are not maintaining caches
|
||||||
|
if (cacheBuildCount == 0)
|
||||||
|
return;
|
||||||
|
hasExifCache.add(objectID);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a file to cache of files that have hash set hits
|
||||||
|
* @param objectID ObjId of file with hash set
|
||||||
|
*/
|
||||||
|
public void addHashSetCache(long objectID) {
|
||||||
|
synchronized (cacheLock) {
|
||||||
|
// bail out if we are not maintaining caches
|
||||||
|
if (cacheBuildCount == 0)
|
||||||
|
return;
|
||||||
|
hasHashCache.add(objectID);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a file to cache of files that have tags
|
||||||
|
* @param objectID ObjId of file with tags
|
||||||
|
*/
|
||||||
|
public void addTagCache(long objectID) {
|
||||||
|
synchronized (cacheLock) {
|
||||||
|
// bail out if we are not maintaining caches
|
||||||
|
if (cacheBuildCount == 0)
|
||||||
|
return;
|
||||||
|
hasTagCache.add(objectID);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Free the cached case DB data
|
||||||
|
*/
|
||||||
|
public void freeFileMetaDataCache() {
|
||||||
|
synchronized (cacheLock) {
|
||||||
|
// dont' free these if there is another task still using them
|
||||||
|
if (--cacheBuildCount > 0)
|
||||||
|
return;
|
||||||
|
|
||||||
|
areCachesLoaded = false;
|
||||||
|
hasTagCache.clear();
|
||||||
|
hasHashCache.clear();
|
||||||
|
hasExifCache.clear();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Update (or insert) a file in(to) the drawable db. Weather this is an
|
* Update (or insert) a file in(to) the drawable db. Weather this is an
|
||||||
* insert or an update depends on the given prepared statement. This method
|
* insert or an update depends on the given prepared statement. This method
|
||||||
@ -778,7 +908,7 @@ public final class DrawableDB {
|
|||||||
*
|
*
|
||||||
* @param f The file to insert.
|
* @param f The file to insert.
|
||||||
* @param tr a transaction to use, must not be null
|
* @param tr a transaction to use, must not be null
|
||||||
* @param stmt the statement that does the actull inserting
|
* @param stmt the statement that does the actual inserting
|
||||||
*/
|
*/
|
||||||
private void insertOrUpdateFile(DrawableFile f, @Nonnull DrawableTransaction tr, @Nonnull PreparedStatement stmt, @Nonnull CaseDbTransaction caseDbTransaction) {
|
private void insertOrUpdateFile(DrawableFile f, @Nonnull DrawableTransaction tr, @Nonnull PreparedStatement stmt, @Nonnull CaseDbTransaction caseDbTransaction) {
|
||||||
|
|
||||||
@ -786,22 +916,42 @@ public final class DrawableDB {
|
|||||||
throw new IllegalArgumentException("can't update database with closed transaction");
|
throw new IllegalArgumentException("can't update database with closed transaction");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// get data from caches. Default to true and force the DB lookup if we don't have caches
|
||||||
|
boolean hasExif = true;
|
||||||
|
boolean hasHashSet = true;
|
||||||
|
boolean hasTag = true;
|
||||||
|
synchronized (cacheLock) {
|
||||||
|
if (areCachesLoaded) {
|
||||||
|
hasExif = hasExifCache.contains(f.getId());
|
||||||
|
hasHashSet = hasHashCache.contains(f.getId());
|
||||||
|
hasTag = hasTagCache.contains(f.getId());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
dbWriteLock();
|
dbWriteLock();
|
||||||
try {
|
try {
|
||||||
// "INSERT OR IGNORE/ INTO drawable_files (obj_id, data_source_obj_id, path, name, created_time, modified_time, make, model, analyzed)"
|
// "INSERT OR IGNORE/ INTO drawable_files (obj_id, data_source_obj_id, path, name, created_time, modified_time, make, model, analyzed)"
|
||||||
stmt.setLong(1, f.getId());
|
stmt.setLong(1, f.getId());
|
||||||
stmt.setLong(2, f.getAbstractFile().getDataSource().getId());
|
stmt.setLong(2, f.getAbstractFile().getDataSourceObjectId());
|
||||||
stmt.setString(3, f.getDrawablePath());
|
stmt.setString(3, f.getDrawablePath());
|
||||||
stmt.setString(4, f.getName());
|
stmt.setString(4, f.getName());
|
||||||
stmt.setLong(5, f.getCrtime());
|
stmt.setLong(5, f.getCrtime());
|
||||||
stmt.setLong(6, f.getMtime());
|
stmt.setLong(6, f.getMtime());
|
||||||
|
if (hasExif) {
|
||||||
stmt.setString(7, f.getMake());
|
stmt.setString(7, f.getMake());
|
||||||
stmt.setString(8, f.getModel());
|
stmt.setString(8, f.getModel());
|
||||||
|
} else {
|
||||||
|
stmt.setString(7, "");
|
||||||
|
stmt.setString(8, "");
|
||||||
|
}
|
||||||
stmt.setBoolean(9, f.isAnalyzed());
|
stmt.setBoolean(9, f.isAnalyzed());
|
||||||
stmt.executeUpdate();
|
stmt.executeUpdate();
|
||||||
|
|
||||||
// Update the list of file IDs in memory
|
// Update the list of file IDs in memory
|
||||||
addImageFileToList(f.getId());
|
addImageFileToList(f.getId());
|
||||||
|
|
||||||
|
// Update the hash set tables
|
||||||
|
if (hasHashSet) {
|
||||||
try {
|
try {
|
||||||
for (String name : f.getHashSetNames()) {
|
for (String name : f.getHashSetNames()) {
|
||||||
|
|
||||||
@ -826,21 +976,31 @@ public final class DrawableDB {
|
|||||||
} catch (TskCoreException ex) {
|
} catch (TskCoreException ex) {
|
||||||
logger.log(Level.SEVERE, "failed to insert/update hash hits for file" + f.getContentPathSafe(), ex); //NON-NLS
|
logger.log(Level.SEVERE, "failed to insert/update hash hits for file" + f.getContentPathSafe(), ex); //NON-NLS
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
//and update all groups this file is in
|
//and update all groups this file is in
|
||||||
for (DrawableAttribute<?> attr : DrawableAttribute.getGroupableAttrs()) {
|
for (DrawableAttribute<?> attr : DrawableAttribute.getGroupableAttrs()) {
|
||||||
|
// skip attributes that we do not have data for
|
||||||
|
if ((attr == DrawableAttribute.TAGS) && (hasTag == false)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
else if ((attr == DrawableAttribute.MAKE || attr == DrawableAttribute.MODEL) && (hasExif == false)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
Collection<? extends Comparable<?>> vals = attr.getValue(f);
|
Collection<? extends Comparable<?>> vals = attr.getValue(f);
|
||||||
for (Comparable<?> val : vals) {
|
for (Comparable<?> val : vals) {
|
||||||
if (null != val) {
|
if (null != val) {
|
||||||
if (attr == DrawableAttribute.PATH) {
|
if (attr == DrawableAttribute.PATH) {
|
||||||
insertGroup(f.getAbstractFile().getDataSource().getId(), val.toString(), attr, caseDbTransaction);
|
insertGroup(f.getAbstractFile().getDataSource().getId(), val.toString(), attr, caseDbTransaction);
|
||||||
} else {
|
}
|
||||||
|
else {
|
||||||
insertGroup(val.toString(), attr, caseDbTransaction);
|
insertGroup(val.toString(), attr, caseDbTransaction);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// @@@ Consider storing more than ID so that we do not need to requery each file during commit
|
||||||
tr.addUpdatedFile(f.getId());
|
tr.addUpdatedFile(f.getId());
|
||||||
|
|
||||||
} catch (SQLException | NullPointerException | TskCoreException ex) {
|
} catch (SQLException | NullPointerException | TskCoreException ex) {
|
||||||
@ -926,11 +1086,16 @@ public final class DrawableDB {
|
|||||||
return new DrawableTransaction();
|
return new DrawableTransaction();
|
||||||
}
|
}
|
||||||
|
|
||||||
public void commitTransaction(DrawableTransaction tr, Boolean notify) {
|
/**
|
||||||
|
*
|
||||||
|
* @param tr
|
||||||
|
* @param notifyGM If true, notify GroupManager about the changes.
|
||||||
|
*/
|
||||||
|
public void commitTransaction(DrawableTransaction tr, Boolean notifyGM) {
|
||||||
if (tr.isClosed()) {
|
if (tr.isClosed()) {
|
||||||
throw new IllegalArgumentException("can't close already closed transaction");
|
throw new IllegalArgumentException("can't close already closed transaction");
|
||||||
}
|
}
|
||||||
tr.commit(notify);
|
tr.commit(notifyGM);
|
||||||
}
|
}
|
||||||
|
|
||||||
public void rollbackTransaction(DrawableTransaction tr) {
|
public void rollbackTransaction(DrawableTransaction tr) {
|
||||||
@ -1071,7 +1236,7 @@ public final class DrawableDB {
|
|||||||
* @param sortOrder Sort ascending or descending.
|
* @param sortOrder Sort ascending or descending.
|
||||||
* @param dataSource
|
* @param dataSource
|
||||||
*
|
*
|
||||||
* @return
|
* @return Map of data source (or null of group by attribute ignores data sources) to list of unique group values
|
||||||
*
|
*
|
||||||
* @throws org.sleuthkit.datamodel.TskCoreException
|
* @throws org.sleuthkit.datamodel.TskCoreException
|
||||||
*/
|
*/
|
||||||
@ -1180,6 +1345,11 @@ public final class DrawableDB {
|
|||||||
* @param caseDbTransaction transaction to use for CaseDB insert/updates
|
* @param caseDbTransaction transaction to use for CaseDB insert/updates
|
||||||
*/
|
*/
|
||||||
private void insertGroup(long ds_obj_id, final String value, DrawableAttribute<?> groupBy, CaseDbTransaction caseDbTransaction) {
|
private void insertGroup(long ds_obj_id, final String value, DrawableAttribute<?> groupBy, CaseDbTransaction caseDbTransaction) {
|
||||||
|
// don't waste DB round trip if we recently added it
|
||||||
|
String cacheKey = Long.toString(ds_obj_id) + "_" + value + "_" + groupBy.getDisplayName();
|
||||||
|
if (groupCache.getIfPresent(cacheKey) != null)
|
||||||
|
return;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
String insertSQL = String.format(" (data_source_obj_id, value, attribute) VALUES (%d, \'%s\', \'%s\')",
|
String insertSQL = String.format(" (data_source_obj_id, value, attribute) VALUES (%d, \'%s\', \'%s\')",
|
||||||
ds_obj_id, value, groupBy.attrName.toString());
|
ds_obj_id, value, groupBy.attrName.toString());
|
||||||
@ -1188,6 +1358,7 @@ public final class DrawableDB {
|
|||||||
insertSQL += "ON CONFLICT DO NOTHING";
|
insertSQL += "ON CONFLICT DO NOTHING";
|
||||||
}
|
}
|
||||||
tskCase.getCaseDbAccessManager().insert(GROUPS_TABLENAME, insertSQL, caseDbTransaction);
|
tskCase.getCaseDbAccessManager().insert(GROUPS_TABLENAME, insertSQL, caseDbTransaction);
|
||||||
|
groupCache.put(cacheKey, Boolean.TRUE);
|
||||||
} catch (TskCoreException ex) {
|
} catch (TskCoreException ex) {
|
||||||
// Don't need to report it if the case was closed
|
// Don't need to report it if the case was closed
|
||||||
if (Case.isCaseOpen()) {
|
if (Case.isCaseOpen()) {
|
||||||
@ -1513,14 +1684,19 @@ public final class DrawableDB {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
synchronized private void commit(Boolean notify) {
|
/**
|
||||||
|
* Commit changes that happened during this transaction
|
||||||
|
*
|
||||||
|
* @param notifyGM If true, notify GroupManager about the changes.
|
||||||
|
*/
|
||||||
|
synchronized private void commit(Boolean notifyGM) {
|
||||||
if (!closed) {
|
if (!closed) {
|
||||||
try {
|
try {
|
||||||
con.commit();
|
con.commit();
|
||||||
// make sure we close before we update, bc they'll need locks
|
// make sure we close before we update, bc they'll need locks
|
||||||
close();
|
close();
|
||||||
|
|
||||||
if (notify) {
|
if (notifyGM) {
|
||||||
if (groupManager != null) {
|
if (groupManager != null) {
|
||||||
groupManager.handleFileUpdate(updatedFiles);
|
groupManager.handleFileUpdate(updatedFiles);
|
||||||
groupManager.handleFileRemoved(removedFiles);
|
groupManager.handleFileRemoved(removedFiles);
|
||||||
|
@@ -63,8 +63,8 @@ public abstract class DrawableFile {
 
     private static final Logger LOGGER = Logger.getLogger(DrawableFile.class.getName());
 
-    public static DrawableFile create(AbstractFile abstractFileById, boolean analyzed) {
-        return create(abstractFileById, analyzed, FileTypeUtils.hasVideoMIMEType(abstractFileById));
+    public static DrawableFile create(AbstractFile abstractFile, boolean analyzed) {
+        return create(abstractFile, analyzed, FileTypeUtils.hasVideoMIMEType(abstractFile));
     }
 
     /**
@@ -93,7 +93,7 @@ public class HashSetManager {
      *
      * @param fileID the fileID to invalidate in the cache
      */
-    public void invalidateHashSetsForFile(long fileID) {
+    public void invalidateHashSetsCacheForFile(long fileID) {
         hashSetCache.invalidate(fileID);
     }
 }
@@ -74,7 +74,8 @@ public class GroupKey<T extends Comparable<T>> implements Comparable<GroupKey<T>
 
         hash = 79 * hash + Objects.hashCode(this.val);
         hash = 79 * hash + Objects.hashCode(this.attr);
-        hash = 79 * hash + Objects.hashCode(this.dataSource);
+        if (this.dataSource != null)
+            hash = 79 * hash + (int)this.dataSource.getId();
 
         return hash;
     }
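The change above stops delegating to Objects.hashCode on the data source object and instead folds in its stable numeric object ID, skipping it entirely when dataSource is null (the "all data sources" key). A small self-contained sketch of the resulting contract, with an illustrative equals kept consistent with the hash; the class and field names are hypothetical, not the actual GroupKey implementation.

    import java.util.Objects;

    // Illustrative sketch: hash the stable numeric ID of the data source
    // rather than the data source object, and treat null as "all data sources".
    final class GroupKeySketch {
        private final Object val;
        private final Object attr;
        private final Long dataSourceObjId;   // null means "all data sources"

        GroupKeySketch(Object val, Object attr, Long dataSourceObjId) {
            this.val = val;
            this.attr = attr;
            this.dataSourceObjId = dataSourceObjId;
        }

        @Override
        public int hashCode() {
            int hash = 7;
            hash = 79 * hash + Objects.hashCode(val);
            hash = 79 * hash + Objects.hashCode(attr);
            if (dataSourceObjId != null) {
                hash = 79 * hash + (int) dataSourceObjId.longValue();
            }
            return hash;
        }

        @Override
        public boolean equals(Object other) {
            if (!(other instanceof GroupKeySketch)) {
                return false;
            }
            GroupKeySketch o = (GroupKeySketch) other;
            return Objects.equals(val, o.val)
                    && Objects.equals(attr, o.attr)
                    && Objects.equals(dataSourceObjId, o.dataSourceObjId);
        }
    }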
@@ -172,12 +172,15 @@ public class GroupManager {
      * a part of.
      */
     @SuppressWarnings({"rawtypes", "unchecked"})
-    synchronized public Set<GroupKey<?>> getGroupKeysForFile(DrawableFile file) throws TskCoreException, TskDataException {
+    synchronized public Set<GroupKey<?>> getGroupKeysForCurrentGroupBy(DrawableFile file) throws TskCoreException, TskDataException {
         Set<GroupKey<?>> resultSet = new HashSet<>();
         for (Comparable<?> val : getGroupBy().getValue(file)) {
 
             if (getGroupBy() == DrawableAttribute.PATH) {
+                // verify this file is in a data source being displayed
+                if ((getDataSource() == null) || (file.getDataSource().equals(getDataSource()))) {
                     resultSet.add(new GroupKey(getGroupBy(), val, file.getDataSource()));
+                }
             } else if (getGroupBy() == DrawableAttribute.TAGS) {
                 //don't show groups for the categories when grouped by tags.
                 if (CategoryManager.isNotCategoryTagName((TagName) val)) {
@@ -199,10 +202,10 @@ public class GroupManager {
      * @return A set of GroupKeys representing the group(s) the given file is a
      *         part of
      */
-    synchronized public Set<GroupKey<?>> getGroupKeysForFileID(Long fileID) {
+    synchronized public Set<GroupKey<?>> getGroupKeysForCurrentGroupBy(Long fileID) {
         try {
             DrawableFile file = getDrawableDB().getFileFromID(fileID);
-            return getGroupKeysForFile(file);
+            return getGroupKeysForCurrentGroupBy(file);
 
         } catch (TskCoreException | TskDataException ex) {
             logger.log(Level.SEVERE, "Failed to get group keys for file with ID " +fileID, ex); //NON-NLS
@@ -434,10 +437,18 @@ public class GroupManager {
         return sortOrderProp.getReadOnlyProperty();
     }
 
+    /**
+     *
+     * @return null if all data sources are being displayed
+     */
     public synchronized DataSource getDataSource() {
         return dataSourceProp.get();
     }
 
+    /**
+     *
+     * @param dataSource Data source to display or null to display all of them
+     */
     synchronized void setDataSource(DataSource dataSource) {
         dataSourceProp.set(dataSource);
     }
@@ -505,16 +516,28 @@ public class GroupManager {
         }
     }
 
+    /**
+     * Adds an analyzed file to a group and marks the group as analyzed if the entire group is
+     * now analyzed.
+     *
+     * @param group    Group being added to (will be null if a group has not yet been created)
+     * @param groupKey Group type/value
+     * @param fileID
+     */
     @SuppressWarnings("AssignmentToMethodParameter")
     synchronized private void addFileToGroup(DrawableGroup group, final GroupKey<?> groupKey, final long fileID) {
 
+        // NOTE: We assume that it has already been determined that GroupKey can be displayed based on Data Source filters
         if (group == null) {
             //if there wasn't already a group check if there should be one now
+            // path group, for example, only gets created when all files are analyzed
             group = popuplateIfAnalyzed(groupKey, null);
         }
-        if (group != null) {
+        else {
             //if there is aleady a group that was previously deemed fully analyzed, then add this newly analyzed file to it.
             group.addFile(fileID);
         }
+        // reset the seen status for the group
         markGroupSeen(group, false);
     }
 
@@ -543,7 +566,7 @@ public class GroupManager {
 
         for (final long fileId : removedFileIDs) {
             //get grouping(s) this file would be in
-            Set<GroupKey<?>> groupsForFile = getGroupKeysForFileID(fileId);
+            Set<GroupKey<?>> groupsForFile = getGroupKeysForCurrentGroupBy(fileId);
 
             for (GroupKey<?> gk : groupsForFile) {
                 removeFromGroup(gk, fileId);
@@ -564,12 +587,13 @@ public class GroupManager {
          * groups( if we are grouping by say make or model) -jm
          */
         for (long fileId : updatedFileIDs) {
-            controller.getHashSetManager().invalidateHashSetsForFile(fileId);
-            //get grouping(s) this file would be in
-            Set<GroupKey<?>> groupsForFile = getGroupKeysForFileID(fileId);
+            // reset the hash cache
+            controller.getHashSetManager().invalidateHashSetsCacheForFile(fileId);
+
+            // Update the current groups (if it is visible)
+            Set<GroupKey<?>> groupsForFile = getGroupKeysForCurrentGroupBy(fileId);
             for (GroupKey<?> gk : groupsForFile) {
+                // see if a group has been created yet for the key
                 DrawableGroup g = getGroupForKey(gk);
                 addFileToGroup(g, gk, fileId);
             }
@@ -579,6 +603,10 @@ public class GroupManager {
             controller.getCategoryManager().fireChange(updatedFileIDs, null);
         }
 
+    /**
+     * If the group is analyzed (or other criteria based on grouping) and should be shown to the user,
+     * then add it to the appropriate data structures so that it can be viewed.
+     */
     synchronized private DrawableGroup popuplateIfAnalyzed(GroupKey<?> groupKey, ReGroupTask<?> task) {
         /*
          * If this method call is part of a ReGroupTask and that task is
@@ -588,7 +616,9 @@ public class GroupManager {
          * user picked a different group by attribute, while the current task
          * was still running)
          */
-        if (isNull(task) || task.isCancelled() == false) {
+        if (isNull(task) == false && task.isCancelled() == true) {
+            return null;
+        }
 
         /*
          * For attributes other than path we can't be sure a group is fully
@@ -627,7 +657,6 @@ public class GroupManager {
                 logger.log(Level.SEVERE, "failed to get files for group: " + groupKey.getAttribute().attrName.toString() + " = " + groupKey.getValue(), ex); //NON-NLS
             }
         }
-        }
 
         return null;
     }
@@ -810,7 +839,7 @@ public class GroupManager {
          *
          * @param groupBy
          *
-         * @return
+         * @return map of data source (or null if group by attribute ignores data sources) to list of unique group values
          */
         public Multimap<DataSource, AttrValType> findValuesForAttribute() {
 
@@ -1,7 +1,7 @@
 """
 Autopsy Forensic Browser
 
-Copyright 2016 Basis Technology Corp.
+Copyright 2016-2018 Basis Technology Corp.
 Contact: carrier <at> sleuthkit <dot> org
 
 Licensed under the Apache License, Version 2.0 (the "License");
@@ -67,8 +67,8 @@ class BrowserLocationAnalyzer(general.AndroidComponentAnalyzer):
                     self._logger.log(Level.SEVERE, "Error parsing browser location files", ex)
                     self._logger.log(Level.SEVERE, traceback.format_exc())
                 except TskCoreException as ex:
-                    self._logger.log(Level.SEVERE, "Error finding browser location files", ex)
-                    self._logger.log(Level.SEVERE, traceback.format_exc())
+                    # Error finding browser location files.
+                    pass
 
     def __findGeoLocationsInDB(self, databasePath, abstractFile):
         if not databasePath:
@@ -78,11 +78,15 @@ class BrowserLocationAnalyzer(general.AndroidComponentAnalyzer):
             Class.forName("org.sqlite.JDBC") #load JDBC driver
             connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
             statement = connection.createStatement()
-        except (ClassNotFoundException, SQLException) as ex:
-            self._logger.log(Level.SEVERE, "Error connecting to SQL database", ex)
+        except (ClassNotFoundException) as ex:
+            self._logger.log(Level.SEVERE, "Error loading JDBC driver", ex)
             self._logger.log(Level.SEVERE, traceback.format_exc())
             return
+        except (SQLException) as ex:
+            # Error connecting to SQL databse.
+            return
 
+        resultSet = None
         try:
             resultSet = statement.executeQuery("SELECT timestamp, latitude, longitude, accuracy FROM CachedPosition;")
             while resultSet.next():
@@ -109,6 +113,9 @@ class BrowserLocationAnalyzer(general.AndroidComponentAnalyzer):
                 self._logger.log(Level.SEVERE, traceback.format_exc())
                 MessageNotifyUtil.Notify.error("Failed to index GPS trackpoint artifact for keyword search.", artifact.getDisplayName())
 
+        except SQLException as ex:
+            # Unable to execute browser location SQL query against database.
+            pass
         except Exception as ex:
             self._logger.log(Level.SEVERE, "Error putting artifacts to blackboard", ex)
             self._logger.log(Level.SEVERE, traceback.format_exc())
@@ -119,5 +126,5 @@ class BrowserLocationAnalyzer(general.AndroidComponentAnalyzer):
             statement.close()
             connection.close()
         except Exception as ex:
-            self._logger.log(Level.SEVERE, "Error closing database", ex)
-            self._logger.log(Level.SEVERE, traceback.format_exc())
+            # Error closing database.
+            pass
|
|||||||
"""
|
"""
|
||||||
Autopsy Forensic Browser
|
Autopsy Forensic Browser
|
||||||
|
|
||||||
Copyright 2016 Basis Technology Corp.
|
Copyright 2016-2018 Basis Technology Corp.
|
||||||
Contact: carrier <at> sleuthkit <dot> org
|
Contact: carrier <at> sleuthkit <dot> org
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
@ -69,8 +69,8 @@ class CacheLocationAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
self._logger.log(Level.SEVERE, "Error parsing cached location files", ex)
|
self._logger.log(Level.SEVERE, "Error parsing cached location files", ex)
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
except TskCoreException as ex:
|
except TskCoreException as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error finding cached location files", ex)
|
# Error finding cached location files.
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
pass
|
||||||
|
|
||||||
def __findGeoLocationsInFile(self, file, abstractFile):
|
def __findGeoLocationsInFile(self, file, abstractFile):
|
||||||
|
|
||||||
@ -142,6 +142,9 @@ class CacheLocationAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
MessageNotifyUtil.Notify.error("Failed to index GPS trackpoint artifact for keyword search.", artifact.getDisplayName())
|
MessageNotifyUtil.Notify.error("Failed to index GPS trackpoint artifact for keyword search.", artifact.getDisplayName())
|
||||||
|
|
||||||
|
except SQLException as ex:
|
||||||
|
# Unable to execute Cached GPS locations SQL query against database.
|
||||||
|
pass
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error parsing Cached GPS locations to blackboard", ex)
|
self._logger.log(Level.SEVERE, "Error parsing Cached GPS locations to blackboard", ex)
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
|
@@ -1,7 +1,7 @@
 """
 Autopsy Forensic Browser

-Copyright 2016-17 Basis Technology Corp.
+Copyright 2016-2018 Basis Technology Corp.
 Contact: carrier <at> sleuthkit <dot> org

 Licensed under the Apache License, Version 2.0 (the "License");
@@ -98,8 +98,8 @@ class CallLogAnalyzer(general.AndroidComponentAnalyzer):
             self._logger.log(Level.SEVERE, "Error writing temporary call log db to disk", ex)
             self._logger.log(Level.SEVERE, traceback.format_exc())
         except TskCoreException as ex:
-            self._logger.log(Level.SEVERE, "Error finding call logs", ex)
-            self._logger.log(Level.SEVERE, traceback.format_exc())
+            # Error finding call logs.
+            pass

     def __findCallLogsInDB(self, databasePath, abstractFile, dataSource):
         if not databasePath:
@@ -165,10 +165,12 @@ class CallLogAnalyzer(general.AndroidComponentAnalyzer):
                             self._logger.log(Level.SEVERE, "Error posting call log record to the blackboard", ex)
                             self._logger.log(Level.SEVERE, traceback.format_exc())
                 except SQLException as ex:
-                    self._logger.log(Level.WARNING, String.format("Could not read table %s in db %s", tableName, databasePath), ex)
+                    # Could not read table in db.
+                    # Catch and proceed to the next table in the loop.
+                    pass
         except SQLException as ex:
-            self._logger.log(Level.SEVERE, "Could not parse call log; error connecting to db " + databasePath, ex)
-            self._logger.log(Level.SEVERE, traceback.format_exc())
+            # Could not parse call log; error connecting to db.
+            pass
         finally:
             if bbartifacts:
                 IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(general.MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_CALLLOG, bbartifacts))
@ -1,7 +1,7 @@
|
|||||||
"""
|
"""
|
||||||
Autopsy Forensic Browser
|
Autopsy Forensic Browser
|
||||||
|
|
||||||
Copyright 2016-17 Basis Technology Corp.
|
Copyright 2016-2018 Basis Technology Corp.
|
||||||
Contact: carrier <at> sleuthkit <dot> org
|
Contact: carrier <at> sleuthkit <dot> org
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
@ -72,8 +72,8 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
self._logger.log(Level.SEVERE, "Error parsing Contacts", ex)
|
self._logger.log(Level.SEVERE, "Error parsing Contacts", ex)
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
except TskCoreException as ex:
|
except TskCoreException as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error finding Contacts", ex)
|
# Error finding Contacts.
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
pass
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Will create artifact from a database given by the path
|
Will create artifact from a database given by the path
|
||||||
@ -88,10 +88,13 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
Class.forName("org.sqlite.JDBC") # load JDBC driver
|
Class.forName("org.sqlite.JDBC") # load JDBC driver
|
||||||
connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
|
connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
|
||||||
statement = connection.createStatement()
|
statement = connection.createStatement()
|
||||||
except (ClassNotFoundException, SQLException) as ex:
|
except (ClassNotFoundException) as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error opening database", ex)
|
self._logger.log(Level.SEVERE, "Error loading JDBC driver", ex)
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
return
|
return
|
||||||
|
except (SQLException) as ex:
|
||||||
|
# Error opening database.
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
# Create a 'Device' account using the data source device id
|
# Create a 'Device' account using the data source device id
|
||||||
@ -101,6 +104,7 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
|
|
||||||
deviceAccountInstance = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager().createAccountFileInstance (Account.Type.DEVICE, deviceID, general.MODULE_NAME, abstractFile)
|
deviceAccountInstance = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager().createAccountFileInstance (Account.Type.DEVICE, deviceID, general.MODULE_NAME, abstractFile)
|
||||||
|
|
||||||
|
resultSet = None
|
||||||
try:
|
try:
|
||||||
# get display_name, mimetype(email or phone number) and data1 (phonenumber or email address depending on mimetype)
|
# get display_name, mimetype(email or phone number) and data1 (phonenumber or email address depending on mimetype)
|
||||||
# sorted by name, so phonenumber/email would be consecutive for a person if they exist.
|
# sorted by name, so phonenumber/email would be consecutive for a person if they exist.
|
||||||
@ -169,7 +173,8 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
MessageNotifyUtil.Notify.error("Failed to index contact artifact for keyword search.", artifact.getDisplayName())
|
MessageNotifyUtil.Notify.error("Failed to index contact artifact for keyword search.", artifact.getDisplayName())
|
||||||
|
|
||||||
except SQLException as ex:
|
except SQLException as ex:
|
||||||
self._logger.log(Level.WARNING, "Unable to execute contacts SQL query against {0} : {1}", [databasePath, ex])
|
# Unable to execute contacts SQL query against database.
|
||||||
|
pass
|
||||||
except TskCoreException as ex:
|
except TskCoreException as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error posting to blackboard", ex)
|
self._logger.log(Level.SEVERE, "Error posting to blackboard", ex)
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
@ -183,5 +188,5 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
statement.close()
|
statement.close()
|
||||||
connection.close()
|
connection.close()
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error closing database", ex)
|
# Error closing database.
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
pass
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
"""
|
"""
|
||||||
Autopsy Forensic Browser
|
Autopsy Forensic Browser
|
||||||
|
|
||||||
Copyright 2016 Basis Technology Corp.
|
Copyright 2016-2018 Basis Technology Corp.
|
||||||
Contact: carrier <at> sleuthkit <dot> org
|
Contact: carrier <at> sleuthkit <dot> org
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
@ -67,8 +67,8 @@ class GoogleMapLocationAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
self._logger.log(Level.SEVERE, "Error parsing Google map locations", ex)
|
self._logger.log(Level.SEVERE, "Error parsing Google map locations", ex)
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
except TskCoreException as ex:
|
except TskCoreException as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error finding Google map locations", ex)
|
# Error finding Google map locations.
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
pass
|
||||||
|
|
||||||
def __findGeoLocationsInDB(self, databasePath, abstractFile):
|
def __findGeoLocationsInDB(self, databasePath, abstractFile):
|
||||||
if not databasePath:
|
if not databasePath:
|
||||||
@ -78,11 +78,15 @@ class GoogleMapLocationAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
Class.forName("org.sqlite.JDBC") # load JDBC driver
|
Class.forName("org.sqlite.JDBC") # load JDBC driver
|
||||||
connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
|
connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
|
||||||
statement = connection.createStatement()
|
statement = connection.createStatement()
|
||||||
except (ClassNotFoundException, SQLException) as ex:
|
except (ClassNotFoundException) as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error opening database", ex)
|
self._logger.log(Level.SEVERE, "Error loading JDBC driver", ex)
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
return
|
return
|
||||||
|
except (SQLException) as ex:
|
||||||
|
# Error opening database.
|
||||||
|
return
|
||||||
|
|
||||||
|
resultSet = None
|
||||||
try:
|
try:
|
||||||
resultSet = statement.executeQuery(
|
resultSet = statement.executeQuery(
|
||||||
"SELECT time, dest_lat, dest_lng, dest_title, dest_address, source_lat, source_lng FROM destination_history;")
|
"SELECT time, dest_lat, dest_lng, dest_title, dest_address, source_lat, source_lng FROM destination_history;")
|
||||||
@ -119,6 +123,9 @@ class GoogleMapLocationAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
MessageNotifyUtil.Notify.error("Failed to index GPS route artifact for keyword search.", artifact.getDisplayName())
|
MessageNotifyUtil.Notify.error("Failed to index GPS route artifact for keyword search.", artifact.getDisplayName())
|
||||||
|
|
||||||
|
except SQLException as ex:
|
||||||
|
# Unable to execute Google map locations SQL query against database.
|
||||||
|
pass
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error parsing Google map locations to the blackboard", ex)
|
self._logger.log(Level.SEVERE, "Error parsing Google map locations to the blackboard", ex)
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
@ -129,8 +136,8 @@ class GoogleMapLocationAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
statement.close()
|
statement.close()
|
||||||
connection.close()
|
connection.close()
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error closing the database", ex)
|
# Error closing the database.
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
pass
|
||||||
|
|
||||||
# add periods 6 decimal places before the end.
|
# add periods 6 decimal places before the end.
|
||||||
@staticmethod
|
@staticmethod
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
"""
|
"""
|
||||||
Autopsy Forensic Browser
|
Autopsy Forensic Browser
|
||||||
|
|
||||||
Copyright 2016 Basis Technology Corp.
|
Copyright 2016-2018 Basis Technology Corp.
|
||||||
Contact: carrier <at> sleuthkit <dot> org
|
Contact: carrier <at> sleuthkit <dot> org
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
@ -68,8 +68,8 @@ class TangoMessageAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
self._logger.log(Level.SEVERE, "Error parsing Tango messages", ex)
|
self._logger.log(Level.SEVERE, "Error parsing Tango messages", ex)
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
except TskCoreException as ex:
|
except TskCoreException as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error finding Tango messages", ex)
|
# Error finding Tango messages.
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
pass
|
||||||
|
|
||||||
def __findTangoMessagesInDB(self, databasePath, abstractFile, dataSource):
|
def __findTangoMessagesInDB(self, databasePath, abstractFile, dataSource):
|
||||||
if not databasePath:
|
if not databasePath:
|
||||||
@ -79,10 +79,13 @@ class TangoMessageAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
Class.forName("org.sqlite.JDBC") # load JDBC driver
|
Class.forName("org.sqlite.JDBC") # load JDBC driver
|
||||||
connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
|
connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
|
||||||
statement = connection.createStatement()
|
statement = connection.createStatement()
|
||||||
except (ClassNotFoundException, SQLException) as ex:
|
except (ClassNotFoundException) as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error opening database", ex)
|
self._logger.log(Level.SEVERE, "Error loading JDBC driver", ex)
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
return
|
return
|
||||||
|
except (SQLException) as ex:
|
||||||
|
# Error opening database.
|
||||||
|
return
|
||||||
|
|
||||||
# Create a 'Device' account using the data source device id
|
# Create a 'Device' account using the data source device id
|
||||||
datasourceObjId = dataSource.getDataSource().getId()
|
datasourceObjId = dataSource.getDataSource().getId()
|
||||||
@ -90,6 +93,7 @@ class TangoMessageAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
deviceID = ds.getDeviceId()
|
deviceID = ds.getDeviceId()
|
||||||
deviceAccountInstance = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(Account.Type.DEVICE, deviceID, general.MODULE_NAME, abstractFile)
|
deviceAccountInstance = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(Account.Type.DEVICE, deviceID, general.MODULE_NAME, abstractFile)
|
||||||
|
|
||||||
|
resultSet = None
|
||||||
try:
|
try:
|
||||||
resultSet = statement.executeQuery(
|
resultSet = statement.executeQuery(
|
||||||
"SELECT conv_id, create_time, direction, payload FROM messages ORDER BY create_time DESC;")
|
"SELECT conv_id, create_time, direction, payload FROM messages ORDER BY create_time DESC;")
|
||||||
@ -120,6 +124,9 @@ class TangoMessageAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
MessageNotifyUtil.Notify.error("Failed to index Tango message artifact for keyword search.", artifact.getDisplayName())
|
MessageNotifyUtil.Notify.error("Failed to index Tango message artifact for keyword search.", artifact.getDisplayName())
|
||||||
|
|
||||||
|
except SQLException as ex:
|
||||||
|
# Unable to execute Tango messages SQL query against database.
|
||||||
|
pass
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error parsing Tango messages to the blackboard", ex)
|
self._logger.log(Level.SEVERE, "Error parsing Tango messages to the blackboard", ex)
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
@ -130,8 +137,8 @@ class TangoMessageAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
statement.close()
|
statement.close()
|
||||||
connection.close()
|
connection.close()
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error closing database", ex)
|
# Error closing database.
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
pass
|
||||||
|
|
||||||
# take the message string which is wrapped by a certain string, and return the text enclosed.
|
# take the message string which is wrapped by a certain string, and return the text enclosed.
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -142,6 +149,6 @@ class TangoMessageAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
Z = String(decoded, "UTF-8")
|
Z = String(decoded, "UTF-8")
|
||||||
result = Z.split(wrapper)[1]
|
result = Z.split(wrapper)[1]
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error decoding a Tango message", ex)
|
# Error decoding a Tango message.
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
pass
|
||||||
return result
|
return result
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
"""
|
"""
|
||||||
Autopsy Forensic Browser
|
Autopsy Forensic Browser
|
||||||
|
|
||||||
Copyright 2016-17 Basis Technology Corp.
|
Copyright 2016-2018 Basis Technology Corp.
|
||||||
Contact: carrier <at> sleuthkit <dot> org
|
Contact: carrier <at> sleuthkit <dot> org
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
@ -70,8 +70,8 @@ class TextMessageAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
self._logger.log(Level.SEVERE, "Error parsing text messages", ex)
|
self._logger.log(Level.SEVERE, "Error parsing text messages", ex)
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
except TskCoreException as ex:
|
except TskCoreException as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error finding text messages", ex)
|
# Error finding text messages.
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
pass
|
||||||
|
|
||||||
def __findTextsInDB(self, databasePath, abstractFile, dataSource):
|
def __findTextsInDB(self, databasePath, abstractFile, dataSource):
|
||||||
if not databasePath:
|
if not databasePath:
|
||||||
@ -82,10 +82,13 @@ class TextMessageAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
Class.forName("org.sqlite.JDBC") # load JDBC driver
|
Class.forName("org.sqlite.JDBC") # load JDBC driver
|
||||||
connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
|
connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
|
||||||
statement = connection.createStatement()
|
statement = connection.createStatement()
|
||||||
except (ClassNotFoundException, SQLException) as ex:
|
except (ClassNotFoundException) as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error opening database", ex)
|
self._logger.log(Level.SEVERE, "Error loading JDBC driver", ex)
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
return
|
return
|
||||||
|
except (SQLException) as ex:
|
||||||
|
# Error opening database.
|
||||||
|
return
|
||||||
|
|
||||||
# Create a 'Device' account using the data source device id
|
# Create a 'Device' account using the data source device id
|
||||||
datasourceObjId = dataSource.getDataSource().getId()
|
datasourceObjId = dataSource.getDataSource().getId()
|
||||||
@ -93,6 +96,7 @@ class TextMessageAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
deviceID = ds.getDeviceId()
|
deviceID = ds.getDeviceId()
|
||||||
deviceAccountInstance = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(Account.Type.DEVICE, deviceID, general.MODULE_NAME, abstractFile)
|
deviceAccountInstance = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(Account.Type.DEVICE, deviceID, general.MODULE_NAME, abstractFile)
|
||||||
|
|
||||||
|
resultSet = None
|
||||||
try:
|
try:
|
||||||
resultSet = statement.executeQuery(
|
resultSet = statement.executeQuery(
|
||||||
"SELECT address, date, read, type, subject, body FROM sms;")
|
"SELECT address, date, read, type, subject, body FROM sms;")
|
||||||
@ -134,6 +138,9 @@ class TextMessageAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
MessageNotifyUtil.Notify.error("Failed to index text message artifact for keyword search.", artifact.getDisplayName())
|
MessageNotifyUtil.Notify.error("Failed to index text message artifact for keyword search.", artifact.getDisplayName())
|
||||||
|
|
||||||
|
except SQLException as ex:
|
||||||
|
# Unable to execute text messages SQL query against database.
|
||||||
|
pass
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error parsing text messages to blackboard", ex)
|
self._logger.log(Level.SEVERE, "Error parsing text messages to blackboard", ex)
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
@ -147,5 +154,5 @@ class TextMessageAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
statement.close()
|
statement.close()
|
||||||
connection.close()
|
connection.close()
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error closing database", ex)
|
# Error closing database.
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
pass
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
"""
|
"""
|
||||||
Autopsy Forensic Browser
|
Autopsy Forensic Browser
|
||||||
|
|
||||||
Copyright 2016-17 Basis Technology Corp.
|
Copyright 2016-2018 Basis Technology Corp.
|
||||||
Contact: carrier <at> sleuthkit <dot> org
|
Contact: carrier <at> sleuthkit <dot> org
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
@ -72,8 +72,8 @@ class WWFMessageAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
self._logger.log(Level.SEVERE, "Error parsing WWF messages", ex)
|
self._logger.log(Level.SEVERE, "Error parsing WWF messages", ex)
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
except TskCoreException as ex:
|
except TskCoreException as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error finding WWF messages", ex)
|
# Error finding WWF messages.
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
pass
|
||||||
|
|
||||||
def __findWWFMessagesInDB(self, databasePath, abstractFile, dataSource):
|
def __findWWFMessagesInDB(self, databasePath, abstractFile, dataSource):
|
||||||
if not databasePath:
|
if not databasePath:
|
||||||
@ -83,10 +83,13 @@ class WWFMessageAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
Class.forName("org.sqlite.JDBC"); # load JDBC driver
|
Class.forName("org.sqlite.JDBC"); # load JDBC driver
|
||||||
connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
|
connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
|
||||||
statement = connection.createStatement()
|
statement = connection.createStatement()
|
||||||
except (ClassNotFoundException, SQLException) as ex:
|
except (ClassNotFoundException) as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error opening database", ex)
|
self._logger.log(Level.SEVERE, "Error loading JDBC driver", ex)
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
return
|
return
|
||||||
|
except (SQLException) as ex:
|
||||||
|
# Error opening database.
|
||||||
|
return
|
||||||
|
|
||||||
# Create a 'Device' account using the data source device id
|
# Create a 'Device' account using the data source device id
|
||||||
datasourceObjId = dataSource.getDataSource().getId()
|
datasourceObjId = dataSource.getDataSource().getId()
|
||||||
@ -94,6 +97,7 @@ class WWFMessageAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
deviceID = ds.getDeviceId()
|
deviceID = ds.getDeviceId()
|
||||||
deviceAccountInstance = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(Account.Type.DEVICE, deviceID, general.MODULE_NAME, abstractFile)
|
deviceAccountInstance = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(Account.Type.DEVICE, deviceID, general.MODULE_NAME, abstractFile)
|
||||||
|
|
||||||
|
resultSet = None
|
||||||
try:
|
try:
|
||||||
resultSet = statement.executeQuery(
|
resultSet = statement.executeQuery(
|
||||||
"SELECT message, strftime('%s' ,created_at) as datetime, user_id, game_id FROM chat_messages ORDER BY game_id DESC, created_at DESC;")
|
"SELECT message, strftime('%s' ,created_at) as datetime, user_id, game_id FROM chat_messages ORDER BY game_id DESC, created_at DESC;")
|
||||||
@ -129,6 +133,9 @@ class WWFMessageAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
MessageNotifyUtil.Notify.error("Failed to index WWF message artifact for keyword search.", artifact.getDisplayName())
|
MessageNotifyUtil.Notify.error("Failed to index WWF message artifact for keyword search.", artifact.getDisplayName())
|
||||||
|
|
||||||
|
except SQLException as ex:
|
||||||
|
# Unable to execute WWF messages SQL query against database.
|
||||||
|
pass
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error parsing WWF messages to the blackboard", ex)
|
self._logger.log(Level.SEVERE, "Error parsing WWF messages to the blackboard", ex)
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||||
@ -139,5 +146,5 @@ class WWFMessageAnalyzer(general.AndroidComponentAnalyzer):
|
|||||||
statement.close()
|
statement.close()
|
||||||
connection.close()
|
connection.close()
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
self._logger.log(Level.SEVERE, "Error closing database", ex)
|
# Error closing database.
|
||||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
pass
|
||||||
|
@@ -189,10 +189,10 @@ class DropdownToolbar extends javax.swing.JPanel {
                      * schema version and selectively enable the ad
                      * hoc search UI components.
                      */
-                    boolean schemaIsCurrent = IndexFinder.getCurrentSchemaVersion().equals(indexInfo.getSchemaVersion());
-                    listsButton.setEnabled(schemaIsCurrent);
+                    boolean schemaIsCompatible = indexInfo.isCompatible(IndexFinder.getCurrentSchemaVersion());
+                    listsButton.setEnabled(schemaIsCompatible);
                     searchDropButton.setEnabled(true);
-                    dropPanel.setRegexSearchEnabled(schemaIsCurrent);
+                    dropPanel.setRegexSearchEnabled(schemaIsCompatible);
                     active = true;
                 } else {
                     /*
@@ -20,6 +20,7 @@ package org.sleuthkit.autopsy.keywordsearch;

 import java.text.SimpleDateFormat;
 import java.util.Date;
+import org.apache.commons.lang.math.NumberUtils;
 import org.sleuthkit.autopsy.coreutils.UNCPathUtilities;

 /**
@@ -133,4 +134,20 @@ final class Index {
     String getIndexName() {
         return indexName;
     }
+
+    /**
+     * Is the current Index instance compatible with the given version number
+     *
+     * @param version The version number to compare the current Index against
+     *
+     * @return true if the current major version number is equal to the given
+     *         major version number, otherwise false
+     */
+    boolean isCompatible(String version) {
+        // Versions are compatible if they have the same major version no
+        int currentMajorVersion = NumberUtils.toInt(schemaVersion.substring(0, schemaVersion.indexOf('.')));
+        int givenMajorVersion = NumberUtils.toInt(version.substring(0, version.indexOf('.')));
+
+        return currentMajorVersion == givenMajorVersion;
+    }
 }
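The new compatibility check only compares the major component of the schema version. A small self-contained sketch of that comparison (hypothetical class name; Integer.parseInt stands in for the commons-lang NumberUtils.toInt call used in the hunk, so unlike the original it throws on malformed versions):

```java
public class SchemaCompatibilitySketch {

    // Mirrors Index.isCompatible(): versions are compatible when the major numbers match.
    static boolean isCompatible(String schemaVersion, String otherVersion) {
        int currentMajor = Integer.parseInt(schemaVersion.substring(0, schemaVersion.indexOf('.')));
        int givenMajor = Integer.parseInt(otherVersion.substring(0, otherVersion.indexOf('.')));
        return currentMajor == givenMajor;
    }

    public static void main(String[] args) {
        System.out.println(isCompatible("2.0", "2.1"));   // true  -> index can still be opened
        System.out.println(isCompatible("1.8", "2.0"));   // false -> index is rejected
    }
}
```

DropdownToolbar and KeywordSearchIngestModule both gate on this check in the hunks above and below, so an index whose schema differs only in the minor version can still be opened instead of being rejected outright.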
@@ -164,7 +164,7 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
                 if (!IndexFinder.getCurrentSolrVersion().equals(indexInfo.getSolrVersion())) {
                     throw new IngestModuleException(Bundle.KeywordSearchIngestModule_startupException_indexSolrVersionNotSupported(indexInfo.getSolrVersion()));
                 }
-                if (!IndexFinder.getCurrentSchemaVersion().equals(indexInfo.getSchemaVersion())) {
+                if (!indexInfo.isCompatible(IndexFinder.getCurrentSchemaVersion())) {
                     throw new IngestModuleException(Bundle.KeywordSearchIngestModule_startupException_indexSchemaNotSupported(indexInfo.getSchemaVersion()));
                 }
             } catch (NoOpenCoreException ex) {
@@ -325,8 +325,8 @@ public class SolrSearchService implements KeywordSearchService, AutopsyService {
             double currentSolrVersion = NumberUtils.toDouble(IndexFinder.getCurrentSolrVersion());
             double indexSolrVersion = NumberUtils.toDouble(indexToUse.getSolrVersion());
             if (indexSolrVersion == currentSolrVersion) {
-                // latest Solr version but not latest schema. index should be used in read-only mode
-                if (RuntimeProperties.runningWithGUI()) {
+                // latest Solr version but schema not compatible. index should be used in read-only mode
+                if (!indexToUse.isCompatible(IndexFinder.getCurrentSchemaVersion()) && RuntimeProperties.runningWithGUI()) {
                     // pop up a message box to indicate the read-only restrictions.
                     JOptionPane optionPane = new JOptionPane(
                             NbBundle.getMessage(this.getClass(), "SolrSearchService.IndexReadOnlyDialog.msg"),
@@ -21,31 +21,30 @@ package org.sleuthkit.autopsy.keywordsearch;
 import com.google.common.io.CharSource;
 import java.io.IOException;
 import java.io.Reader;
-import java.util.ArrayList;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
 import java.util.Collection;
 import java.util.Iterator;
 import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
 import java.util.logging.Level;
-import javax.swing.text.Segment;
+import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
 import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.autopsy.tabulardatareader.AbstractReader;
-import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException;
+import org.sleuthkit.autopsy.coreutils.SqliteUtil;
 import org.sleuthkit.datamodel.Content;
-import org.apache.commons.lang3.StringUtils;
-import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException;
-import org.sleuthkit.autopsy.tabulardatareader.FileReaderFactory;
 import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.TskCoreException;

 /**
  * Dedicated SqliteTextExtractor to solve the problems associated with Tika's
  * Sqlite parser.
  *
- * Tika problems:
- * 1) Tika fails to open virtual tables
- * 2) Tika fails to open tables with spaces in table name
- * 3) Tika fails to include the table names in output (except for the first table it parses)
+ * Tika problems: 1) Tika fails to open virtual tables 2) Tika fails to open
+ * tables with spaces in table name 3) Tika fails to include the table names in
+ * output (except for the first table it parses)
  */
 class SqliteTextExtractor extends ContentTextExtractor {

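The rewritten extractor goes straight to JDBC instead of Tika: table names are read from sqlite_master and quoted before each SELECT so that names containing spaces still work. A rough standalone sketch of that access pattern (the class name, file path, and plain stdout output are illustrative, not part of the commit; only the schema query and the quoted per-table SELECT come from the hunks below):

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

public class SqliteTableDumpSketch {

    public static void main(String[] args) throws Exception {
        Class.forName("org.sqlite.JDBC");
        try (Connection connection = DriverManager.getConnection("jdbc:sqlite:/tmp/sample.db")) {

            // 1. Table names come from the schema catalog, as in getTables() below.
            List<String> tableNames = new ArrayList<>();
            try (Statement statement = connection.createStatement();
                    ResultSet resultSet = statement.executeQuery(
                            "SELECT name FROM sqlite_master WHERE type = 'table'")) {
                while (resultSet.next()) {
                    tableNames.add(resultSet.getString("name"));
                }
            }

            // 2. Each table is read with a quoted name so spaces in the name do not break the query.
            for (String tableName : tableNames) {
                String quotedTableName = "\"" + tableName + "\"";
                try (Statement statement = connection.createStatement();
                        ResultSet resultSet = statement.executeQuery("SELECT * FROM " + quotedTableName)) {
                    int columnCount = resultSet.getMetaData().getColumnCount();
                    while (resultSet.next()) {
                        StringBuilder row = new StringBuilder();
                        for (int i = 1; i <= columnCount; i++) {
                            row.append(resultSet.getObject(i)).append(' ');
                        }
                        System.out.println(row.toString().trim());
                    }
                }
            }
        }
    }
}
```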
@ -93,126 +92,204 @@ class SqliteTextExtractor extends ContentTextExtractor {
|
|||||||
*/
|
*/
|
||||||
@Override
|
@Override
|
||||||
public Reader getReader(Content source) throws TextExtractorException {
|
public Reader getReader(Content source) throws TextExtractorException {
|
||||||
|
try {
|
||||||
//Firewall for any content that is not an AbstractFile
|
//Firewall for any content that is not an AbstractFile
|
||||||
if (!AbstractFile.class.isInstance(source)) {
|
if (!AbstractFile.class.isInstance(source)) {
|
||||||
try {
|
|
||||||
return CharSource.wrap(EMPTY_CHARACTER_SEQUENCE).openStream();
|
return CharSource.wrap(EMPTY_CHARACTER_SEQUENCE).openStream();
|
||||||
} catch (IOException ex) {
|
|
||||||
throw new TextExtractorException(
|
|
||||||
String.format("Encountered an issue wrapping blank string" //NON-NLS
|
|
||||||
+ " with CharSource for non-abstract file with id: [%s]," //NON-NLS
|
|
||||||
+ " name: [%s].", source.getId(), source.getName()), ex); //NON-NLS
|
|
||||||
}
|
}
|
||||||
}
|
return new SQLiteTableReader((AbstractFile) source);
|
||||||
|
} catch (NoCurrentCaseException | IOException | TskCoreException
|
||||||
try (AbstractReader reader = FileReaderFactory.createReader(
|
| ClassNotFoundException | SQLException ex) {
|
||||||
(AbstractFile) source, SQLITE_MIMETYPE)) {
|
|
||||||
final CharSequence databaseContent = getDatabaseContents(source, reader);
|
|
||||||
//CharSource will maintain unicode strings correctly
|
|
||||||
return CharSource.wrap(databaseContent).openStream();
|
|
||||||
} catch (FileReaderInitException | IOException ex) {
|
|
||||||
throw new TextExtractorException(
|
throw new TextExtractorException(
|
||||||
String.format("Encountered a FileReaderInitException" //NON-NLS
|
String.format("Encountered an issue while trying to initialize " //NON-NLS
|
||||||
+ " when trying to initialize a SQLiteReader" //NON-NLS
|
+ "a sqlite table steamer for abstract file with id: [%s], name: " //NON-NLS
|
||||||
+ " for AbstractFile with id: [%s], name: [%s].", //NON-NLS
|
+ "[%s].", source.getId(), source.getName()), ex); //NON-NLS
|
||||||
source.getId(), source.getName()), ex);
|
|
||||||
} catch (FileReaderException ex) {
|
|
||||||
throw new TextExtractorException(
|
|
||||||
String.format("Could not get contents from database " //NON-NLS
|
|
||||||
+ "tables for AbstractFile with id [%s], name: [%s].", //NON-NLS
|
|
||||||
source.getId(), source.getName()), ex);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Queries the sqlite database and adds all tables and rows to a
|
* Lazily loads tables from the database during reading to conserve memory.
|
||||||
* TableBuilder, which formats the strings into a table view for clean
|
|
||||||
* results while searching for keywords in the application.
|
|
||||||
*
|
|
||||||
* @param reader Sqlite reader for the content source
|
|
||||||
* @param source Sqlite file source
|
|
||||||
*/
|
*/
|
||||||
private CharSequence getDatabaseContents(Content source, AbstractReader reader) throws FileReaderException {
|
private class SQLiteTableReader extends Reader {
|
||||||
Collection<String> databaseStorage = new LinkedList<>();
|
|
||||||
|
|
||||||
Integer charactersCopied = loadDatabaseIntoCollection(databaseStorage,
|
private final Iterator<String> tableIterator;
|
||||||
reader, source);
|
private final Connection connection;
|
||||||
|
private Reader currentTableReader;
|
||||||
|
private final AbstractFile source;
|
||||||
|
|
||||||
return toCharSequence(databaseStorage, charactersCopied);
|
/**
|
||||||
|
* Creates a reader that streams each table into memory and wraps a
|
||||||
|
* reader around it. Designed to save memory for large databases.
|
||||||
|
*
|
||||||
|
* @param file Sqlite database file
|
||||||
|
*
|
||||||
|
* @throws NoCurrentCaseException Current case has closed
|
||||||
|
* @throws IOException Exception copying abstract file over
|
||||||
|
* to local temp directory
|
||||||
|
* @throws TskCoreException Exception using file manager to find
|
||||||
|
* meta files
|
||||||
|
* @throws ClassNotFoundException Could not find sqlite JDBC class
|
||||||
|
* @throws SQLException Could not establish jdbc connection
|
||||||
|
*/
|
||||||
|
public SQLiteTableReader(AbstractFile file) throws NoCurrentCaseException,
|
||||||
|
IOException, TskCoreException, ClassNotFoundException, SQLException {
|
||||||
|
source = file;
|
||||||
|
|
||||||
|
String localDiskPath = SqliteUtil.writeAbstractFileToLocalDisk(file);
|
||||||
|
SqliteUtil.findAndCopySQLiteMetaFile(file);
|
||||||
|
Class.forName("org.sqlite.JDBC"); //NON-NLS
|
||||||
|
connection = DriverManager.getConnection("jdbc:sqlite:" + localDiskPath); //NON-NLS
|
||||||
|
tableIterator = getTables().iterator();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Iterates all of the tables and populate the TableBuilder with all of the
|
* Gets the table names from the SQLite database file.
|
||||||
* rows from the table. The table string will be added to the list of
|
|
||||||
* contents.
|
|
||||||
*
|
*
|
||||||
* @param databaseStorage Collection containing all of the database content
|
* @return Collection of table names from the database schema
|
||||||
* @param tables A map of table names to table schemas
|
|
||||||
* @param reader SqliteReader for interfacing with the database
|
|
||||||
* @param source Source database file for logging
|
|
||||||
*/
|
*/
|
||||||
private int loadDatabaseIntoCollection(Collection<String> databaseStorage,
|
private Collection<String> getTables() throws SQLException {
|
||||||
AbstractReader reader, Content source) throws FileReaderException {
|
Collection<String> tableNames = new LinkedList<>();
|
||||||
//Will throw a FileReaderException if table schemas are unattainable
|
try (Statement statement = connection.createStatement();
|
||||||
Map<String, String> tables = reader.getTableSchemas();
|
ResultSet resultSet = statement.executeQuery(
|
||||||
|
"SELECT name FROM sqlite_master "
|
||||||
int charactersCopied = 0;
|
+ " WHERE type= 'table' ")) {
|
||||||
for (String tableName : tables.keySet()) {
|
while (resultSet.next()) {
|
||||||
TableBuilder tableBuilder = new TableBuilder();
|
tableNames.add(resultSet.getString("name")); //NON-NLS
|
||||||
tableBuilder.setTableName(tableName);
|
|
||||||
|
|
||||||
try {
|
|
||||||
//Catch any exception at a particular table, we want to ensure we grab
|
|
||||||
//content from as many tables as possible
|
|
||||||
List<Map<String, Object>> rowsInTable = reader.getRowsFromTable(tableName);
|
|
||||||
if (!rowsInTable.isEmpty()) {
|
|
||||||
tableBuilder.addHeader(new ArrayList<>(rowsInTable.get(0).keySet()));
|
|
||||||
for (Map<String, Object> row : rowsInTable) {
|
|
||||||
tableBuilder.addRow(row.values());
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} catch (FileReaderException ex) {
|
return tableNames;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Reads from a database table and loads the contents into a table
|
||||||
|
* builder so that its properly formatted during indexing.
|
||||||
|
*
|
||||||
|
* @param tableName Database table to be read
|
||||||
|
*/
|
||||||
|
private String getTableAsString(String tableName) {
|
||||||
|
TableBuilder table = new TableBuilder();
|
||||||
|
table.addTableName(tableName);
|
||||||
|
String quotedTableName = "\"" + tableName + "\"";
|
||||||
|
|
||||||
|
try (Statement statement = connection.createStatement();
|
||||||
|
ResultSet resultSet = statement.executeQuery(
|
||||||
|
"SELECT * FROM " + quotedTableName)) { //NON-NLS
|
||||||
|
ResultSetMetaData metaData = resultSet.getMetaData();
|
||||||
|
int columnCount = resultSet.getMetaData().getColumnCount();
|
||||||
|
Collection<String> row = new LinkedList<>();
|
||||||
|
|
||||||
|
//Add column names once from metadata
|
||||||
|
for (int i = 1; i <= columnCount; i++) {
|
||||||
|
row.add(metaData.getColumnName(i));
|
||||||
|
}
|
||||||
|
|
||||||
|
table.addHeader(row);
|
||||||
|
while (resultSet.next()) {
|
||||||
|
row = new LinkedList<>();
|
||||||
|
for (int i = 1; i <= columnCount; i++) {
|
||||||
|
Object result = resultSet.getObject(i);
|
||||||
|
String type = metaData.getColumnTypeName(i);
|
||||||
|
if (isValuableResult(result, type)) {
|
||||||
|
row.add(resultSet.getObject(i).toString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
table.addRow(row);
|
||||||
|
}
|
||||||
|
table.addCell("\n");
|
||||||
|
} catch (SQLException ex) {
|
||||||
logger.log(Level.WARNING, String.format(
|
logger.log(Level.WARNING, String.format(
|
||||||
"Error attempting to read file table: [%s]" //NON-NLS
|
"Error attempting to read file table: [%s]" //NON-NLS
|
||||||
+ " for file: [%s] (id=%d).", tableName, //NON-NLS
|
+ " for file: [%s] (id=%d).", tableName, //NON-NLS
|
||||||
source.getName(), source.getId()), ex);
|
source.getName(), source.getId()), ex);
|
||||||
}
|
}
|
||||||
|
|
||||||
String formattedTable = tableBuilder.toString();
|
return table.toString();
|
||||||
charactersCopied += formattedTable.length();
|
|
||||||
databaseStorage.add(formattedTable);
|
|
||||||
}
|
}
|
||||||
return charactersCopied;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Copy elements from collection (which contains formatted database tables)
|
* Determines if the result from the result set is worth adding to the
|
||||||
* into a CharSequence so that it can be wrapped and used by the Google CharSource
|
* row. Ignores nulls and blobs for the time being.
|
||||||
* lib.
|
|
||||||
*
|
*
|
||||||
* @param databaseStorage Collection containing database contents
|
* @param result Object result retrieved from resultSet
|
||||||
* @param characterCount Number of characters needed to be allocated in the buffer
|
* @param type Type of objet retrieved from resultSet
|
||||||
* so that all of the contents in the collection can be copied over.
|
|
||||||
*
|
*
|
||||||
* @return CharSource of the formatted database contents
|
* @return boolean where true means valuable, false implies it can be
|
||||||
|
* skipped.
|
||||||
*/
|
*/
|
||||||
private CharSequence toCharSequence(Collection<String> databaseStorage,
|
private boolean isValuableResult(Object result, String type) {
|
||||||
int characterCount) {
|
//Ignore nulls and blobs
|
||||||
|
return result != null && type.compareToIgnoreCase("blob") != 0;
|
||||||
final char[] databaseCharArray = new char[characterCount];
|
}
|
||||||
|
|
||||||
int currIndex = 0;
|
/**
|
||||||
for (String table : databaseStorage) {
|
* Loads a database file into the character buffer. The underlying
|
||||||
System.arraycopy(table.toCharArray(), 0, databaseCharArray,
|
* implementation here only loads one table at a time to conserve
|
||||||
currIndex, table.length());
|
* memory.
|
||||||
currIndex += table.length();
|
*
|
||||||
|
* @param cbuf Buffer to copy database content characters into
|
||||||
|
* @param off offset to begin loading in buffer
|
||||||
|
* @param len length of the buffer
|
||||||
|
*
|
||||||
|
* @return The number of characters read from the reader
|
||||||
|
*
|
||||||
|
* @throws IOException If there is an error with the CharSource wrapping
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public int read(char[] cbuf, int off, int len) throws IOException {
|
||||||
|
if (currentTableReader == null) {
|
||||||
|
String tableResults = getNextTable();
|
||||||
|
if (tableResults == null) {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
currentTableReader = CharSource.wrap(tableResults).openStream();
|
||||||
|
}
|
||||||
|
|
||||||
|
int charactersRead = currentTableReader.read(cbuf, off, len);
|
||||||
|
while (charactersRead == -1) {
|
||||||
|
String tableResults = getNextTable();
|
||||||
|
if (tableResults == null) {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
currentTableReader = CharSource.wrap(tableResults).openStream();
|
||||||
|
charactersRead = currentTableReader.read(cbuf, off, len);
|
||||||
|
}
|
||||||
|
|
||||||
|
return charactersRead;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Grab the next table name from the collection of all table names, once
|
||||||
|
* we no longer have a table to process, return null which will be
|
||||||
|
* understood to mean the end of parsing.
|
||||||
|
*
|
||||||
|
* @return Current table contents or null meaning there are not more
|
||||||
|
* tables to process
|
||||||
|
*/
|
||||||
|
private String getNextTable() {
|
||||||
|
if (tableIterator.hasNext()) {
|
||||||
|
return getTableAsString(tableIterator.next());
|
||||||
|
} else {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Close the underlying connection to the database.
|
||||||
|
*
|
||||||
|
* @throws IOException Not applicable, we can just catch the
|
||||||
|
* SQLException
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public void close() throws IOException {
|
||||||
|
try {
|
||||||
|
connection.close();
|
||||||
|
} catch (SQLException ex) {
|
||||||
|
//Non-essential exception, user has no need for the connection
|
||||||
|
//object at this stage so closing details are not important
|
||||||
|
logger.log(Level.WARNING, "Could not close JDBC connection", ex);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
//Segment class does not make an internal copy of the character array
|
|
||||||
//being passed in (more efficient). It also implements a CharSequences
|
|
||||||
//necessary for the CharSource class to create a compatible reader.
|
|
||||||
return new Segment(databaseCharArray, 0, characterCount);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -221,179 +298,61 @@ class SqliteTextExtractor extends ContentTextExtractor {
|
|||||||
*/
|
*/
|
||||||
private class TableBuilder {
|
private class TableBuilder {
|
||||||
|
|
||||||
private final List<String[]> rows = new LinkedList<>();
|
private final Integer DEFAULT_CAPACITY = 32000;
|
||||||
private Integer charactersAdded = 0;
|
private final StringBuilder table = new StringBuilder(DEFAULT_CAPACITY);
|
||||||
|
|
||||||
//Formatters
|
|
||||||
private static final String HORIZONTAL_DELIMITER = "-";
|
|
||||||
private static final String VERTICAL_DELIMITER = "|";
|
|
||||||
private static final String HEADER_CORNER = "+";
|
|
||||||
|
|
||||||
private static final String TAB = "\t";
|
private static final String TAB = "\t";
|
||||||
private static final String NEW_LINE = "\n";
|
private static final String NEW_LINE = "\n";
|
||||||
private static final String SPACE = " ";
|
private static final String SPACE = " ";
|
||||||
|
|
||||||
//Number of escape sequences in the header row
|
|
||||||
private static final int ESCAPE_SEQUENCES = 4;
|
|
||||||
|
|
||||||
private String tableName = "";
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Add the section to the top left corner of the table. This is where
|
* Add the section to the top left corner of the table. This is where
|
||||||
* the name of the table should go.
|
* the name of the table should go
|
||||||
*
|
*
|
||||||
* @param tableName Table name
|
* @param tableName Table name
|
||||||
*/
|
*/
|
||||||
public void setTableName(String tableName) {
|
public void addTableName(String tableName) {
|
||||||
this.tableName = tableName + NEW_LINE + NEW_LINE;
|
table.append(tableName)
|
||||||
|
.append(NEW_LINE)
|
||||||
|
.append(NEW_LINE);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Creates a border given the length param.
|
* Adds a formatted header row to the underlying StringBuilder
|
||||||
*
|
|
||||||
* @return Ex: \t+----------------------+\n
|
|
||||||
*/
|
|
||||||
private String createBorder(int length) {
|
|
||||||
return TAB + HEADER_CORNER + StringUtils.repeat(
|
|
||||||
HORIZONTAL_DELIMITER, length) + HEADER_CORNER + NEW_LINE;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Add header row to underlying list collection, which will be formatted
|
|
||||||
* when toString is called.
|
|
||||||
*
|
*
|
||||||
* @param vals
|
* @param vals
|
||||||
*/
|
*/
|
||||||
public void addHeader(Collection<Object> vals) {
|
public void addHeader(Collection<String> vals) {
|
||||||
addRow(vals);
|
addRow(vals);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Add a row to the underlying list collection, which will be formatted
|
* Adds a formatted row to the underlying StringBuilder
|
||||||
* when toString is called.
|
|
||||||
*
|
*
|
||||||
* @param vals
|
* @param vals
|
||||||
*/
|
*/
|
||||||
public void addRow(Collection<Object> vals) {
|
public void addRow(Collection<String> vals) {
|
||||||
List<String> rowValues = new ArrayList<>();
|
table.append(TAB);
|
||||||
vals.forEach((val) -> {
|
vals.forEach((val) -> {
|
||||||
rowValues.add(val.toString());
|
table.append(val);
|
||||||
charactersAdded += val.toString().length();
|
table.append(SPACE);
|
||||||
});
|
});
|
||||||
rows.add(rowValues.toArray(
|
table.append(NEW_LINE);
|
||||||
new String[rowValues.size()]));
|
}
|
||||||
|
|
||||||
|
public void addCell(String cell) {
|
||||||
|
table.append(cell);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Gets the max width of a cell in each column and the max number of
|
* Returns a string version of the table, with all of the escape
|
||||||
* columns in any given row. This ensures that there are enough columns
|
* sequences necessary to print nicely in the console output.
|
||||||
* and enough space for even the longest entry.
|
|
||||||
*
|
*
|
||||||
* @return array of column widths
|
* @return Formated table contents
|
||||||
*/
|
|
||||||
private int[] getMaxWidthPerColumn() {
|
|
||||||
int maxNumberOfColumns = 0;
|
|
||||||
for (String[] row : rows) {
|
|
||||||
maxNumberOfColumns = Math.max(
|
|
||||||
maxNumberOfColumns, row.length);
|
|
||||||
}
|
|
||||||
|
|
||||||
int[] widths = new int[maxNumberOfColumns];
|
|
||||||
for (String[] row : rows) {
|
|
||||||
for (int colNum = 0; colNum < row.length; colNum++) {
|
|
||||||
widths[colNum] = Math.max(
|
|
||||||
widths[colNum],
|
|
||||||
row[colNum].length()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return widths;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns a string version of the table, with all of the formatters and
|
|
||||||
* escape sequences necessary to print nicely in the console output.
|
|
||||||
*
|
|
||||||
* @return
|
|
||||||
*/
|
*/
|
||||||
@Override
|
@Override
|
||||||
public String toString() {
|
public String toString() {
|
||||||
StringBuilder outputTable = new StringBuilder(charactersAdded);
|
return table.toString();
|
||||||
int[] colMaxWidths = getMaxWidthPerColumn();
|
|
||||||
int borderLength = 0;
|
|
||||||
|
|
||||||
Iterator<String[]> rowIterator = rows.iterator();
|
|
||||||
if (rowIterator.hasNext()) {
|
|
||||||
//Length of the header defines the table boundaries
|
|
||||||
borderLength = appendFormattedHeader(rowIterator.next(),
|
|
||||||
colMaxWidths, outputTable);
|
|
||||||
|
|
||||||
while (rowIterator.hasNext()) {
|
|
||||||
appendFormattedRow(rowIterator.next(), colMaxWidths, outputTable);
|
|
||||||
}
|
|
||||||
|
|
||||||
outputTable.insert(0, tableName);
|
|
||||||
outputTable.append(createBorder(borderLength));
|
|
||||||
outputTable.append(NEW_LINE);
|
|
||||||
}
|
|
||||||
|
|
||||||
return outputTable.toString();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Outputs a fully formatted row in the table
|
|
||||||
*
|
|
||||||
* Example: \t| John | 12345678 | john@email.com |\n
|
|
||||||
*
|
|
||||||
* @param row Array containing unformatted row content
|
|
||||||
* @param colMaxWidths An array of column maximum widths, so that
|
|
||||||
* everything is pretty printed.
|
|
||||||
* @param outputTable Buffer that formatted contents are written to
|
|
||||||
*/
|
|
||||||
private void appendFormattedRow(String[] row,
|
|
||||||
int[] colMaxWidths, StringBuilder outputTable) {
|
|
||||||
outputTable.append(TAB);
|
|
||||||
for (int colNum = 0; colNum < row.length; colNum++) {
|
|
||||||
outputTable.append(VERTICAL_DELIMITER);
|
|
||||||
outputTable.append(SPACE);
|
|
||||||
outputTable.append(StringUtils.rightPad(
|
|
||||||
StringUtils.defaultString(row[colNum]),
|
|
||||||
colMaxWidths[colNum]));
|
|
||||||
outputTable.append(SPACE);
|
|
||||||
}
|
|
||||||
outputTable.append(VERTICAL_DELIMITER);
|
|
||||||
outputTable.append(NEW_LINE);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Adds a fully formatted header to the table builder and returns the
|
|
||||||
* length of this header. The length of the header is needed to set the
|
|
||||||
* table boundaries
|
|
||||||
*
|
|
||||||
* Example: \t+----------------------+\n
|
|
||||||
* \t| Email | Phone | Name |\n
|
|
||||||
* \t+----------------------+\n
|
|
||||||
*
|
|
||||||
* @param row Array of contents in each column
|
|
||||||
* @param colMaxWidths Widths for each column in the table
|
|
||||||
* @param outputTable Output stringbuilder
|
|
||||||
*
|
|
||||||
* @return length of the formatted header, this length will be needed to
|
|
||||||
* correctly print the bottom table border.
|
|
||||||
*/
|
|
||||||
private int appendFormattedHeader(String[] row, int[] colMaxWidths, StringBuilder outputTable) {
|
|
||||||
appendFormattedRow(row, colMaxWidths, outputTable);
|
|
||||||
//Printable table dimensions are equal to the length of the header minus
|
|
||||||
//the number of escape sequences used to for formatting.
|
|
||||||
int borderLength = outputTable.length() - ESCAPE_SEQUENCES;
|
|
||||||
String border = createBorder(borderLength);
|
|
||||||
|
|
||||||
//Surround the header with borders above and below.
|
|
||||||
outputTable.insert(0, border);
|
|
||||||
outputTable.append(border);
|
|
||||||
|
|
||||||
return borderLength;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
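The SQLiteTableReader introduced above renders one table at a time and only pulls in the next table when the current reader runs dry. A stripped-down sketch of that refill loop, using an iterator of plain strings in place of database tables (class and variable names are invented for the example; Guava's CharSource is assumed to be on the classpath, as it already is in this module):

```java
import com.google.common.io.CharSource;
import java.io.IOException;
import java.io.Reader;
import java.util.Arrays;
import java.util.Iterator;

public class LazyChunkReaderSketch extends Reader {

    private final Iterator<String> chunkIterator;
    private Reader currentChunkReader;

    LazyChunkReaderSketch(Iterator<String> chunkIterator) {
        this.chunkIterator = chunkIterator;
    }

    @Override
    public int read(char[] cbuf, int off, int len) throws IOException {
        while (true) {
            if (currentChunkReader == null) {
                if (!chunkIterator.hasNext()) {
                    return -1;                       // nothing left: end of stream
                }
                currentChunkReader = CharSource.wrap(chunkIterator.next()).openStream();
            }
            int charactersRead = currentChunkReader.read(cbuf, off, len);
            if (charactersRead != -1) {
                return charactersRead;               // hand back whatever the current chunk produced
            }
            currentChunkReader = null;               // current chunk exhausted, move on to the next one
        }
    }

    @Override
    public void close() throws IOException {
        // Nothing persistent to release in the sketch; the real reader closes its JDBC connection here.
    }

    public static void main(String[] args) throws IOException {
        Reader reader = new LazyChunkReaderSketch(Arrays.asList("table one\n", "table two\n").iterator());
        char[] buffer = new char[8];
        int read;
        while ((read = reader.read(buffer, 0, buffer.length)) != -1) {
            System.out.print(new String(buffer, 0, read));
        }
    }
}
```

Streaming per table keeps memory proportional to the largest single table rather than the whole database, which is the stated point of the change.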
|
@@ -37,7 +37,7 @@ Substring match should be used where the search term is just part of a word, or

 ## Regex match

-Regex match can be used to search for a specific pattern. Regular expressions are supported using Lucene Regex Syntax which is documented here: https://www.elastic.co/guide/en/elasticsearch/reference/1.6/query-dsl-regexp-query.html#regexp-syntax. Wildcards are automatically added to the beginning and end of the regular expressions to ensure all matches are found. Additionally, the resulting hits are split on common token separator boundaries (e.g. space, newline, colon, exclamation point etc.) to make the resulting keyword hit more amenable to highlighting.
+Regex match can be used to search for a specific pattern. Regular expressions are supported using Lucene Regex Syntax which is documented here: https://www.elastic.co/guide/en/elasticsearch/reference/1.6/query-dsl-regexp-query.html#regexp-syntax. Wildcards are automatically added to the beginning and end of the regular expressions to ensure all matches are found. Additionally, the resulting hits are split on common token separator boundaries (e.g. space, newline, colon, exclamation point etc.) to make the resulting keyword hit more amenable to highlighting. As of Autopsy 4.9, regex searches are no longer case sensitive. This includes literal characters and character classes.

 <b>Note:</b> Since Autopsy 4.4, boundary characters ('^' and '$') no longer work as word boundaries. Previously a search for "^[0-9]{5}$" would return all five
 digit strings surrounded by some type of non-word characters. For example, "The number 12345 is.." would contain a match, while "123456789 people" would not. This was because the regex
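The two rewrites described above (implicit wildcards plus, as of 4.9, case folding) can be pictured with a small sketch. This is an illustration only, not Autopsy's actual keyword search code, and it assumes the indexed text is likewise lowercased by the analyzer; the helper name is hypothetical.

// Hypothetical illustration of the query rewrites described above; Autopsy's real
// implementation lives in its Keyword Search module and differs in detail.
public class RegexQuerySketch {

    // Wrap the user's expression in wildcards so it can match anywhere in a token,
    // and lowercase it to line up with a case-folded index (case-insensitive as of 4.9).
    static String toIndexRegex(String userRegex) {
        return ".*" + userRegex.toLowerCase() + ".*";
    }

    public static void main(String[] args) {
        System.out.println(toIndexRegex("Cu.*es"));   // .*cu.*es.*
        System.out.println(toIndexRegex("[0-9]{4}")); // .*[0-9]{4}.*
    }
}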
@@ -48,8 +48,9 @@ There is some validation on the regex but it's best to test on a sample image to

 > In the year 1885 in an article titled Current Notes, the quick brown fox first jumped over the lazy dog.

-- "qu.ck", "Cu.*es" will match
-- "[Ff][Oo][Xx]" will match any version of "fox". There is no way to specify that an entire regex should be case-insensitive.
+- "fox" and "FOX" will both match since the search is case-insensitive
+- "qu.ck", "cu.*es" will match
+- "[JLK]umped" will match "jumped"
 - "[0-9]{4}" will match 1885. Character classes like "\d" are not supported. Backreferences are also not supported (but will not generate an error), so "Cu(.)\1ent" would not work to find "Current"

 ## Other notes
@@ -66,17 +67,15 @@ If you want to override this default behavior:
 - Make a new keyword list containing the result and run it either during ingest or through the Keyword Lists button.

 ### Non-Latin text
-In general all three types of keyword searches will work as expected but the feature has not been thoroughly tested with all character sets. As with regex above, we suggest testing on a sample file. Some notes:
-- Exact match and substring match may no longer be case-insensitive
-- In languages like Japanese that don't contain word breaks, every character is processed as a separate word. This tends to make substring match fail, but those searches can be run using exact match. For example, if the text contained 日本語, an exact match search on 日本 would find it (a substring search on 日本 would fail).
+In general all three types of keyword searches will work as expected but the feature has not been thoroughly tested with all character sets. For example, the searches may no longer be case-insensitive. As with regex above, we suggest testing on a sample file.

 \section ad_hoc_kw_search Keyword Search

-Individual keyword or regular expressions can quickly be searched using the search text box widget. You can select "Exact Match", "Substring Match" and "Regular Expression" match. See the earlier \ref ad_hoc_kw_types_section section for information on each keyword type. The search can be restricted to only certain data sources by selecting the checkbox near the bottom and then highlighting the data sources to search within. Multiple data sources can be selected used shift+left click or control+left click.
+Individual keyword or regular expressions can quickly be searched using the search text box widget. You can select "Exact Match", "Substring Match" and "Regular Expression" match. See the earlier \ref ad_hoc_kw_types_section section for information on each keyword type. The search can be restricted to only certain data sources by selecting the checkbox near the bottom and then highlighting the data sources to search within. Multiple data sources can be selected using shift+left click or control+left click. The "Save search results" checkbox determines whether the search results will be saved to the case database.

 \image html keyword-search-bar.PNG

-Results will be opened in a separate Results Viewer for every search executed and they will also be saved in the Directory Tree as shown in the screenshot below.
+Results will be opened in a separate Results Viewer for every search executed. If the "Save search results" checkbox was enabled, the results will also be saved in the Directory Tree as shown in the screenshot below.

 \image html keyword-search-hits.PNG
@@ -84,11 +83,11 @@ Results will be opened in a separate Results Viewer for every search executed an

 In addition to being selected during ingest, keyword lists can also be run through the Keyword Lists button. For information on setting up these keyword lists, see the \ref keywordListsTab section of the ingest module documentation.

-Lists created using the Keyword Search Configuration Dialog can be manually searched by the user by pressing on the 'Keyword Lists' button and selecting the check boxes corresponding to the lists to be searched. The search can be restricted to only certain data sources by selecting the checkbox near the bottom and then highlighting the data sources to search within. Multiple data sources can be selected used shift+left click or control+left click. Once everything has been configured, press "Search" to begin the search.
+Lists created using the Keyword Search Configuration Dialog can be manually searched by the user by pressing on the 'Keyword Lists' button and selecting the check boxes corresponding to the lists to be searched. The search can be restricted to only certain data sources by selecting the checkbox near the bottom and then highlighting the data sources to search within. Multiple data sources can be selected using shift+left click or control+left click. Once everything has been configured, press "Search" to begin the search. The "Save search results" checkbox determines whether the search results will be saved to the case database.

 \image html keyword-search-list.PNG

-The results of the keyword list search are shown in the tree, as shown below.
+If the "Save search results" checkbox was enabled, the results of the keyword list search will be shown in the tree, as shown below.

 \image html keyword-search-list-results.PNG
@@ -1,23 +1,61 @@
-/*! \page common_files_page Common Files Search
+/*! \page common_properties_page Common Properties Search

-\section common_files_overview Overview
+\section common_properties_overview Overview

-The common files feature allows you to search for multiple copies of the same file in different data sources within a case.
+The Common Properties Search feature allows you to search for multiple copies of a property within the current case or within the \ref central_repo_page.

-\section common_files_usage Usage
+To start a search, go to Tools->Common Properties Search to bring up the main dialog. Searching requires at least one of the following to be true:
+<ul>
+<li> The current case has more than one data source
+<li> The Central Repository contains at least two cases
+</ul>

-To start, go to Tools->Common Files Search to bring up the following dialog:
+A message will be displayed if both of these conditions are false.

-\image html common_files_dialog.png
+\section common_properties_search_types Common Properties Search Scope

-You can choose to find any files with multiple copies in the whole case, or specify that at least one of the copies has to be in the selected data source(s).
+Different parameters are needed for setting up the two types of searches. These will be described below.

-\image html common_files_data_source.png
+\subsection common_properties_intra_case Scope - between data sources in the current case

-You can also choose to restrict the search to only pictures and videos and/or documents.
+This type of search looks for files that are in multiple data sources within the current case. It does not require the Central Repository to be enabled, and currently only searches for common files. You must run the \ref hash_db_page to compute MD5 hashes on each data source prior to performing the search. The search results will not include any files that have been marked as "known" by the hash module (ex: files that are in the NSRL).

-Once the search is run, the matching files are displayed in the results tab. The results are grouped by how many matching files were found and then grouped by hash.
+\image html common_properties_intra_case.png

-\image html common_files_results.png
+By default, the search will find matching files in any data sources. If desired, you can change the search to only show matches where one of the files is in a certain data source by selecting it from the list:
+
+\image html common_properties_select_ds.png
+
+You can also choose to show any type of matching files or restrict the search to pictures and videos and/or documents.
+
+Finally, if you have the Central Repository enabled you can choose to hide matches that appear with a high frequency in the Central Repository.
+
+\subsection common_properties_central_repo Scope - between current case and cases in the Central Repository
+
+This type of search looks for files that contain common properties between the current case and other cases in the Central Repository. You must run the Correlation Engine ingest module on each case with the property you want to search for enabled, along with the ingest modules that produce that property type (see \ref cr_manage_properties).
+
+\image html common_properties_cr.png
+
+You can restrict the search to only include results where at least one of the matches was in a specific case.
+
+\image html common_properties_cr_case_select.png
+
+In the example above, any matching properties would have to exist in the current case and in Case 2. Note that matches in other cases will also be included in the results, as long as the property exists in the current case and the selected case.
+
+You can select the type of property to search for in the menu below:
+
+\image html common_properties_cr_property.png
+
+Restricting a file search to only return images or documents is currently disabled.
+
+Finally, you can choose to hide matches that appear with a high frequency in the Central Repository.
+
+\section common_properties_results Search Results
+
+Each search displays its results in a new tab. The title of the tab will include the search parameters.
+
+\image html common_properties_result.png
+
+The top tree level of the results shows the number of matching files. The results are grouped by how many matching files were found and then grouped by the property itself.

 */
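Conceptually, the intra-case search described above is a grouping problem: collect each file's MD5 hash together with the data source it came from, then keep only the hashes that appear in more than one data source. A rough, self-contained sketch of that idea follows; the record and field names are hypothetical and not Autopsy's actual data model, which queries the case database instead.

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

// Rough illustration of the "common properties between data sources" idea above.
public class CommonPropertiesSketch {

    record FileRecord(String name, String md5, String dataSource, boolean knownFile) {}

    // Group non-known files by MD5 and keep hashes seen in more than one data source.
    static Map<String, Set<String>> commonMd5s(List<FileRecord> files) {
        return files.stream()
                .filter(f -> !f.knownFile())                      // known (e.g. NSRL) files are excluded
                .collect(Collectors.groupingBy(FileRecord::md5,
                        Collectors.mapping(FileRecord::dataSource, Collectors.toSet())))
                .entrySet().stream()
                .filter(e -> e.getValue().size() > 1)             // property must span multiple data sources
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    }

    public static void main(String[] args) {
        List<FileRecord> files = List.of(
                new FileRecord("a.doc", "abc123", "image1.dd", false),
                new FileRecord("b.doc", "abc123", "image2.dd", false),
                new FileRecord("c.doc", "ffee99", "image1.dd", false));
        System.out.println(commonMd5s(files));   // e.g. {abc123=[image1.dd, image2.dd]}
    }
}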
@@ -2,7 +2,7 @@

 The Content Viewer lives in the lower right-hand side of the Autopsy main screen and shows pictures, video, hex, text, extracted strings, metadata, etc. The Content Viewer is enabled when you select an entry in the \ref ui_results.

-The Content Viewer is context-aware, meaning different tabs will be enabled depending on the type of content selected and which ingest modules have been run. It will default to what it considers the "most specific" tab. For example, selecting a JPG will cause the Content Viewer to automatically select the "Application" tab and will display the image there. If you instead would like the Content Viewer to stay on the previously selected tab when you change to a different content object, go to Tools->Options->Application Tab and select the "Stay on the same file viewer" option.
+The Content Viewer is context-aware, meaning different tabs will be enabled depending on the type of content selected and which ingest modules have been run. It will default to what it considers the "most specific" tab. For example, selecting a JPG will cause the Content Viewer to automatically select the "Application" tab and will display the image there. If you instead would like the Content Viewer to stay on the previously selected tab when you change to a different content object, go to the \ref view_options_page panel through Tools->Options->Application Tab and select the "Stay on the same file viewer" option.

 \image html content_viewer_options_panel.png

@@ -62,6 +62,12 @@ The Results tab is active when selecting entries that are part of the Results tr
 <br>
 \image html content_viewer_results_bookmark.png

+\section cv_annotations Annotations
+
+The Annotations tab shows information added by an analyst about a file or result. It displays any tags and comments associated with the file or result, and if the \ref central_repo_page is enabled it will also display any comments saved to the Central Repository.
+
+\image html content_viewer_annotations.png
+
 \section cv_other_occurrences Other Occurrences

 The Other Occurrences tab shows other instances of this file or result. Enabling the \ref central_repo_page adds additional functionality to this tab. See the \ref cr_content_viewer section for more information.
@@ -41,6 +41,8 @@ To import an existing hash set, use the "Import Database" button on the Hash Set

 <b>Send ingest inbox message for each hit</b> - Determines whether a message is sent for each matching file. This can not be enabled for a "known" hash set.

+<b>Copy hash set into user configuration folder</b> - Makes a copy of the hash set instead of using the existing one. This is intended to be used with a \ref live_triage_page drive.
+
 \subsection hashset_indexing Indexing

 After importing the hash set, you may have to index it before it can be used. For most hash set types, Autopsy needs an index of the hash set to actually use a hash set. It can create the index if you import only the hash set. Any hash sets that require an index will be displayed in red, and their "Index Status" will indicate that an index needs to be created. This is done simply by using the Index button.
@@ -1,9 +1,9 @@
 /*! \page image_gallery_page Image Gallery Module
 Overview
 ========
-This document outlines the use of the new Image Gallery feature of Autopsy. This feature was funded by DHS S&T to help provide free and open source digital forensics tools to law enforcement.
+This document outlines the use of the Image Gallery feature of Autopsy. This feature was funded by DHS S&T to help provide free and open source digital forensics tools to law enforcement.

-The new image gallery feature has been designed specifically with child-exploitation cases in mind, but can be used for a variety of other investigation types that involve images and videos. It offers the following features beyond the traditional long list of thumbnails that Autopsy and other tools currently provide.
+The Image Gallery feature has been designed specifically with child-exploitation cases in mind, but can be used for a variety of other investigation types that involve images and videos. It offers the following features beyond the traditional long list of thumbnails that Autopsy and other tools currently provide.
 - Groups images by folder (and other attributes) to help examiner break the large set of images into smaller groups and to help focus on areas with images of interest.
 - Allows examiner to start viewing images immediately upon adding them to the case. As images are hashed, they are updated in the interface. You do not need to wait until the entire image is ingested.
@@ -99,18 +99,22 @@ The section in the top left with tabs labelled “Contents” and “Hash Hits
 Each group shows the number of files that hit against configured Hash DBs during ingest (hash hits) and the total number of image/video files as a ratio (hash hits / total) after its name. By selecting groups in the tree/list you can navigate directly to them in the main display area. If the Hash Hits tab is selected only groups containing files that have hash hits are shown.

+Listening for Changes
+======================
+The Image Gallery maintains its own database, which needs to be updated as files are analyzed by Autopsy. For example, it needs to know when a file has been hashed or had EXIF data extracted. By default, the Image Gallery is always listening in single-user cases for these changes and keeps its database up to date. If this is causing a performance impact, you can disable this feature in the Options panel.
+
+You can turn the listening off for the current case and you can change the default behavior for future cases.
+
+Multi-User Cases
+=================
+If a case was created in a multi-user environment, then it becomes much harder to keep the Image Gallery database in sync because many other examiners could be analyzing data from that case. Therefore, Image Gallery has different update behaviors in a multi-user case than it does for a single-user case. Notably:
+- If your system is running ingest on the data source, then you will continue to get real-time updates just like in a single-user case. So, as soon as a folder of files has been hashed and had EXIF data extracted, it will be possible for you to view it.
+- If another system in the cluster is running ingest on a data source, you may not see its results until the ingest has completed. You will not get real-time updates and instead you will get updates only after you have closed Image Gallery and opened it again.
+- Each time you open Image Gallery, it will check the local database to see if it is in sync with the case database. If it is not, it will ask you to rebuild it. This is because additional data may have been added to the case database by another system and your Image Gallery database is no longer accurate.
+
+You also have the option to see groups (or folders) that are new to you or new to everyone. When you press “Show Next Unseen Group”, the default behavior is to show you the highest priority group that you have not seen yet. But, you can also choose to see groups that no one else has seen. This choice can be made using the check box next to the “Show Next Unseen Group” button.
Binary image changes under docs/doxygen-user/images/:

New screenshots added: common_properties_cr.png (27 KiB), common_properties_cr_case_select.png (5.3 KiB), common_properties_cr_property.png (5.0 KiB), common_properties_intra_case.png (19 KiB), common_properties_result.png (29 KiB), common_properties_select_ds.png (8.6 KiB), content_viewer_annotations.png (16 KiB), live_triage_import_hash.png (25 KiB), tagging_user_name.png (7.5 KiB), tagging_view_options.png (22 KiB), view_options_gear.png (22 KiB), view_options_gmt.png (12 KiB), view_options_hide_slack.png (18 KiB), view_options_local_time.png (12 KiB), view_options_options_panel.png (48 KiB), view_options_reject_account.png (48 KiB), view_options_sco.png (18 KiB), view_options_show_slack.png (20 KiB).

Deleted screenshots (names not shown in this view): three images of 5.7 KiB, 19 KiB, and 52 KiB.

Updated screenshots (names not shown in this view): eight existing images replaced with new versions (23→25 KiB, 5.5→6.0 KiB, 34→33 KiB, 8.4→8.6 KiB, 17→18 KiB, 60→62 KiB, 431→431 KiB, 34→33 KiB).
@@ -79,13 +79,14 @@ The following steps will configure Solr to run using an account that will have a
 <br><br>
 5. From an Autopsy installation, copy the folder <i>"C:\Program Files\Autopsy-XXX(current version)\autopsy\solr\solr\configsets"</i> to <i>"C:\Bitnami\solr-4.10.3-0\apache-solr\solr"</i>.
 6. From an Autopsy installation, copy the folder <i>"C:\Program Files\Autopsy-XXX(current version)\autopsy\solr\solr\lib"</i> to <i>"C:\Bitnami\solr-4.10.3-0\apache-solr\solr"</i>.
-7. Stop the <i>solrJetty</i> service by pressing <i>Start</i>, typing <i>services.msc</i>, pressing Enter, and locating the <i>solrJetty</i> Windows service. Select the service and press <i>Stop the service</i>. If the service is already stopped and there is no <i>Stop the service</i> available, this is okay.
-8. Start a Windows command prompt as administrator by pressing Start, typing <i>command</i>, right clicking on <i>Command Prompt</i>, and clicking on <i>Run as administrator</i>. Then run the following command to uninstall the solrJetty service:
+7. From an Autopsy installation, copy the file <i>"C:\Program Files\Autopsy-XXX(current version)\autopsy\solr\solr\zoo.cfg"</i> to <i>"C:\Bitnami\solr-4.10.3-0\apache-solr\solr"</i>.
+8. Stop the <i>solrJetty</i> service by pressing <i>Start</i>, typing <i>services.msc</i>, pressing Enter, and locating the <i>solrJetty</i> Windows service. Select the service and press <i>Stop the service</i>. If the service is already stopped and there is no <i>Stop the service</i> available, this is okay.
+9. Start a Windows command prompt as administrator by pressing Start, typing <i>command</i>, right clicking on <i>Command Prompt</i>, and clicking on <i>Run as administrator</i>. Then run the following command to uninstall the solrJetty service:

 cmd /c C:\Bitnami\solr-4.10.3-0\apache-solr\scripts\serviceinstall.bat UNINSTALL

 You will very likely see a result that says "The solrJetty service is not started." This is okay.
-9. Start a Windows command prompt as administrator by pressing Start, typing <i>command</i>, right clicking on <i>Command Prompt</i>, and clicking on <i>Run as administrator</i>. Then run the following command to install the solrJetty service:
+10. Start a Windows command prompt as administrator by pressing Start, typing <i>command</i>, right clicking on <i>Command Prompt</i>, and clicking on <i>Run as administrator</i>. Then run the following command to install the solrJetty service:

 cmd /c C:\Bitnami\solr-4.10.3-0\apache-solr\scripts\serviceinstall.bat INSTALL
 <br> Note the argument "INSTALL" is case sensitive. Your command prompt should look like the screenshot below. Very likely your command prompt will say "The solrJetty service could not be started." This is okay.
@@ -30,4 +30,17 @@ Then choose the Local Disk data source and select the desired drive.

 See the \ref ds_local page for more information on local disk data sources.

+\section live_triage_hash_db Using hash sets
+
+Follow these steps to import a hash set to use with the \ref hash_db_page :
+<ol>
+<li> Run Autopsy from the live triage drive, as described earlier
+<li> Go to Tools->Options and then the "Hash Set" tab
+<li> Import the hash set as normal (using a "Local" destination) but check the "Copy hash set into user configuration folder" option at the bottom
+
+\image html live_triage_import_hash.png
+</ol>
+
+This will allow the hash set to be used regardless of the drive letter assigned to the live triage drive.
+
 */
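The reason the copied hash set keeps working is simply that its location can be resolved relative to the user configuration folder rather than through an absolute path that bakes in a drive letter. A small illustration of that difference follows; the folder layout and names are hypothetical and are not Autopsy's actual configuration paths.

import java.nio.file.Path;
import java.nio.file.Paths;

// Illustrative only: why a hash set copied under the user configuration folder keeps
// working when the live-triage drive letter changes.
public class HashSetPathSketch {
    public static void main(String[] args) {
        // A path imported from the triage drive records the drive letter it came from...
        Path importedFromDrive = Paths.get("E:\\hashsets\\notable.kdb");

        // ...while a copy stored under the user configuration folder is resolved relative
        // to wherever that folder lives on the current machine.
        Path userConfigDir = Paths.get(System.getProperty("user.home"), "autopsy_config");
        Path configRelativeCopy = userConfigDir.resolve(Paths.get("HashDatabases", "notable.kdb"));

        System.out.println("Drive-letter path (breaks if E: changes): " + importedFromDrive);
        System.out.println("Config-relative copy: " + configRelativeCopy);
    }
}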
@@ -47,7 +47,7 @@ The following topics are available here:
 - \subpage stix_page
 - \subpage central_repo_page
 - \subpage communications_page
-- \subpage common_files_page
+- \subpage common_properties_page
 - \subpage logs_and_output_page
 - Reporting
 - \subpage tagging_page
@@ -6,6 +6,23 @@ The Result Viewer is located on the top right of the Autopsy screen. It shows li
 \image html result-viewer-example-1.PNG
 <br>

+By default, the first three columns after the file name in the results viewer are named "S", "C" and "O".
+
+\image html view_options_sco.png
+
+These columns display the following:
+<ul>
+<li> (S)core column - indicates whether the item is interesting or notable
+<ul>
+<li>Displays a red icon if the file is a match for a notable hash set or has been tagged with a notable tag
+<li>Displays a yellow icon if the file has an interesting item match or has been tagged with a non-notable tag
+</ul>
+<li> (C)omment column - indicates whether the item has a comment in the Central Repository or has a comment associated with a tag
+<li> (O)ther occurrences column - indicates how many data sources in the Central Repository contain this item. The count will include the selected item.
+</ul>
+
+To display more information about why an icon has appeared, you can hover over it. The Comment and Other occurrences columns query the Central Repository. If this seems to be having a performance impact, it can be disabled through the \ref view_options_page. This will remove the Other occurrences column entirely and the Comment column will be based only on tags.
+
 You can also switch it to Thumbnail view to see thumbnails of the content in the selected folder.

 <br>
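The scoring rule in the list above is essentially a two-level priority check, which the following sketch spells out. It illustrates the documented behavior only; the enum and flag names are hypothetical and are not Autopsy's actual implementation.

// Hypothetical sketch of the (S)core column rule described above.
public class ScoreColumnSketch {

    enum Score { NOTABLE /* red icon */, INTERESTING /* yellow icon */, NONE }

    static Score scoreFor(boolean notableHashSetMatch, boolean hasNotableTag,
                          boolean interestingItemMatch, boolean hasNonNotableTag) {
        if (notableHashSetMatch || hasNotableTag) {
            return Score.NOTABLE;        // red: notable hash set hit or notable tag
        }
        if (interestingItemMatch || hasNonNotableTag) {
            return Score.INTERESTING;    // yellow: interesting item hit or any other tag
        }
        return Score.NONE;
    }

    public static void main(String[] args) {
        System.out.println(scoreFor(true, false, false, false));   // NOTABLE
        System.out.println(scoreFor(false, false, false, true));   // INTERESTING
    }
}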
@@ -1,20 +0,0 @@
-/*! \page result_viewer_page Result Viewer
-
-The Result Viewer is located on the top right of the Autopsy screen. It shows lists of files and their corresponding attributes such as time, path, size, checksum, etc.
-
-<br>
-\image html result-viewer-example-1.PNG
-<br>
-
-You can also switch it to Thumbnail view to see thumbnails of the content in the selected folder.
-
-<br>
-\image html result-viewer-example-2.PNG
-<br>
-
-The Result Viewer is context-aware, meaning it will show applicable columns for the data type in selected.
-<br>
-\image html result-viewer-example-3.PNG
-<br>
-
-*/
@@ -2,6 +2,8 @@

 Tagging (or Bookmarking) allows you to create a reference to a file or object and easily find it later. Tagging is also used by the \ref central_repo_page "central repository" to mark items as notable.

+\section tagging_items Tagging items
+
 When an interesting item is discovered, the user can tag it by right-clicking the item and selecting one of the tag options.

 When you tag a Blackboard artifact result, you have the choice to either:
@@ -22,7 +24,6 @@ At this point there are three options:

 \image html tagging_new_tag.png

-<br>
 There are several default tag names:
 - Bookmark - Default tag for marking files of interest
 - CAT-1 through CAT-5 - For law enforcement use
@@ -38,21 +39,36 @@ Items may have more than one tag.

 Tagged results are shown in the "Results" portion of the tree under "Tags". Tagged items are also highlighted in the Results Viewer.

-<br>
 \image html tagging-4.PNG
-<br>
+\section managing_tags Managing tags
+
 The list of tags can be edited through the Tags tab on the Options menu.
-<br>
 \image html tagging-5.PNG
-<br>

 From here, new tags can be added, existing tags can be edited, and user-created tags can be deleted. Note that deleting a tag does not remove it from any tagged items, and that tag will still be usable in any case where it has been used to tag an item.
-<br>
 \image html tagging-6.PNG
-<br>

 If using the central repository, changing the notable status will affect tagged items in the current case only in the following way:
 - If "File A" is tagged with "Tag A", which is not notable, and then "Tag A" is switched to notable, "File A" will be marked as notable in the central repository
 - If "File B" is tagged with "Tag B", which is notable, and then "Tag B" is switched to non-notable, if there are no other notable tags on "File B" then its notable status in the central repository will be removed.
+
+\section user_tags Hiding tags from other users
+
+Tags are associated with the account name of the user that tagged them. This information is visible through selecting items under the "Tags" section of the directory tree:
+
+\image html tagging_user_name.png
+
+or through the \ref cv_annotations content viewer:
+
+\image html content_viewer_annotations.png
+
+It is possible to hide all tagged files and results in the "Tags" area of the tree that were tagged by other users. Open the \ref view_options_page menu either through the gear icon above the directory tree or through Tools->Options, and then select the checkbox to hide other users' tags in the tags area of the tree.
+
+\image html tagging_view_options.png
+
 */
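The two bullet points about notable status amount to one rule: a file stays notable in the central repository as long as at least one notable tag remains on it. A compact sketch of that rule follows; it is purely illustrative and not Autopsy's actual central repository code.

import java.util.Set;

// Illustrative only: the notable-status rule described in the bullets above.
public class NotableStatusSketch {

    // A file is notable in the central repository iff at least one of its tags is currently notable.
    static boolean isNotable(Set<String> tagsOnFile, Set<String> notableTagNames) {
        return tagsOnFile.stream().anyMatch(notableTagNames::contains);
    }

    public static void main(String[] args) {
        Set<String> tagsOnFileB = Set.of("Tag B");
        // While "Tag B" is notable, File B is notable...
        System.out.println(isNotable(tagsOnFileB, Set.of("Tag B")));   // true
        // ...and switching "Tag B" to non-notable removes that status (no other notable tags remain).
        System.out.println(isNotable(tagsOnFileB, Set.of()));          // false
    }
}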
@@ -11,6 +11,8 @@ The major areas in the Autopsy User Interface (UI) are:
 - \ref ui_keyword, shown outlined in yellow below
 - \ref ui_status, shown in solid purple below

+You can customize how data is shown in the UI through the \ref view_options_page panel.
+
 \image html ui-layout-1.PNG

 <br>
@@ -28,7 +30,7 @@ The tree on the left-hand side is where you can browse the files in the image an
 - <b>Tags:</b> Where files and results that have been \ref tagging_page "tagged" are shown
 - <b>Reports:</b> References to reports that you have generated or that ingest modules have created show up here

-You can also use the "Group by Data Source" option at the upper left of the tree display to move the views, results, and tags subtrees under their corresponding data sources. This can be helpful on very large cases to reduce the size of each node.
+You can also use the "Group by data source" option available through the \ref view_options_page to move the views, results, and tags subtrees under their corresponding data sources. This can be helpful on very large cases to reduce the size of each node.

 \image html ui_layout_group_tree.PNG
docs/doxygen-user/view_options.dox (new file, 76 lines)
@@ -0,0 +1,76 @@
+/*! \page view_options_page View Options
+
+The View options allow you to configure how data is displayed in the Autopsy user interface.
+
+There are two ways to access the options. The first way is through the gear icon above the directory tree:
+
+\image html view_options_gear.png
+
+The second way is through Tools->Options and then selecting the "Views" tab:
+
+\image html view_options_options_panel.png
+
+\section view_options_global Global Settings
+
+The settings in this section persist through closing the application.
+
+\subsection view_options_hide_known Hide known files
+
+This option allows you to hide files marked as "known" by the \ref hash_db_page. The option to hide known files in the data sources area will prevent these files from being displayed in the results viewer. Similarly, the option to hide known files in the views area will prevent them from appearing under the Views section of the tree.
+
+\subsection view_options_hide_slack Hide slack files
+
+Autopsy creates slack files (with the "-slack" extension) from any extra space at the end of a file. These files can be displayed or hidden from the data sources area and/or the views area. The following shows a slack file in the results view:
+
+\image html view_options_show_slack.png
+
+Checking the option to hide slack in the data sources area will prevent the slack file from being displayed:
+
+\image html view_options_hide_slack.png
+
+Similarly, the option to hide slack in the views area will prevent slack files from appearing under the Views section of the tree.
+
+\subsection view_options_hide_tags Hide tags from other users
+
+This option allows you to hide tags from other users in the Tagging section of the tree. See \ref user_tags for more details.
+
+\subsection view_options_cr_columns Do not use the Central Repository to populate columns
+
+By default, the first three columns after the file name in the results viewer are named "S", "C" and "O". These are described in more detail on the \ref result_viewer_page page. The Comment and Other occurrences columns query the Central Repository. If this seems to be having a performance impact, it can be disabled using the checkbox. This will remove the Other occurrences column entirely and the Comment column will be based only on tags.
+
+\subsection view_options_content_viewer Content viewer selection
+
+By default, the \ref content_viewer_page attempts to select the most relevant tab to display when choosing a node. If you would like to change this behavior to instead stay on the same content viewer when changing nodes, switch to the "Stay on the same file viewer" option.
+
+\subsection view_options_time Time format
+
+Timestamps can be viewed in either local time or GMT.
+
+\image html view_options_local_time.png
+<br>
+\image html view_options_gmt.png
+
+\section view_options_case Current Case Settings
+
+The settings in this section only apply to the current case.
+
+\subsection view_options_group Group by data source
+
+The "Group by data source" option allows you to separate all elements in the \ref ui_tree by data source. This can help nodes load faster on large cases.
+
+\image html ui_layout_group_tree.PNG
+
+\section view_options_session Current Session Settings
+
+The settings for the current session will be in effect until you close the application.
+
+\subsection view_options_rejected Hide rejected results
+
+Accounts can be approved or rejected by the user, as shown in the screenshot below.
+
+\image html view_options_reject_account.png
+
+Rejected accounts will not be included in the report, and by default will be hidden in the UI. If you accidentally reject an account and need to change its status, or simply want to view the rejected accounts, you can uncheck the "hide rejected results" option.
+
+*/
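The "Time format" option above (local time versus GMT) only changes which time zone a stored timestamp is rendered in; the underlying value does not change. A small illustration using java.time follows; the format pattern is arbitrary and not necessarily the one Autopsy uses.

import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;

// Illustration of rendering the same file timestamp in local time and in GMT.
public class TimeDisplaySketch {
    public static void main(String[] args) {
        Instant fileTime = Instant.ofEpochSecond(1_530_000_000L);   // example modified time
        DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss zzz");

        // Same instant, two display choices:
        System.out.println(fmt.format(fileTime.atZone(ZoneId.systemDefault())));  // local time
        System.out.println(fmt.format(fileTime.atZone(ZoneOffset.UTC)));          // GMT
    }
}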