diff --git a/Core/nbproject/project.xml b/Core/nbproject/project.xml index 7a89519e34..04732e7873 100644 --- a/Core/nbproject/project.xml +++ b/Core/nbproject/project.xml @@ -338,7 +338,7 @@ org.sleuthkit.autopsy.modules.vmextractor org.sleuthkit.autopsy.progress org.sleuthkit.autopsy.report - org.sleuthkit.autopsy.tabulardatareader + org.sleuthkit.autopsy.texttranslation org.sleuthkit.datamodel diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java index c6301bfb89..191d1c3bc6 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java @@ -127,7 +127,7 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi } } else if (jmi.equals(showCommonalityMenuItem)) { showCommonalityDetails(); - } + } } }; @@ -419,7 +419,7 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi } // we can correlate based on the MD5 if it is enabled - if (this.file != null && EamDb.isEnabled()) { + if (this.file != null && EamDb.isEnabled() && this.file.getSize() > 0) { try { List artifactTypes = EamDb.getInstance().getDefinedCorrelationTypes(); @@ -430,13 +430,14 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi CorrelationCase corCase = EamDb.getInstance().getCase(Case.getCurrentCase()); try { ret.add(new CorrelationAttributeInstance( - md5, aType, + md5, corCase, CorrelationDataSource.fromTSKDataSource(corCase, file.getDataSource()), file.getParentPath() + file.getName(), "", - file.getKnown())); + file.getKnown(), + file.getId())); } catch (CorrelationAttributeNormalizationException ex) { LOGGER.log(Level.INFO, String.format("Unable to check create CorrelationAttribtueInstance for value %s and type %s.", md5, aType.toString()), ex); } @@ -447,27 +448,23 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi } catch (EamDbException | TskCoreException ex) { LOGGER.log(Level.SEVERE, "Error connecting to DB", ex); // NON-NLS } - - } else { - - // If EamDb not enabled, get the Files default correlation type to allow Other Occurances to be enabled. - if (this.file != null) { - String md5 = this.file.getMd5Hash(); - if (md5 != null && !md5.isEmpty()) { - try { - final CorrelationAttributeInstance.Type fileAttributeType - = CorrelationAttributeInstance.getDefaultCorrelationTypes() - .stream() - .filter(attrType -> attrType.getId() == CorrelationAttributeInstance.FILES_TYPE_ID) - .findAny() - .get(); - - ret.add(new CorrelationAttributeInstance(fileAttributeType, md5)); - } catch (EamDbException ex) { - LOGGER.log(Level.SEVERE, "Error connecting to DB", ex); // NON-NLS - } catch (CorrelationAttributeNormalizationException ex) { - LOGGER.log(Level.INFO, String.format("Unable to create CorrelationAttributeInstance for value %s", md5), ex); // NON-NLS - } + // If EamDb not enabled, get the Files default correlation type to allow Other Occurances to be enabled. 
+ } else if (this.file != null && this.file.getSize() > 0) { + String md5 = this.file.getMd5Hash(); + if (md5 != null && !md5.isEmpty()) { + try { + final CorrelationAttributeInstance.Type fileAttributeType + = CorrelationAttributeInstance.getDefaultCorrelationTypes() + .stream() + .filter(attrType -> attrType.getId() == CorrelationAttributeInstance.FILES_TYPE_ID) + .findAny() + .get(); + //The Central Repository is not enabled + ret.add(new CorrelationAttributeInstance(fileAttributeType, md5, null, null, "", "", TskData.FileKnown.UNKNOWN, this.file.getId())); + } catch (EamDbException ex) { + LOGGER.log(Level.SEVERE, "Error connecting to DB", ex); // NON-NLS + } catch (CorrelationAttributeNormalizationException ex) { + LOGGER.log(Level.INFO, String.format("Unable to create CorrelationAttributeInstance for value %s", md5), ex); // NON-NLS } } } @@ -515,9 +512,9 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi * artifact. If the central repo is not enabled, this will only return files * from the current case with matching MD5 hashes. * - * @param corAttr CorrelationAttribute to query for + * @param corAttr CorrelationAttribute to query for * @param dataSourceName Data source to filter results - * @param deviceId Device Id to filter results + * @param deviceId Device Id to filter results * * @return A collection of correlated artifact instances */ @@ -580,7 +577,7 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi * Get all other abstract files in the current case with the same MD5 as the * selected node. * - * @param corAttr The CorrelationAttribute containing the MD5 to search for + * @param corAttr The CorrelationAttribute containing the MD5 to search for * @param openCase The current case * * @return List of matching AbstractFile objects @@ -657,11 +654,9 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi // - The central repo is disabled and the backing file has a valid MD5 hash this.file = this.getAbstractFileFromNode(node); if (EamDb.isEnabled()) { - return this.file != null - && this.file.getSize() > 0 - && !getCorrelationAttributesFromNode(node).isEmpty(); + return !getCorrelationAttributesFromNode(node).isEmpty(); } else { - return this.file != null + return this.file != null && this.file.getSize() > 0 && ((this.file.getMd5Hash() != null) && (!this.file.getMd5Hash().isEmpty())); } @@ -733,8 +728,8 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi * Adjust a given column for the text provided. * * @param columnIndex The index of the column to adjust. - * @param text The text whose length will be used to adjust the column - * width. + * @param text The text whose length will be used to adjust the + * column width. 
*/ private void setColumnWidthToText(int columnIndex, String text) { TableColumn column = otherCasesTable.getColumnModel().getColumn(columnIndex); diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index 836f47ea85..c2b10f8295 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -808,9 +808,9 @@ abstract class AbstractSqlEamDb implements EamDb { String sql = "INSERT INTO " + tableName - + "(case_id, data_source_id, value, file_path, known_status, comment) " + + "(case_id, data_source_id, value, file_path, known_status, comment, file_obj_id) " + "VALUES ((SELECT id FROM cases WHERE case_uid=? LIMIT 1), " - + "(SELECT id FROM data_sources WHERE device_id=? AND case_id=? LIMIT 1), ?, ?, ?, ?) " + + "(SELECT id FROM data_sources WHERE device_id=? AND case_id=? LIMIT 1), ?, ?, ?, ?, ?) " + getConflictClause(); try { @@ -824,11 +824,13 @@ abstract class AbstractSqlEamDb implements EamDb { preparedStatement.setString(4, eamArtifact.getCorrelationValue()); preparedStatement.setString(5, eamArtifact.getFilePath().toLowerCase()); preparedStatement.setByte(6, eamArtifact.getKnownStatus().getFileKnownValue()); + if ("".equals(eamArtifact.getComment())) { preparedStatement.setNull(7, Types.INTEGER); } else { preparedStatement.setString(7, eamArtifact.getComment()); } + preparedStatement.setLong(8, eamArtifact.getFileObjectId()); preparedStatement.executeUpdate(); } @@ -900,6 +902,8 @@ abstract class AbstractSqlEamDb implements EamDb { + ".id," + tableName + ".value," + + tableName + + ".file_obj_id," + " cases.case_name, cases.case_uid, data_sources.id AS data_source_id, data_sources.name, device_id, file_path, known_status, comment, data_sources.case_id FROM " + tableName + " LEFT JOIN cases ON " @@ -963,6 +967,8 @@ abstract class AbstractSqlEamDb implements EamDb { + ".id, " + tableName + ".value," + + tableName + + ".file_obj_id," + " cases.case_name, cases.case_uid, data_sources.id AS data_source_id, data_sources.name, device_id, file_path, known_status, comment, data_sources.case_id FROM " + tableName + " LEFT JOIN cases ON " @@ -1229,9 +1235,9 @@ abstract class AbstractSqlEamDb implements EamDb { String sql = "INSERT INTO " + tableName - + " (case_id, data_source_id, value, file_path, known_status, comment) " + + " (case_id, data_source_id, value, file_path, known_status, comment, file_obj_id) " + "VALUES ((SELECT id FROM cases WHERE case_uid=? LIMIT 1), " - + "(SELECT id FROM data_sources WHERE device_id=? AND case_id=? LIMIT 1), ?, ?, ?, ?) " + + "(SELECT id FROM data_sources WHERE device_id=? AND case_id=? LIMIT 1), ?, ?, ?, ?, ?) " + getConflictClause(); bulkPs = conn.prepareStatement(sql); @@ -1275,6 +1281,7 @@ abstract class AbstractSqlEamDb implements EamDb { } else { bulkPs.setString(7, eamArtifact.getComment()); } + bulkPs.setLong(8, eamArtifact.getFileObjectId()); bulkPs.addBatch(); } else { logger.log(Level.WARNING, ("Artifact value too long for central repository." @@ -1439,6 +1446,68 @@ abstract class AbstractSqlEamDb implements EamDb { } } + /** + * Find a correlation attribute in the Central Repository database given the + * instance type, case, data source, object id. + * + * @param type The type of instance. + * @param correlationCase The case tied to the instance. 
+ * @param correlationDataSource The data source tied to the instance. + * @param objectID The object id of the file tied to the + * instance. + * + * @return The correlation attribute if it exists; otherwise null. + * + * @throws EamDbException + */ + @Override + public CorrelationAttributeInstance getCorrelationAttributeInstance(CorrelationAttributeInstance.Type type, CorrelationCase correlationCase, + CorrelationDataSource correlationDataSource, long objectID) throws EamDbException, CorrelationAttributeNormalizationException { + + if (correlationCase == null) { + throw new EamDbException("Correlation case is null"); + } + + Connection conn = connect(); + + PreparedStatement preparedStatement = null; + ResultSet resultSet = null; + CorrelationAttributeInstance correlationAttributeInstance = null; + + try { + + String tableName = EamDbUtil.correlationTypeToInstanceTableName(type); + String sql + = "SELECT id, value, file_path, known_status, comment FROM " + + tableName + + " WHERE case_id=?" + + " AND file_obj_id=?"; + + preparedStatement = conn.prepareStatement(sql); + preparedStatement.setInt(1, correlationCase.getID()); + preparedStatement.setInt(2, (int) objectID); + resultSet = preparedStatement.executeQuery(); + if (resultSet.next()) { + int instanceId = resultSet.getInt(1); + String value = resultSet.getString(2); + String filePath = resultSet.getString(3); + int knownStatus = resultSet.getInt(4); + String comment = resultSet.getString(5); + + correlationAttributeInstance = new CorrelationAttributeInstance(type, value, + instanceId, correlationCase, correlationDataSource, filePath, comment, TskData.FileKnown.valueOf((byte) knownStatus), objectID); + } + } catch (SQLException ex) { + throw new EamDbException("Error getting notable artifact instances.", ex); // NON-NLS + } finally { + EamDbUtil.closeStatement(preparedStatement); + EamDbUtil.closeResultSet(resultSet); + EamDbUtil.closeConnection(conn); + } + + return correlationAttributeInstance; + } + /** * Find a correlation attribute in the Central Repository database given the * instance type, case, data source, value, and file path. 
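Note on usage of the new overload above: rows written under schema 1.1 and earlier have no file_obj_id, so a caller that gets null back from the object-id query is expected to retry with the older (value, file path) key, which is what EamArtifactUtil.getInstanceFromFile does later in this patch. A minimal sketch of that calling pattern, assuming the usual org.sleuthkit.autopsy.centralrepository.datamodel and org.sleuthkit.datamodel types; the wrapper method and its name are illustrative only, not part of the patch:

    // Look up a file's correlation attribute, preferring the schema 1.2 file_obj_id key
    // and falling back to the pre-1.2 (value, file_path) key when no row is found.
    private static CorrelationAttributeInstance findFileInstance(EamDb dbManager,
            CorrelationAttributeInstance.Type filesType, CorrelationCase corCase,
            CorrelationDataSource dataSource, AbstractFile file)
            throws EamDbException, CorrelationAttributeNormalizationException {
        // Fast path: rows written by schema 1.2+ carry the object id of the source file.
        CorrelationAttributeInstance instance
                = dbManager.getCorrelationAttributeInstance(filesType, corCase, dataSource, file.getId());
        if (instance == null) {
            // Fallback for rows created before the file_obj_id column existed.
            String md5 = file.getMd5Hash();
            String filePath = (file.getParentPath() + file.getName()).toLowerCase();
            instance = dbManager.getCorrelationAttributeInstance(filesType, corCase, dataSource, md5, filePath);
        }
        return instance;
    }
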
@@ -1495,9 +1564,9 @@ abstract class AbstractSqlEamDb implements EamDb { int instanceId = resultSet.getInt(1); int knownStatus = resultSet.getInt(2); String comment = resultSet.getString(3); - + //null objectId used because we only fall back to using this method when objectID was not available correlationAttributeInstance = new CorrelationAttributeInstance(type, value, - instanceId, correlationCase, correlationDataSource, filePath, comment, TskData.FileKnown.valueOf((byte) knownStatus)); + instanceId, correlationCase, correlationDataSource, filePath, comment, TskData.FileKnown.valueOf((byte) knownStatus), null); } } catch (SQLException ex) { throw new EamDbException("Error getting notable artifact instances.", ex); // NON-NLS @@ -1637,6 +1706,8 @@ abstract class AbstractSqlEamDb implements EamDb { + ".id, " + tableName + ".value, " + + tableName + + ".file_obj_id," + "cases.case_name, cases.case_uid, data_sources.id AS data_source_id, data_sources.name, device_id, file_path, known_status, comment, data_sources.case_id FROM " + tableName + " LEFT JOIN cases ON " @@ -1694,7 +1765,7 @@ abstract class AbstractSqlEamDb implements EamDb { String tableName = EamDbUtil.correlationTypeToInstanceTableName(aType); String sql - = "SELECT cases.case_name, cases.case_uid, data_sources.name, device_id, file_path, known_status, comment, data_sources.case_id, id, value FROM " + = "SELECT cases.case_name, cases.case_uid, data_sources.name, device_id, file_path, known_status, comment, data_sources.case_id, id, value, file_obj_id FROM " + tableName + " LEFT JOIN cases ON " + tableName @@ -2960,11 +3031,10 @@ abstract class AbstractSqlEamDb implements EamDb { resultSet.getString("poc_phone")); } - CorrelationCase eamCase = new CorrelationCase(resultSet.getInt("case_id"), resultSet.getString("case_uid"), eamOrg, resultSet.getString("case_name"), - resultSet.getString("creation_date"), resultSet.getString("case_number"), resultSet.getString("examiner_name"), + CorrelationCase eamCase = new CorrelationCase(resultSet.getInt("case_id"), resultSet.getString("case_uid"), eamOrg, resultSet.getString("case_name"), + resultSet.getString("creation_date"), resultSet.getString("case_number"), resultSet.getString("examiner_name"), resultSet.getString("examiner_email"), resultSet.getString("examiner_phone"), resultSet.getString("notes")); - return eamCase; } @@ -3021,8 +3091,8 @@ abstract class AbstractSqlEamDb implements EamDb { new CorrelationDataSource(resultSet.getInt("case_id"), resultSet.getInt("data_source_id"), resultSet.getString("device_id"), resultSet.getString("name")), resultSet.getString("file_path"), resultSet.getString("comment"), - TskData.FileKnown.valueOf(resultSet.getByte("known_status")) - ); + TskData.FileKnown.valueOf(resultSet.getByte("known_status")), + resultSet.getLong("file_obj_id")); } private EamOrganization getEamOrganizationFromResultSet(ResultSet resultSet) throws SQLException { @@ -3070,6 +3140,18 @@ abstract class AbstractSqlEamDb implements EamDb { ); } + /** + * Determine if a specific column already exists in a specific table + * + * @param tableName the table to check for the specified column + * @param columnName the name of the column to check for + * + * @return true if the column exists, false if the column does not exist + * + * @throws EamDbException + */ + abstract boolean doesColumnExist(Connection conn, String tableName, String columnName) throws SQLException; + /** * Upgrade the schema of the database (if needed) * @@ -3080,6 +3162,7 @@ abstract class AbstractSqlEamDb 
implements EamDb { ResultSet resultSet = null; Statement statement = null; + PreparedStatement preparedStatement = null; Connection conn = null; try { @@ -3114,6 +3197,10 @@ abstract class AbstractSqlEamDb implements EamDb { logger.log(Level.INFO, "Central Repository is up to date"); return; } + if (dbSchemaVersion.compareTo(CURRENT_DB_SCHEMA_VERSION) > 0) { + logger.log(Level.INFO, "Central Repository is of newer version than software creates"); + return; + } // Update from 1.0 to 1.1 if (dbSchemaVersion.compareTo(new CaseDbSchemaVersionNumber(1, 1)) < 0) { @@ -3126,7 +3213,71 @@ abstract class AbstractSqlEamDb implements EamDb { // regardless of whether this succeeds. EamDbUtil.insertDefaultOrganization(conn); } + //Update to 1.2 + if (dbSchemaVersion.compareTo(new CaseDbSchemaVersionNumber(1, 2)) < 0) { + EamDbPlatformEnum selectedPlatform = EamDbPlatformEnum.getSelectedPlatform(); + final String addObjectIdColumnTemplate = "ALTER TABLE %s ADD COLUMN file_obj_id INTEGER;"; //NON-NLS + final String addSsidTableTemplate; + final String addCaseIdIndexTemplate; + final String addDataSourceIdIndexTemplate; + final String addValueIndexTemplate; + final String addKnownStatusIndexTemplate; + final String addObjectIdIndexTemplate; + + final String addAttributeSql; + //get the data base specific code for creating a new _instance table + switch (selectedPlatform) { + case POSTGRESQL: + addAttributeSql = "INSERT INTO correlation_types(id, display_name, db_table_name, supported, enabled) VALUES (?, ?, ?, ?, ?) " + getConflictClause(); //NON-NLS + + addSsidTableTemplate = PostgresEamDbSettings.getCreateArtifactInstancesTableTemplate(); + addCaseIdIndexTemplate = PostgresEamDbSettings.getAddCaseIdIndexTemplate(); + addDataSourceIdIndexTemplate = PostgresEamDbSettings.getAddDataSourceIdIndexTemplate(); + addValueIndexTemplate = PostgresEamDbSettings.getAddValueIndexTemplate(); + addKnownStatusIndexTemplate = PostgresEamDbSettings.getAddKnownStatusIndexTemplate(); + addObjectIdIndexTemplate = PostgresEamDbSettings.getAddObjectIdIndexTemplate(); + break; + case SQLITE: + addAttributeSql = "INSERT OR IGNORE INTO correlation_types(id, display_name, db_table_name, supported, enabled) VALUES (?, ?, ?, ?, ?)"; //NON-NLS + + addSsidTableTemplate = SqliteEamDbSettings.getCreateArtifactInstancesTableTemplate(); + addCaseIdIndexTemplate = SqliteEamDbSettings.getAddCaseIdIndexTemplate(); + addDataSourceIdIndexTemplate = SqliteEamDbSettings.getAddDataSourceIdIndexTemplate(); + addValueIndexTemplate = SqliteEamDbSettings.getAddValueIndexTemplate(); + addKnownStatusIndexTemplate = SqliteEamDbSettings.getAddKnownStatusIndexTemplate(); + addObjectIdIndexTemplate = SqliteEamDbSettings.getAddObjectIdIndexTemplate(); + break; + default: + throw new EamDbException("Currently selected database platform \"" + selectedPlatform.name() + "\" can not be upgraded."); + } + //update central repository to be able to store new correlation attributes + final String wirelessNetworsDbTableName = "wireless_networks"; + final String wirelessNetworksTableInstanceName = wirelessNetworsDbTableName + "_instances"; + //add the wireless_networks attribute to the correlation_types table + preparedStatement = conn.prepareStatement(addAttributeSql); + preparedStatement.setInt(1, CorrelationAttributeInstance.SSID_TYPE_ID); + preparedStatement.setString(2, Bundle.CorrelationType_SSID_displayName()); + preparedStatement.setString(3, wirelessNetworsDbTableName); + preparedStatement.setInt(4, 1); + preparedStatement.setInt(5, 1); + 
preparedStatement.execute(); + //create a new wireless_networks_instances table and add indexes for its columns + statement.execute(String.format(addSsidTableTemplate, wirelessNetworksTableInstanceName, wirelessNetworksTableInstanceName)); + statement.execute(String.format(addCaseIdIndexTemplate, wirelessNetworksTableInstanceName, wirelessNetworksTableInstanceName)); + statement.execute(String.format(addDataSourceIdIndexTemplate, wirelessNetworksTableInstanceName, wirelessNetworksTableInstanceName)); + statement.execute(String.format(addValueIndexTemplate, wirelessNetworksTableInstanceName, wirelessNetworksTableInstanceName)); + statement.execute(String.format(addKnownStatusIndexTemplate, wirelessNetworksTableInstanceName, wirelessNetworksTableInstanceName)); + //add file_obj_id column to _instances table which do not already have it + String instance_type_dbname; + for (CorrelationAttributeInstance.Type type : CorrelationAttributeInstance.getDefaultCorrelationTypes()) { + instance_type_dbname = EamDbUtil.correlationTypeToInstanceTableName(type); + if (!doesColumnExist(conn, instance_type_dbname, "file_obj_id")) { + statement.execute(String.format(addObjectIdColumnTemplate, instance_type_dbname)); //NON-NLS + } + statement.execute(String.format(addObjectIdIndexTemplate, instance_type_dbname, instance_type_dbname)); + } + } if (!updateSchemaVersion(conn)) { throw new EamDbException("Error updating schema version"); } @@ -3144,6 +3295,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw ex; } finally { EamDbUtil.closeResultSet(resultSet); + EamDbUtil.closeStatement(preparedStatement); EamDbUtil.closeStatement(statement); EamDbUtil.closeConnection(conn); } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeInstance.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeInstance.java index c26134c5b8..8fd6b4170e 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeInstance.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeInstance.java @@ -48,46 +48,18 @@ public class CorrelationAttributeInstance implements Serializable { private String filePath; private String comment; private TskData.FileKnown knownStatus; + private Long objectId; public CorrelationAttributeInstance( - String correlationValue, CorrelationAttributeInstance.Type correlationType, - CorrelationCase eamCase, - CorrelationDataSource eamDataSource, - String filePath - ) throws EamDbException, CorrelationAttributeNormalizationException { - this(correlationType, correlationValue, -1, eamCase, eamDataSource, filePath, null, TskData.FileKnown.UNKNOWN); - } - - public CorrelationAttributeInstance( String correlationValue, - CorrelationAttributeInstance.Type correlationType, CorrelationCase eamCase, CorrelationDataSource eamDataSource, String filePath, String comment, - TskData.FileKnown knownStatus - ) throws EamDbException, CorrelationAttributeNormalizationException { - this(correlationType, correlationValue, -1, eamCase, eamDataSource, filePath, comment, knownStatus); - } - - public CorrelationAttributeInstance( - Type correlationType, - String correlationValue, - CorrelationCase correlationCase, - CorrelationDataSource fromTSKDataSource, - String string) throws EamDbException, CorrelationAttributeNormalizationException { - this(correlationType, correlationValue, -1, correlationCase, fromTSKDataSource, string, "", TskData.FileKnown.UNKNOWN); - } - - /** - * NOTE: Only used for 
when EamDB is NOT enabled. - * - * @param aType CorrelationAttributeInstance.Type - * @param value correlation value - */ - public CorrelationAttributeInstance(Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException { - this(aType, value, -1, null, null, "", "", TskData.FileKnown.UNKNOWN); + TskData.FileKnown knownStatus, + long fileObjectId) throws EamDbException, CorrelationAttributeNormalizationException { + this(correlationType, correlationValue, -1, eamCase, eamDataSource, filePath, comment, knownStatus, fileObjectId); } CorrelationAttributeInstance( @@ -98,7 +70,8 @@ public class CorrelationAttributeInstance implements Serializable { CorrelationDataSource eamDataSource, String filePath, String comment, - TskData.FileKnown knownStatus + TskData.FileKnown knownStatus, + Long fileObjectId ) throws EamDbException, CorrelationAttributeNormalizationException { if (filePath == null) { throw new EamDbException("file path is null"); @@ -113,6 +86,7 @@ public class CorrelationAttributeInstance implements Serializable { this.filePath = filePath.toLowerCase(); this.comment = comment; this.knownStatus = knownStatus; + this.objectId = fileObjectId; } public Boolean equals(CorrelationAttributeInstance otherInstance) { @@ -145,14 +119,6 @@ public class CorrelationAttributeInstance implements Serializable { return correlationValue; } - /** - * @param correlationValue the correlationValue to set - */ - public void setCorrelationValue(String correlationValue) { - // Lower-case all values to normalize and improve correlation hits, going forward make sure this makes sense for all correlation types - this.correlationValue = correlationValue.toLowerCase(); - } - /** * @return the correlation Type */ @@ -160,18 +126,11 @@ public class CorrelationAttributeInstance implements Serializable { return correlationType; } - /** - * @param correlationType the correlation Type to set - */ - public void setCorrelationType(Type correlationType) { - this.correlationType = correlationType; - } - /** * Is this a database instance? * * @return True if the instance ID is greater or equal to zero; otherwise - * false. + * false. */ public boolean isDatabaseInstance() { return (ID >= 0); @@ -234,30 +193,42 @@ public class CorrelationAttributeInstance implements Serializable { * as notable and should never be set to KNOWN. * * @param knownStatus Should be BAD if the item is tagged as notable, - * UNKNOWN otherwise + * UNKNOWN otherwise */ public void setKnownStatus(TskData.FileKnown knownStatus) { this.knownStatus = knownStatus; } + /** + * Get the objectId of the file associated with the correlation attribute or + * NULL if the objectId is not available. 
+ * + * @return the objectId of the file + */ + public Long getFileObjectId() { + return objectId; + } + // Type ID's for Default Correlation Types public static final int FILES_TYPE_ID = 0; public static final int DOMAIN_TYPE_ID = 1; public static final int EMAIL_TYPE_ID = 2; public static final int PHONE_TYPE_ID = 3; public static final int USBID_TYPE_ID = 4; + public static final int SSID_TYPE_ID = 5; /** * Load the default correlation types * * @throws EamDbException if the Type's dbTableName has invalid - * characters/format + * characters/format */ @Messages({"CorrelationType.FILES.displayName=Files", "CorrelationType.DOMAIN.displayName=Domains", "CorrelationType.EMAIL.displayName=Email Addresses", "CorrelationType.PHONE.displayName=Phone Numbers", - "CorrelationType.USBID.displayName=USB Devices"}) + "CorrelationType.USBID.displayName=USB Devices", + "CorrelationType.SSID.displayName=Wireless Networks"}) public static List getDefaultCorrelationTypes() throws EamDbException { List DEFAULT_CORRELATION_TYPES = new ArrayList<>(); DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(FILES_TYPE_ID, Bundle.CorrelationType_FILES_displayName(), "file", true, true)); // NON-NLS @@ -265,6 +236,7 @@ public class CorrelationAttributeInstance implements Serializable { DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(EMAIL_TYPE_ID, Bundle.CorrelationType_EMAIL_displayName(), "email_address", true, true)); // NON-NLS DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(PHONE_TYPE_ID, Bundle.CorrelationType_PHONE_displayName(), "phone_number", true, true)); // NON-NLS DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(USBID_TYPE_ID, Bundle.CorrelationType_USBID_displayName(), "usb_devices", true, true)); // NON-NLS + DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(SSID_TYPE_ID, Bundle.CorrelationType_SSID_displayName(), "wireless_networks", true, true)); // NON-NLS return DEFAULT_CORRELATION_TYPES; } @@ -283,13 +255,14 @@ public class CorrelationAttributeInstance implements Serializable { /** * - * @param typeId Unique ID for this Correlation Type + * @param typeId Unique ID for this Correlation Type * @param displayName Name of this type displayed in the UI. * @param dbTableName Central repository db table where data of this - * type is stored. Must start with a lowercase letter and only contain - * lowercase letters, numbers, and '_' characters. - * @param supported Is this Type currently supported - * @param enabled Is this Type currently enabled. + * type is stored. Must start with a lowercase letter + * and only contain lowercase letters, numbers, and + * '_' characters. + * @param supported Is this Type currently supported + * @param enabled Is this Type currently enabled. */ public Type(int typeId, String displayName, String dbTableName, Boolean supported, Boolean enabled) throws EamDbException { if (dbTableName == null) { @@ -312,10 +285,11 @@ public class CorrelationAttributeInstance implements Serializable { * * @param displayName Name of this type displayed in the UI. * @param dbTableName Central repository db table where data of this - * type is stored Must start with a lowercase letter and only contain - * lowercase letters, numbers, and '_' characters. - * @param supported Is this Type currently supported - * @param enabled Is this Type currently enabled. + * type is stored Must start with a lowercase letter + * and only contain lowercase letters, numbers, and + * '_' characters. 
+ * @param supported Is this Type currently supported + * @param enabled Is this Type currently enabled. */ public Type(String displayName, String dbTableName, Boolean supported, Boolean enabled) throws EamDbException { this(-1, displayName, dbTableName, supported, enabled); @@ -477,8 +451,8 @@ public class CorrelationAttributeInstance implements Serializable { * custom_instances) * * @param dbTableName the dbTableName to set. Must start with lowercase - * letter and can only contain lowercase letters, numbers, and '_' - * characters. + * letter and can only contain lowercase letters, + * numbers, and '_' characters. * * @throws EamDbException if dbTableName contains invalid characters */ diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeNormalizer.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeNormalizer.java index 772e1c517e..4ce04769c8 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeNormalizer.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeNormalizer.java @@ -63,6 +63,8 @@ final public class CorrelationAttributeNormalizer { return normalizePhone(data); case CorrelationAttributeInstance.USBID_TYPE_ID: return normalizeUsbId(data); + case CorrelationAttributeInstance.SSID_TYPE_ID: + return data; default: final String errorMessage = String.format( "Validator function not found for attribute type: %s", diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamArtifactUtil.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamArtifactUtil.java index 30d539e87f..7d098fc590 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamArtifactUtil.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamArtifactUtil.java @@ -54,9 +54,9 @@ public class EamArtifactUtil { * EamArtifact with a single EamArtifactInstance within. If not, return * null. * - * @param bbArtifact BlackboardArtifact to examine + * @param bbArtifact BlackboardArtifact to examine * @param checkEnabled If true, only create a CorrelationAttribute if it is - * enabled + * enabled * * @return List of EamArtifacts */ @@ -93,10 +93,10 @@ public class EamArtifactUtil { * based on the data in the blackboard artifact. 
* * @param correlationType The Central Repository artifact type to create - * @param bbArtifact The blackboard artifact to pull data from + * @param bbArtifact The blackboard artifact to pull data from * * @return the new EamArtifact, or null if one was not created because - * bbArtifact did not contain the needed data + * bbArtifact did not contain the needed data */ private static CorrelationAttributeInstance makeInstanceFromBlackboardArtifact(CorrelationAttributeInstance.Type correlationType, BlackboardArtifact bbArtifact) throws EamDbException { @@ -159,13 +159,14 @@ public class EamArtifactUtil { return null; } } - } else if (correlationType.getId() == CorrelationAttributeInstance.USBID_TYPE_ID && BlackboardArtifact.ARTIFACT_TYPE.TSK_DEVICE_ATTACHED.getTypeID() == artifactTypeID) { value = bbArtifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DEVICE_ID)).getValueString(); + } else if (correlationType.getId() == CorrelationAttributeInstance.SSID_TYPE_ID + && BlackboardArtifact.ARTIFACT_TYPE.TSK_WIFI_NETWORK.getTypeID() == artifactTypeID) { + value = bbArtifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SSID)).getValueString(); } - } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error getting attribute while getting type from BlackboardArtifact.", ex); // NON-NLS return null; @@ -185,9 +186,10 @@ public class EamArtifactUtil { * Uses the determined type and vallue, then looks up instance details to * create proper CorrelationAttributeInstance. * - * @param bbArtifact the blackboard artifatc + * @param bbArtifact the blackboard artifact * @param correlationType the given type - * @param value the artifact value + * @param value the artifact value + * * @return CorrelationAttributeInstance from details */ private static CorrelationAttributeInstance makeCorrelationAttributeInstanceUsingTypeValue(BlackboardArtifact bbArtifact, CorrelationAttributeInstance.Type correlationType, String value) { @@ -205,14 +207,14 @@ public class EamArtifactUtil { correlationCase = EamDb.getInstance().newCase(Case.getCurrentCaseThrows()); } return new CorrelationAttributeInstance( - value, correlationType, + value, correlationCase, CorrelationDataSource.fromTSKDataSource(correlationCase, bbSourceFile.getDataSource()), bbSourceFile.getParentPath() + bbSourceFile.getName(), "", - TskData.FileKnown.UNKNOWN - ); + TskData.FileKnown.UNKNOWN, + bbSourceFile.getId()); } catch (TskCoreException | EamDbException | CorrelationAttributeNormalizationException ex) { logger.log(Level.SEVERE, "Error creating artifact instance.", ex); // NON-NLS @@ -245,8 +247,6 @@ public class EamArtifactUtil { CorrelationAttributeInstance.Type type; CorrelationCase correlationCase; CorrelationDataSource correlationDataSource; - String value; - String filePath; try { type = EamDb.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID); @@ -256,8 +256,6 @@ public class EamArtifactUtil { return null; } correlationDataSource = CorrelationDataSource.fromTSKDataSource(correlationCase, file.getDataSource()); - value = file.getMd5Hash(); - filePath = (file.getParentPath() + file.getName()).toLowerCase(); } catch (TskCoreException | EamDbException ex) { logger.log(Level.SEVERE, "Error retrieving correlation attribute.", ex); return null; @@ -268,13 +266,26 @@ public class EamArtifactUtil { CorrelationAttributeInstance correlationAttributeInstance; try { - correlationAttributeInstance = 
EamDb.getInstance().getCorrelationAttributeInstance(type, correlationCase, correlationDataSource, value, filePath); + correlationAttributeInstance = EamDb.getInstance().getCorrelationAttributeInstance(type, correlationCase, correlationDataSource, file.getId()); } catch (EamDbException | CorrelationAttributeNormalizationException ex) { logger.log(Level.WARNING, String.format( "Correlation attribute could not be retrieved for '%s' (id=%d): %s", content.getName(), content.getId(), ex.getMessage())); return null; } + //if there was no correlation attribute found for the item using object_id then check for attributes added with schema 1,1 which lack object_id + if (correlationAttributeInstance == null) { + String value = file.getMd5Hash(); + String filePath = (file.getParentPath() + file.getName()).toLowerCase(); + try { + correlationAttributeInstance = EamDb.getInstance().getCorrelationAttributeInstance(type, correlationCase, correlationDataSource, value, filePath); + } catch (EamDbException | CorrelationAttributeNormalizationException ex) { + logger.log(Level.WARNING, String.format( + "Correlation attribute could not be retrieved for '%s' (id=%d): %s", + content.getName(), content.getId(), ex.getMessage())); + return null; + } + } return correlationAttributeInstance; } @@ -317,12 +328,16 @@ public class EamArtifactUtil { if (null == correlationCase) { correlationCase = EamDb.getInstance().newCase(Case.getCurrentCaseThrows()); } + return new CorrelationAttributeInstance( filesType, af.getMd5Hash(), correlationCase, CorrelationDataSource.fromTSKDataSource(correlationCase, af.getDataSource()), - af.getParentPath() + af.getName()); + af.getParentPath() + af.getName(), + "", + TskData.FileKnown.UNKNOWN, + af.getId()); } catch (TskCoreException | EamDbException | CorrelationAttributeNormalizationException ex) { logger.log(Level.SEVERE, "Error making correlation attribute.", ex); @@ -340,7 +355,7 @@ public class EamArtifactUtil { * @param file The file to test * * @return true if the file should be added to the central repo, false - * otherwise + * otherwise */ public static boolean isSupportedAbstractFileType(AbstractFile file) { if (file == null) { diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java index c7e385928d..95584d0ebe 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java @@ -31,11 +31,10 @@ import org.sleuthkit.datamodel.CaseDbSchemaVersionNumber; */ public interface EamDb { - public static final int SCHEMA_VERSION = 1; + public static final int SCHEMA_VERSION = 2; public static final CaseDbSchemaVersionNumber CURRENT_DB_SCHEMA_VERSION - = new CaseDbSchemaVersionNumber(1, 1); - - + = new CaseDbSchemaVersionNumber(1, 2); + /** * Get the instance * @@ -183,13 +182,14 @@ public interface EamDb { * @return The retrieved case */ CorrelationCase getCaseById(int caseId) throws EamDbException; + /** * Retrieves cases that are in DB. 
* * @return List of cases */ List getCases() throws EamDbException; - + /** * Creates new Data Source in the database * @@ -208,18 +208,17 @@ public interface EamDb { */ CorrelationDataSource getDataSource(CorrelationCase correlationCase, String dataSourceDeviceId) throws EamDbException; - /** * Retrieves Data Source details based on data source ID * - * @param correlationCase the current CorrelationCase used for ensuring - * uniqueness of DataSource - * @param dataSourceId the data source ID number + * @param correlationCase the current CorrelationCase used for ensuring + * uniqueness of DataSource + * @param dataSourceId the data source ID number * * @return The data source */ CorrelationDataSource getDataSourceById(CorrelationCase correlationCase, int dataSourceId) throws EamDbException; - + /** * Retrieves data sources that are in DB * @@ -245,7 +244,7 @@ public interface EamDb { * @return List of artifact instances for a given type/value */ List getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException; - + /** * Retrieves eamArtifact instances from the database that are associated * with the aType and filePath @@ -314,8 +313,8 @@ public interface EamDb { /** * Adds an eamArtifact to an internal list to be later added to DB. Artifact - can have 1 or more Artifact Instances. Insert will be triggered by a - threshold or a call to commitAttributeInstancesBulk(). + * can have 1 or more Artifact Instances. Insert will be triggered by a + * threshold or a call to commitAttributeInstancesBulk(). * * @param eamArtifact The artifact to add */ @@ -323,7 +322,7 @@ public interface EamDb { /** * Executes a bulk insert of the eamArtifacts added from the - addAttributeInstanceBulk() method + * addAttributeInstanceBulk() method */ void commitAttributeInstancesBulk() throws EamDbException; @@ -346,6 +345,9 @@ public interface EamDb { /** * Find a correlation attribute in the Central Repository database given the * instance type, case, data source, value, and file path. + * + * Method exists to support instances added using Central Repository version 1,1 and + * older * * @param type The type of instance. * @param correlationCase The case tied to the instance. @@ -354,12 +356,28 @@ public interface EamDb { * @param filePath The file path tied to the instance. * * @return The correlation attribute if it exists; otherwise null. - * + * * @throws EamDbException */ CorrelationAttributeInstance getCorrelationAttributeInstance(CorrelationAttributeInstance.Type type, CorrelationCase correlationCase, CorrelationDataSource correlationDataSource, String value, String filePath) throws EamDbException, CorrelationAttributeNormalizationException; + /** + * Find a correlation attribute in the Central Repository database given the + * instance type, case, data source, object id. + * + * @param type The type of instance. + * @param correlationCase The case tied to the instance. + * @param correlationDataSource The data source tied to the instance. + * @param objectID The object id of the file tied to the instance. + * + * @return The correlation attribute if it exists; otherwise null. 
+ * + * @throws EamDbException + */ + CorrelationAttributeInstance getCorrelationAttributeInstance(CorrelationAttributeInstance.Type type, CorrelationCase correlationCase, + CorrelationDataSource correlationDataSource, long objectID) throws EamDbException, CorrelationAttributeNormalizationException; + /** * Sets an eamArtifact instance to the given known status. If eamArtifact * exists, it is updated. If eamArtifact does not exist nothing happens @@ -383,12 +401,15 @@ public interface EamDb { /** * Gets list of matching eamArtifact instances that have knownStatus = * "Bad". - * + * * @param aType EamArtifact.Type to search for + * * @return List with 0 or more matching eamArtifact instances. + * * @throws EamDbException */ List getArtifactInstancesKnownBad(CorrelationAttributeInstance.Type aType) throws EamDbException; + /** * Count matching eamArtifacts instances that have knownStatus = "Bad". * @@ -490,7 +511,7 @@ public interface EamDb { * * @param eamOrg The organization to add * - * @return The organization with the org ID set. + * @return The organization with the org ID set. * * @throws EamDbException */ @@ -700,18 +721,20 @@ public interface EamDb { /** * Process the Artifact instance in the EamDb * - * @param type EamArtifact.Type to search for + * @param type EamArtifact.Type to search for * @param instanceTableCallback callback to process the instance + * * @throws EamDbException */ void processInstanceTable(CorrelationAttributeInstance.Type type, InstanceTableCallback instanceTableCallback) throws EamDbException; - + /** * Process the Artifact instance in the EamDb * - * @param type EamArtifact.Type to search for + * @param type EamArtifact.Type to search for * @param instanceTableCallback callback to process the instance - * @param whereClause query string to execute + * @param whereClause query string to execute + * * @throws EamDbException */ void processInstanceTableWhere(CorrelationAttributeInstance.Type type, String whereClause, InstanceTableCallback instanceTableCallback) throws EamDbException; diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java index 97abd1dec9..769b49bfd3 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java @@ -19,6 +19,7 @@ package org.sleuthkit.autopsy.centralrepository.datamodel; import java.sql.Connection; +import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.concurrent.TimeUnit; @@ -29,8 +30,7 @@ import org.sleuthkit.autopsy.core.UserPreferences; import org.sleuthkit.autopsy.coreutils.Logger; /** - * Central Repository database implementation using Postgres as a - * backend + * Central Repository database implementation using Postgres as a backend */ final class PostgresEamDb extends AbstractSqlEamDb { @@ -47,10 +47,11 @@ final class PostgresEamDb extends AbstractSqlEamDb { /** * Get the singleton instance of PostgresEamDb - * + * * @return the singleton instance of PostgresEamDb - * - * @throws EamDbException if one or more default correlation type(s) have an invalid db table name. + * + * @throws EamDbException if one or more default correlation type(s) have an + * invalid db table name. 
*/ public synchronized static PostgresEamDb getInstance() throws EamDbException { if (instance == null) { @@ -61,9 +62,10 @@ final class PostgresEamDb extends AbstractSqlEamDb { } /** - * - * @throws EamDbException if the AbstractSqlEamDb class has one or more default - * correlation type(s) having an invalid db table name. + * + * @throws EamDbException if the AbstractSqlEamDb class has one or more + * default correlation type(s) having an invalid db + * table name. */ private PostgresEamDb() throws EamDbException { dbSettings = new PostgresEamDbSettings(); @@ -73,8 +75,8 @@ final class PostgresEamDb extends AbstractSqlEamDb { @Override public void shutdownConnections() throws EamDbException { try { - synchronized(this) { - if(connectionPool != null){ + synchronized (this) { + if (connectionPool != null) { connectionPool.close(); connectionPool = null; // force it to be re-created on next connect() } @@ -148,7 +150,7 @@ final class PostgresEamDb extends AbstractSqlEamDb { connectionURL.append(dbSettings.getPort()); connectionURL.append("/"); connectionURL.append(dbSettings.getDbName()); - + connectionPool.setUrl(connectionURL.toString()); connectionPool.setUsername(dbSettings.getUserName()); connectionPool.setPassword(dbSettings.getPassword()); @@ -189,31 +191,34 @@ final class PostgresEamDb extends AbstractSqlEamDb { protected String getConflictClause() { return CONFLICT_CLAUSE; } - + /** - * Gets an exclusive lock (if applicable). - * Will return the lock if successful, null if unsuccessful because locking - * isn't supported, and throw an exception if we should have been able to get the - * lock but failed (meaning the database is in use). + * Gets an exclusive lock (if applicable). Will return the lock if + * successful, null if unsuccessful because locking isn't supported, and + * throw an exception if we should have been able to get the lock but failed + * (meaning the database is in use). + * * @return the lock, or null if locking is not supported - * @throws EamDbException if the coordination service is running but we fail to get the lock + * + * @throws EamDbException if the coordination service is running but we fail + * to get the lock */ @Override - public CoordinationService.Lock getExclusiveMultiUserDbLock() throws EamDbException{ + public CoordinationService.Lock getExclusiveMultiUserDbLock() throws EamDbException { try { // First check if multi user mode is enabled - if not there's no point trying to get a lock - if( ! 
UserPreferences.getIsMultiUserModeEnabled()){ + if (!UserPreferences.getIsMultiUserModeEnabled()) { return null; } - + String databaseNodeName = dbSettings.getHost() + "_" + dbSettings.getDbName(); CoordinationService.Lock lock = CoordinationService.getInstance().tryGetExclusiveLock(CoordinationService.CategoryNode.CENTRAL_REPO, databaseNodeName, 5, TimeUnit.MINUTES); - if(lock != null){ + if (lock != null) { return lock; } throw new EamDbException("Error acquiring database lock"); - } catch (InterruptedException ex){ + } catch (InterruptedException ex) { throw new EamDbException("Error acquiring database lock"); } catch (CoordinationService.CoordinationServiceException ex) { // This likely just means the coordination service isn't running, which is ok @@ -221,4 +226,23 @@ final class PostgresEamDb extends AbstractSqlEamDb { } } + @Override + boolean doesColumnExist(Connection conn, String tableName, String columnName) throws SQLException { + final String objectIdColumnExistsTemplate = "SELECT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='%s' AND column_name='%s')"; //NON-NLS + ResultSet resultSet = null; + Statement statement = null; + boolean columnExists = false; + try { + statement = conn.createStatement(); + resultSet = statement.executeQuery(String.format(objectIdColumnExistsTemplate, tableName, columnName)); + if (resultSet.next()) { + columnExists = resultSet.getBoolean(1); + } + } finally { + EamDbUtil.closeResultSet(resultSet); + EamDbUtil.closeStatement(statement); + } + return columnExists; + } + } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java index 77ab8c23db..8183f445e3 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java @@ -35,8 +35,9 @@ import org.sleuthkit.autopsy.coreutils.TextConverterException; /** * Settings for the Postgres implementation of the Central Repository database - * - * NOTE: This is public scope because the options panel calls it directly to set/get + * + * NOTE: This is public scope because the options panel calls it directly to + * set/get */ public final class PostgresEamDbSettings { @@ -266,7 +267,7 @@ public final class PostgresEamDbSettings { return true; } - + public boolean deleteDatabase() { Connection conn = getEphemeralConnection(true); if (null == conn) { @@ -391,26 +392,13 @@ public final class PostgresEamDbSettings { createCorrelationTypesTable.append("CONSTRAINT correlation_types_names UNIQUE (display_name, db_table_name)"); createCorrelationTypesTable.append(")"); - // Each "%s" will be replaced with the relevant TYPE_instances table name. 
- StringBuilder createArtifactInstancesTableTemplate = new StringBuilder(); - createArtifactInstancesTableTemplate.append("CREATE TABLE IF NOT EXISTS %s ("); - createArtifactInstancesTableTemplate.append("id SERIAL PRIMARY KEY,"); - createArtifactInstancesTableTemplate.append("case_id integer NOT NULL,"); - createArtifactInstancesTableTemplate.append("data_source_id integer NOT NULL,"); - createArtifactInstancesTableTemplate.append("value text NOT NULL,"); - createArtifactInstancesTableTemplate.append("file_path text NOT NULL,"); - createArtifactInstancesTableTemplate.append("known_status integer NOT NULL,"); - createArtifactInstancesTableTemplate.append("comment text,"); - createArtifactInstancesTableTemplate.append("CONSTRAINT %s_multi_unique_ UNIQUE (data_source_id, value, file_path),"); - createArtifactInstancesTableTemplate.append("foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,"); - createArtifactInstancesTableTemplate.append("foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL"); - createArtifactInstancesTableTemplate.append(")"); + String createArtifactInstancesTableTemplate = getCreateArtifactInstancesTableTemplate(); - // Each "%s" will be replaced with the relevant TYPE_instances table name. - String instancesIdx1 = "CREATE INDEX IF NOT EXISTS %s_case_id ON %s (case_id)"; - String instancesIdx2 = "CREATE INDEX IF NOT EXISTS %s_data_source_id ON %s (data_source_id)"; - String instancesIdx3 = "CREATE INDEX IF NOT EXISTS %s_value ON %s (value)"; - String instancesIdx4 = "CREATE INDEX IF NOT EXISTS %s_value_known_status ON %s (value, known_status)"; + String instancesCaseIdIdx = getAddCaseIdIndexTemplate(); + String instancesDatasourceIdIdx = getAddDataSourceIdIndexTemplate(); + String instancesValueIdx = getAddValueIndexTemplate(); + String instancesKnownStatusIdx = getAddKnownStatusIndexTemplate(); + String instancesObjectIdIdx = getAddObjectIdIndexTemplate(); StringBuilder createDbInfoTable = new StringBuilder(); createDbInfoTable.append("CREATE TABLE IF NOT EXISTS db_info ("); @@ -447,25 +435,26 @@ public final class PostgresEamDbSettings { // Create a separate instance and reference table for each correlation type List DEFAULT_CORRELATION_TYPES = CorrelationAttributeInstance.getDefaultCorrelationTypes(); - + String reference_type_dbname; String instance_type_dbname; for (CorrelationAttributeInstance.Type type : DEFAULT_CORRELATION_TYPES) { reference_type_dbname = EamDbUtil.correlationTypeToReferenceTableName(type); instance_type_dbname = EamDbUtil.correlationTypeToInstanceTableName(type); - - stmt.execute(String.format(createArtifactInstancesTableTemplate.toString(), instance_type_dbname, instance_type_dbname)); - stmt.execute(String.format(instancesIdx1, instance_type_dbname, instance_type_dbname)); - stmt.execute(String.format(instancesIdx2, instance_type_dbname, instance_type_dbname)); - stmt.execute(String.format(instancesIdx3, instance_type_dbname, instance_type_dbname)); - stmt.execute(String.format(instancesIdx4, instance_type_dbname, instance_type_dbname)); + + stmt.execute(String.format(createArtifactInstancesTableTemplate, instance_type_dbname, instance_type_dbname)); + stmt.execute(String.format(instancesCaseIdIdx, instance_type_dbname, instance_type_dbname)); + stmt.execute(String.format(instancesDatasourceIdIdx, instance_type_dbname, instance_type_dbname)); + stmt.execute(String.format(instancesValueIdx, instance_type_dbname, instance_type_dbname)); + 
stmt.execute(String.format(instancesKnownStatusIdx, instance_type_dbname, instance_type_dbname)); + stmt.execute(String.format(instancesObjectIdIdx, instance_type_dbname, instance_type_dbname)); // FUTURE: allow more than the FILES type if (type.getId() == CorrelationAttributeInstance.FILES_TYPE_ID) { stmt.execute(String.format(createReferenceTypesTableTemplate.toString(), reference_type_dbname, reference_type_dbname)); stmt.execute(String.format(referenceTypesIdx1, reference_type_dbname, reference_type_dbname)); stmt.execute(String.format(referenceTypesIdx2, reference_type_dbname, reference_type_dbname)); - } + } } } catch (SQLException ex) { @@ -480,6 +469,97 @@ public final class PostgresEamDbSettings { return true; } + /** + * Get the template String for creating a new _instances table in a Postgres + * central repository. %s will exist in the template where the name of the + * new table will be addedd. + * + * @return a String which is a template for cretating a new _instances table + */ + static String getCreateArtifactInstancesTableTemplate() { + // Each "%s" will be replaced with the relevant TYPE_instances table name. + StringBuilder createArtifactInstancesTableTemplate = new StringBuilder(); + createArtifactInstancesTableTemplate.append("CREATE TABLE IF NOT EXISTS %s ("); + createArtifactInstancesTableTemplate.append("id SERIAL PRIMARY KEY,"); + createArtifactInstancesTableTemplate.append("case_id integer NOT NULL,"); + createArtifactInstancesTableTemplate.append("data_source_id integer NOT NULL,"); + createArtifactInstancesTableTemplate.append("value text NOT NULL,"); + createArtifactInstancesTableTemplate.append("file_path text NOT NULL,"); + createArtifactInstancesTableTemplate.append("known_status integer NOT NULL,"); + createArtifactInstancesTableTemplate.append("comment text,"); + createArtifactInstancesTableTemplate.append("file_obj_id integer,"); + createArtifactInstancesTableTemplate.append("CONSTRAINT %s_multi_unique_ UNIQUE (data_source_id, value, file_path),"); + createArtifactInstancesTableTemplate.append("foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,"); + createArtifactInstancesTableTemplate.append("foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL"); + createArtifactInstancesTableTemplate.append(")"); + return createArtifactInstancesTableTemplate.toString(); + } + + /** + * Get the template for creating an index on the case_id column of an + * instance table. %s will exist in the template where the name of the new + * table will be addedd. + * + * @return a String which is a template for adding an index to the case_id + * column of a _instances table + */ + static String getAddCaseIdIndexTemplate() { + // Each "%s" will be replaced with the relevant TYPE_instances table name. + return "CREATE INDEX IF NOT EXISTS %s_case_id ON %s (case_id)"; + } + + /** + * Get the template for creating an index on the data_source_id column of an + * instance table. %s will exist in the template where the name of the new + * table will be addedd. + * + * @return a String which is a template for adding an index to the + * data_source_id column of a _instances table + */ + static String getAddDataSourceIdIndexTemplate() { + // Each "%s" will be replaced with the relevant TYPE_instances table name. + return "CREATE INDEX IF NOT EXISTS %s_data_source_id ON %s (data_source_id)"; + } + + /** + * Get the template for creating an index on the value column of an instance + * table. 
%s will exist in the template where the name of the new table will + * be addedd. + * + * @return a String which is a template for adding an index to the value + * column of a _instances table + */ + static String getAddValueIndexTemplate() { + // Each "%s" will be replaced with the relevant TYPE_instances table name. + return "CREATE INDEX IF NOT EXISTS %s_value ON %s (value)"; + } + + /** + * Get the template for creating an index on the known_status column of an + * instance table. %s will exist in the template where the name of the new + * table will be addedd. + * + * @return a String which is a template for adding an index to the + * known_status column of a _instances table + */ + static String getAddKnownStatusIndexTemplate() { + // Each "%s" will be replaced with the relevant TYPE_instances table name. + return "CREATE INDEX IF NOT EXISTS %s_value_known_status ON %s (value, known_status)"; + } + + /** + * Get the template for creating an index on the file_obj_id column of an + * instance table. %s will exist in the template where the name of the new + * table will be addedd. + * + * @return a String which is a template for adding an index to the file_obj_id + * column of a _instances table + */ + static String getAddObjectIdIndexTemplate() { + // Each "%s" will be replaced with the relevant TYPE_instances table name. + return "CREATE INDEX IF NOT EXISTS %s_file_obj_id ON %s (file_obj_id)"; + } + public boolean insertDefaultDatabaseContent() { Connection conn = getEphemeralConnection(false); if (null == conn) { diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java index a75f4648ff..6468801a57 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java @@ -19,6 +19,7 @@ package org.sleuthkit.autopsy.centralrepository.datamodel; import java.sql.Connection; +import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Arrays; @@ -57,7 +58,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { * @return the singleton instance of SqliteEamDb * * @throws EamDbException if one or more default correlation type(s) have an - * invalid db table name. + * invalid db table name. */ public synchronized static SqliteEamDb getInstance() throws EamDbException { if (instance == null) { @@ -70,7 +71,8 @@ final class SqliteEamDb extends AbstractSqlEamDb { /** * * @throws EamDbException if the AbstractSqlEamDb class has one or more - * default correlation type(s) having an invalid db table name. + * default correlation type(s) having an invalid db + * table name. */ private SqliteEamDb() throws EamDbException { dbSettings = new SqliteEamDbSettings(); @@ -205,7 +207,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { /** * Add a new name/value pair in the db_info table. * - * @param name Key to set + * @param name Key to set * @param value Value to set * * @throws EamDbException @@ -242,7 +244,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { /** * Update the value for a name in the name/value db_info table. * - * @param name Name to find + * @param name Name to find * @param value Value to assign to name. 
* * @throws EamDbException @@ -372,8 +374,8 @@ final class SqliteEamDb extends AbstractSqlEamDb { /** * Retrieves Data Source details based on data source device ID * - * @param correlationCase the current CorrelationCase used for ensuring - * uniqueness of DataSource + * @param correlationCase the current CorrelationCase used for ensuring + * uniqueness of DataSource * @param dataSourceDeviceId the data source device ID number * * @return The data source @@ -387,13 +389,13 @@ final class SqliteEamDb extends AbstractSqlEamDb { releaseSharedLock(); } } - + /** * Retrieves Data Source details based on data source ID * - * @param correlationCase the current CorrelationCase used for ensuring - * uniqueness of DataSource - * @param dataSourceId the data source ID number + * @param correlationCase the current CorrelationCase used for ensuring + * uniqueness of DataSource + * @param dataSourceId the data source ID number * * @return The data source */ @@ -461,7 +463,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { * Retrieves eamArtifact instances from the database that are associated * with the aType and filePath * - * @param aType EamArtifact.Type to search for + * @param aType EamArtifact.Type to search for * @param filePath File path to search for * * @return List of 0 or more EamArtifactInstances @@ -486,7 +488,8 @@ final class SqliteEamDb extends AbstractSqlEamDb { * @param value The value to search for * * @return Number of artifact instances having ArtifactType and - * ArtifactValue. + * ArtifactValue. + * * @throws EamDbException */ @Override @@ -518,6 +521,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { * @param value The value to search for * * @return Number of unique tuples + * * @throws EamDbException */ @Override @@ -545,11 +549,11 @@ final class SqliteEamDb extends AbstractSqlEamDb { * associated with the caseDisplayName and dataSource of the given * eamArtifact instance. * - * @param caseUUID Case ID to search for + * @param caseUUID Case ID to search for * @param dataSourceID Data source ID to search for * * @return Number of artifact instances having caseDisplayName and - * dataSource + * dataSource */ @Override public Long getCountArtifactInstancesByCaseDataSource(String caseUUID, String dataSourceID) throws EamDbException { @@ -563,7 +567,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { /** * Executes a bulk insert of the eamArtifacts added from the - addAttributeInstanceBulk() method + * addAttributeInstanceBulk() method */ @Override public void commitAttributeInstancesBulk() throws EamDbException { @@ -596,7 +600,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { * * @param eamArtifact Artifact containing exactly one (1) ArtifactInstance. * @param knownStatus The status to change the artifact to. Should never be - * KNOWN + * KNOWN */ @Override public void setAttributeInstanceKnownStatus(CorrelationAttributeInstance eamArtifact, TskData.FileKnown knownStatus) throws EamDbException { @@ -633,7 +637,9 @@ final class SqliteEamDb extends AbstractSqlEamDb { * "Bad". * * @param aType EamArtifact.Type to search for + * * @return List with 0 or more matching eamArtifact instances. 
+ * * @throws EamDbException */ @Override @@ -672,7 +678,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { * @param value Value to search for * * @return List of cases containing this artifact with instances marked as - * bad + * bad * * @throws EamDbException */ @@ -690,6 +696,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { * Remove a reference set and all values contained in it. * * @param referenceSetID + * * @throws EamDbException */ @Override @@ -708,6 +715,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { * @param value * @param referenceSetID * @param correlationTypeID + * * @return true if the hash is found in the reference set */ @Override @@ -723,8 +731,9 @@ final class SqliteEamDb extends AbstractSqlEamDb { /** * Process the Artifact instance in the EamDb * - * @param type EamArtifact.Type to search for + * @param type EamArtifact.Type to search for * @param instanceTableCallback callback to process the instance + * * @throws EamDbException */ @Override @@ -736,12 +745,13 @@ final class SqliteEamDb extends AbstractSqlEamDb { releaseSharedLock(); } } - + /** * Process the Artifact instance in the EamDb * - * @param type EamArtifact.Type to search for + * @param type EamArtifact.Type to search for * @param instanceTableCallback callback to process the instance + * * @throws EamDbException */ @Override @@ -752,7 +762,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { } finally { releaseSharedLock(); } - } + } /** * Check whether a reference set with the given name/version is in the @@ -761,7 +771,9 @@ final class SqliteEamDb extends AbstractSqlEamDb { * * @param referenceSetName * @param version + * * @return true if a matching set is found + * * @throws EamDbException */ @Override @@ -928,7 +940,8 @@ final class SqliteEamDb extends AbstractSqlEamDb { * Add a new reference instance * * @param eamGlobalFileInstance The reference instance to add - * @param correlationType Correlation Type that this Reference Instance is + * @param correlationType Correlation Type that this Reference + * Instance is * * @throws EamDbException */ @@ -960,7 +973,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { /** * Get all reference entries having a given correlation type and value * - * @param aType Type to use for matching + * @param aType Type to use for matching * @param aValue Value to use for matching * * @return List of all global file instances with a type and value @@ -1001,7 +1014,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { * artifacts. * * @return List of EamArtifact.Type's. If none are defined in the database, - * the default list will be returned. + * the default list will be returned. * * @throws EamDbException */ @@ -1020,7 +1033,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { * artifacts. * * @return List of enabled EamArtifact.Type's. If none are defined in the - * database, the default list will be returned. + * database, the default list will be returned. * * @throws EamDbException */ @@ -1039,7 +1052,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { * correlate artifacts. * * @return List of supported EamArtifact.Type's. If none are defined in the - * database, the default list will be returned. + * database, the default list will be returned. * * @throws EamDbException */ @@ -1111,8 +1124,9 @@ final class SqliteEamDb extends AbstractSqlEamDb { * (meaning the database is in use). 
* * @return the lock, or null if locking is not supported + * * @throws EamDbException if the coordination service is running but we fail - * to get the lock + * to get the lock */ @Override public CoordinationService.Lock getExclusiveMultiUserDbLock() throws EamDbException { @@ -1156,4 +1170,26 @@ final class SqliteEamDb extends AbstractSqlEamDb { rwLock.readLock().unlock(); } + @Override + boolean doesColumnExist(Connection conn, String tableName, String columnName) throws SQLException { + final String tableInfoQueryTemplate = "PRAGMA table_info(%s)"; //NON-NLS + ResultSet resultSet = null; + Statement statement = null; + boolean columnExists = false; + try { + statement = conn.createStatement(); + resultSet = statement.executeQuery(String.format(tableInfoQueryTemplate, tableName)); + while (resultSet.next()) { + // the second value ( 2 ) is the column name + if (resultSet.getString(2).equals(columnName)) { + columnExists = true; + break; + } + } + } finally { + EamDbUtil.closeResultSet(resultSet); + EamDbUtil.closeStatement(statement); + } + return columnExists; + } } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java index 4894a570e6..8b034149fc 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java @@ -35,8 +35,9 @@ import org.sleuthkit.autopsy.coreutils.PlatformUtil; /** * Settings for the sqlite implementation of the Central Repository database - * - * NOTE: This is public scope because the options panel calls it directly to set/get + * + * NOTE: This is public scope because the options panel calls it directly to + * set/get */ public final class SqliteEamDbSettings { @@ -95,7 +96,7 @@ public final class SqliteEamDbSettings { ModuleSettings.setConfigSetting("CentralRepository", "db.sqlite.dbDirectory", getDbDirectory()); // NON-NLS ModuleSettings.setConfigSetting("CentralRepository", "db.sqlite.bulkThreshold", Integer.toString(getBulkThreshold())); // NON-NLS } - + /** * Verify that the db file exists. * @@ -103,11 +104,11 @@ public final class SqliteEamDbSettings { */ public boolean dbFileExists() { File dbFile = new File(getFileNameWithPath()); - if(! dbFile.exists()){ + if (!dbFile.exists()) { return false; } // It's unlikely, but make sure the file isn't actually a directory - return ( ! dbFile.isDirectory()); + return (!dbFile.isDirectory()); } /** @@ -148,10 +149,11 @@ public final class SqliteEamDbSettings { return true; } - + /** * Delete the database - * @return + * + * @return */ public boolean deleteDatabase() { File dbFile = new File(this.getFileNameWithPath()); @@ -333,27 +335,14 @@ public final class SqliteEamDbSettings { createCorrelationTypesTable.append("CONSTRAINT correlation_types_names UNIQUE (display_name, db_table_name)"); createCorrelationTypesTable.append(")"); - // Each "%s" will be replaced with the relevant TYPE_instances table name. 
- StringBuilder createArtifactInstancesTableTemplate = new StringBuilder(); - createArtifactInstancesTableTemplate.append("CREATE TABLE IF NOT EXISTS %s ("); - createArtifactInstancesTableTemplate.append("id integer primary key autoincrement NOT NULL,"); - createArtifactInstancesTableTemplate.append("case_id integer NOT NULL,"); - createArtifactInstancesTableTemplate.append("data_source_id integer NOT NULL,"); - createArtifactInstancesTableTemplate.append("value text NOT NULL,"); - createArtifactInstancesTableTemplate.append("file_path text NOT NULL,"); - createArtifactInstancesTableTemplate.append("known_status integer NOT NULL,"); - createArtifactInstancesTableTemplate.append("comment text,"); - createArtifactInstancesTableTemplate.append("CONSTRAINT %s_multi_unique UNIQUE(data_source_id, value, file_path) ON CONFLICT IGNORE,"); - createArtifactInstancesTableTemplate.append("foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,"); - createArtifactInstancesTableTemplate.append("foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL"); - createArtifactInstancesTableTemplate.append(")"); - - // Each "%s" will be replaced with the relevant TYPE_instances table name. - String instancesIdx1 = "CREATE INDEX IF NOT EXISTS %s_case_id ON %s (case_id)"; - String instancesIdx2 = "CREATE INDEX IF NOT EXISTS %s_data_source_id ON %s (data_source_id)"; - String instancesIdx3 = "CREATE INDEX IF NOT EXISTS %s_value ON %s (value)"; - String instancesIdx4 = "CREATE INDEX IF NOT EXISTS %s_value_known_status ON %s (value, known_status)"; + String createArtifactInstancesTableTemplate = getCreateArtifactInstancesTableTemplate(); + String instancesCaseIdIdx = getAddCaseIdIndexTemplate(); + String instancesDatasourceIdIdx = getAddDataSourceIdIndexTemplate(); + String instancesValueIdx = getAddValueIndexTemplate(); + String instancesKnownStatusIdx = getAddKnownStatusIndexTemplate(); + String instancesObjectIdIdx = getAddObjectIdIndexTemplate(); + StringBuilder createDbInfoTable = new StringBuilder(); createDbInfoTable.append("CREATE TABLE IF NOT EXISTS db_info ("); createDbInfoTable.append("id integer primary key NOT NULL,"); @@ -402,11 +391,12 @@ public final class SqliteEamDbSettings { reference_type_dbname = EamDbUtil.correlationTypeToReferenceTableName(type); instance_type_dbname = EamDbUtil.correlationTypeToInstanceTableName(type); - stmt.execute(String.format(createArtifactInstancesTableTemplate.toString(), instance_type_dbname, instance_type_dbname)); - stmt.execute(String.format(instancesIdx1, instance_type_dbname, instance_type_dbname)); - stmt.execute(String.format(instancesIdx2, instance_type_dbname, instance_type_dbname)); - stmt.execute(String.format(instancesIdx3, instance_type_dbname, instance_type_dbname)); - stmt.execute(String.format(instancesIdx4, instance_type_dbname, instance_type_dbname)); + stmt.execute(String.format(createArtifactInstancesTableTemplate, instance_type_dbname, instance_type_dbname)); + stmt.execute(String.format(instancesCaseIdIdx, instance_type_dbname, instance_type_dbname)); + stmt.execute(String.format(instancesDatasourceIdIdx, instance_type_dbname, instance_type_dbname)); + stmt.execute(String.format(instancesValueIdx, instance_type_dbname, instance_type_dbname)); + stmt.execute(String.format(instancesKnownStatusIdx, instance_type_dbname, instance_type_dbname)); + stmt.execute(String.format(instancesObjectIdIdx, instance_type_dbname, instance_type_dbname)); // FUTURE: allow more than the FILES type if 
(type.getId() == CorrelationAttributeInstance.FILES_TYPE_ID) { @@ -426,6 +416,97 @@ public final class SqliteEamDbSettings { } return true; } + + /** + * Get the template String for creating a new _instances table in a Sqlite + * central repository. %s will exist in the template where the name of the + * new table will be added. + * + * @return a String which is a template for creating a new _instances table + */ + static String getCreateArtifactInstancesTableTemplate() { + // Each "%s" will be replaced with the relevant TYPE_instances table name. + StringBuilder createArtifactInstancesTableTemplate = new StringBuilder(); + createArtifactInstancesTableTemplate.append("CREATE TABLE IF NOT EXISTS %s ("); + createArtifactInstancesTableTemplate.append("id integer primary key autoincrement NOT NULL,"); + createArtifactInstancesTableTemplate.append("case_id integer NOT NULL,"); + createArtifactInstancesTableTemplate.append("data_source_id integer NOT NULL,"); + createArtifactInstancesTableTemplate.append("value text NOT NULL,"); + createArtifactInstancesTableTemplate.append("file_path text NOT NULL,"); + createArtifactInstancesTableTemplate.append("known_status integer NOT NULL,"); + createArtifactInstancesTableTemplate.append("comment text,"); + createArtifactInstancesTableTemplate.append("file_obj_id integer,"); + createArtifactInstancesTableTemplate.append("CONSTRAINT %s_multi_unique UNIQUE(data_source_id, value, file_path) ON CONFLICT IGNORE,"); + createArtifactInstancesTableTemplate.append("foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,"); + createArtifactInstancesTableTemplate.append("foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL"); + createArtifactInstancesTableTemplate.append(")"); + return createArtifactInstancesTableTemplate.toString(); + } + + /** + * Get the template for creating an index on the case_id column of an + * instance table. %s will exist in the template where the name of the new + * table will be added. + * + * @return a String which is a template for adding an index to the case_id + * column of a _instances table + */ + static String getAddCaseIdIndexTemplate() { + // Each "%s" will be replaced with the relevant TYPE_instances table name. + return "CREATE INDEX IF NOT EXISTS %s_case_id ON %s (case_id)"; + } + + /** + * Get the template for creating an index on the data_source_id column of an + * instance table. %s will exist in the template where the name of the new + * table will be added. + * + * @return a String which is a template for adding an index to the + * data_source_id column of a _instances table + */ + static String getAddDataSourceIdIndexTemplate() { + // Each "%s" will be replaced with the relevant TYPE_instances table name. + return "CREATE INDEX IF NOT EXISTS %s_data_source_id ON %s (data_source_id)"; + } + + /** + * Get the template for creating an index on the value column of an instance + * table. %s will exist in the template where the name of the new table will + * be added. + * + * @return a String which is a template for adding an index to the value + * column of a _instances table + */ + static String getAddValueIndexTemplate() { + // Each "%s" will be replaced with the relevant TYPE_instances table name. + return "CREATE INDEX IF NOT EXISTS %s_value ON %s (value)"; + } + + /** + * Get the template for creating an index on the known_status column of an + * instance table. %s will exist in the template where the name of the new + * table will be added. 
+ * + * @return a String which is a template for adding an index to the + * known_status column of a _instances table + */ + static String getAddKnownStatusIndexTemplate() { + // Each "%s" will be replaced with the relevant TYPE_instances table name. + return "CREATE INDEX IF NOT EXISTS %s_value_known_status ON %s (value, known_status)"; + } + + /** + * Get the template for creating an index on the file_obj_id column of an + * instance table. %s will exist in the template where the name of the new + * table will be added. + * + * @return a String which is a template for adding an index to the file_obj_id + * column of a _instances table + */ + static String getAddObjectIdIndexTemplate() { + // Each "%s" will be replaced with the relevant TYPE_instances table name. + return "CREATE INDEX IF NOT EXISTS %s_file_obj_id ON %s (file_obj_id)"; + } public boolean insertDefaultDatabaseContent() { Connection conn = getEphemeralConnection(); @@ -490,8 +571,6 @@ public final class SqliteEamDbSettings { } } - - /** * @return the dbDirectory */ diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java index cdfc282b7e..34436380e2 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java @@ -183,7 +183,10 @@ public class IngestEventsListener { @Override public void propertyChange(PropertyChangeEvent evt) { - if (getCeModuleInstanceCount() > 0) { + //if ingest is running we want to check if there is a Correlation Engine module running + //sometimes artifacts are generated by DSPs or other sources while ingest is not running + //in these cases we still want to create correlation attributes for those artifacts when appropriate + if (!IngestManager.getInstance().isIngestRunning() || getCeModuleInstanceCount() > 0) { EamDb dbManager; try { dbManager = EamDb.getInstance(); @@ -193,7 +196,9 @@ public class IngestEventsListener { } switch (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName())) { case DATA_ADDED: { - jobProcessingExecutor.submit(new DataAddedTask(dbManager, evt, isFlagNotableItems())); + //if ingest isn't running, create the interesting items; otherwise use the ingest module setting to determine if we create interesting items + boolean flagNotable = !IngestManager.getInstance().isIngestRunning() || isFlagNotableItems(); + jobProcessingExecutor.submit(new DataAddedTask(dbManager, evt, flagNotable)); break; } } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestModule.java index 05fb3e4c8c..e7894c7916 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestModule.java @@ -70,7 +70,6 @@ final class IngestModule implements FileIngestModule { private CorrelationDataSource eamDataSource; private Blackboard blackboard; private CorrelationAttributeInstance.Type filesType; - private final boolean flagTaggedNotableItems; /** @@ -152,14 +151,14 @@ final class IngestModule implements FileIngestModule { // insert this file into the central repository try { CorrelationAttributeInstance cefi = new CorrelationAttributeInstance( + filesType, md5, - filesType, eamCase, eamDataSource, 
abstractFile.getParentPath() + abstractFile.getName(), null, TskData.FileKnown.UNKNOWN // NOTE: Known status in the CR is based on tagging, not hashes like the Case Database. - ); +, abstractFile.getId()); dbManager.addAttributeInstanceBulk(cefi); } catch (EamDbException ex) { logger.log(Level.SEVERE, "Error adding artifact to bulk artifacts.", ex); // NON-NLS diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/Bundle.properties b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/Bundle.properties index 0fc1951593..aae9fa321d 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/Bundle.properties @@ -34,7 +34,7 @@ AddNewOrganizationDialog.bnOK.text=OK AddNewOrganizationDialog.tfName.tooltip=POC Name ManageTagsDialog.okButton.text=OK ManageTagsDialog.cancelButton.text=Cancel -ManageArtifactTypesDialog.taInstructionsMsg.text=Enable one or more correlation properties to use for correlation during ingest. Note, these properties are global and impact all users of the central repository. +ManageArtifactTypesDialog.taInstructionsMsg.text=Enable one or more correlation properties to use for correlation during ingest. Note, these properties are global and impact all users of the Central Repository. EamSqliteSettingsDialog.bnOk.text=OK EamPostgresSettingsDialog.bnSave.text=Save EamDbSettingsDialog.bnDatabasePathFileOpen.text=Browse... @@ -58,11 +58,10 @@ ManageCorrelationPropertiesDialog.okButton.text=OK GlobalSettingsPanel.bnManageProperties.text=Manage Correlation Properties EamDbSettingsDialog.lbDatabaseDesc.text=Database File: EamDbSettingsDialog.lbFullDbPath.text= -GlobalSettingsPanel.cbUseCentralRepo.text=Use a central repository -GlobalSettingsPanel.correlationPropertiesTextArea.text=Choose which file and result properties to store in the central repository for later correlation.\n -GlobalSettingsPanel.organizationTextArea.text=Organization information can be tracked in the central repository. +GlobalSettingsPanel.cbUseCentralRepo.text=Use a Central Repository +GlobalSettingsPanel.organizationTextArea.text=Organization information can be tracked in the Central Repository. GlobalSettingsPanel.manageOrganizationButton.text=Manage Organizations -GlobalSettingsPanel.lbCentralRepository.text=A central repository allows you to correlate files and results between cases. +GlobalSettingsPanel.lbCentralRepository.text=A Central Repository allows you to correlate files and results between cases. GlobalSettingsPanel.pnCorrelationProperties.border.title=Correlation Properties GlobalSettingsPanel.organizationPanel.border.title=Organizations GlobalSettingsPanel.casesPanel.border.title=Case Details @@ -74,8 +73,9 @@ ShowCasesDialog.caseDetailsTable.toolTipText=Click column name to sort. Right-cl ShowCasesDialog.title=Case Details GlobalSettingsPanel.Case\ Details.AccessibleContext.accessibleName=Cases Details ShowCasesDialog.caseDetailsTable.AccessibleContext.accessibleDescription=Click column name to sort. -GlobalSettingsPanel.casesTextArea.text=Display table that lists central repository case details. -GlobalSettingsPanel.ingestRunningWarningLabel.text=Cannot make changes to central repository settings when ingest is running! +GlobalSettingsPanel.casesTextArea.text=Display table that lists Central Repository case details. +GlobalSettingsPanel.ingestRunningWarningLabel.text=Cannot make changes to Central Repository settings when ingest is running! 
+GlobalSettingsPanel.correlationPropertiesTextArea.text=Choose which file and result properties to store in the Central Repository for later correlation.\n ManageCasesDialog.examinerPhoneLabel.text=Examiner Phone: ManageCasesDialog.examinerNameLabel.text=Examiner Name: ManageCasesDialog.examinerEmailLabel.text=Examiner Email: @@ -84,4 +84,4 @@ ManageCasesDialog.orgLabel.text=Organization: ManageCasesDialog.closeButton.text=Close ManageCasesDialog.notesLabel.text=Notes: ManageCasesDialog.dataSourcesLabel.text=Data Sources: -ManageCasesDialog.caseInfoLabel.text=Case Info: +ManageCasesDialog.caseInfoLabel.text=Case Info: \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/EamDbSettingsDialog.java b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/EamDbSettingsDialog.java index c504f666e4..826e66ecbc 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/EamDbSettingsDialog.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/EamDbSettingsDialog.java @@ -102,7 +102,7 @@ public class EamDbSettingsDialog extends JDialog { @Override public String getDescription() { - return "Directories and central repository databases"; + return "Directories and Central Repository databases"; } }); cbDatabaseType.setSelectedItem(selectedPlatform); diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/GlobalSettingsPanel.form b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/GlobalSettingsPanel.form index c3a8f678d6..0a8c7dcc64 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/GlobalSettingsPanel.form +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/GlobalSettingsPanel.form @@ -57,7 +57,7 @@ - + @@ -67,7 +67,7 @@ - + @@ -89,13 +89,13 @@ - + - + - + - + @@ -242,14 +242,14 @@ - - + + - - + + @@ -258,8 +258,7 @@ - - + @@ -301,7 +300,7 @@ - + @@ -334,7 +333,7 @@ - + diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/GlobalSettingsPanel.java b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/GlobalSettingsPanel.java index e24fe70ea8..a4d6ef5cc0 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/GlobalSettingsPanel.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/GlobalSettingsPanel.java @@ -222,7 +222,7 @@ public final class GlobalSettingsPanel extends IngestModuleGlobalSettingsPanel i correlationPropertiesTextArea.setColumns(20); correlationPropertiesTextArea.setFont(new java.awt.Font("Tahoma", 0, 11)); // NOI18N correlationPropertiesTextArea.setLineWrap(true); - correlationPropertiesTextArea.setRows(2); + correlationPropertiesTextArea.setRows(1); correlationPropertiesTextArea.setText(org.openide.util.NbBundle.getMessage(GlobalSettingsPanel.class, "GlobalSettingsPanel.correlationPropertiesTextArea.text")); // NOI18N correlationPropertiesTextArea.setToolTipText(""); correlationPropertiesTextArea.setWrapStyleWord(true); @@ -236,17 +236,16 @@ public final class GlobalSettingsPanel extends IngestModuleGlobalSettingsPanel i .addGroup(pnCorrelationPropertiesLayout.createSequentialGroup() .addContainerGap() .addGroup(pnCorrelationPropertiesLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(correlationPropertiesScrollPane) .addGroup(pnCorrelationPropertiesLayout.createSequentialGroup() .addComponent(bnManageTypes) - .addGap(0, 0, Short.MAX_VALUE))) + .addGap(0, 0, Short.MAX_VALUE)) + 
.addComponent(correlationPropertiesScrollPane)) .addContainerGap()) ); pnCorrelationPropertiesLayout.setVerticalGroup( pnCorrelationPropertiesLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, pnCorrelationPropertiesLayout.createSequentialGroup() - .addGap(7, 7, 7) - .addComponent(correlationPropertiesScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addComponent(correlationPropertiesScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 32, Short.MAX_VALUE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(bnManageTypes) .addGap(8, 8, 8)) @@ -281,7 +280,7 @@ public final class GlobalSettingsPanel extends IngestModuleGlobalSettingsPanel i .addGroup(organizationPanelLayout.createSequentialGroup() .addContainerGap() .addGroup(organizationPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(organizationScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 992, Short.MAX_VALUE) + .addComponent(organizationScrollPane) .addGroup(organizationPanelLayout.createSequentialGroup() .addComponent(manageOrganizationButton) .addGap(0, 0, Short.MAX_VALUE))) @@ -356,7 +355,7 @@ public final class GlobalSettingsPanel extends IngestModuleGlobalSettingsPanel i jPanel1.setLayout(jPanel1Layout); jPanel1Layout.setHorizontalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(lbCentralRepository, javax.swing.GroupLayout.DEFAULT_SIZE, 1022, Short.MAX_VALUE) + .addComponent(lbCentralRepository, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addGroup(jPanel1Layout.createSequentialGroup() .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(pnDatabaseConfiguration, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) @@ -366,7 +365,7 @@ public final class GlobalSettingsPanel extends IngestModuleGlobalSettingsPanel i .addGroup(jPanel1Layout.createSequentialGroup() .addComponent(cbUseCentralRepo, javax.swing.GroupLayout.PREFERRED_SIZE, 162, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) - .addComponent(ingestRunningWarningLabel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) + .addComponent(ingestRunningWarningLabel, javax.swing.GroupLayout.DEFAULT_SIZE, 844, Short.MAX_VALUE)) .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() .addComponent(tbOops, javax.swing.GroupLayout.PREFERRED_SIZE, 974, javax.swing.GroupLayout.PREFERRED_SIZE))) @@ -382,13 +381,13 @@ public final class GlobalSettingsPanel extends IngestModuleGlobalSettingsPanel i .addComponent(ingestRunningWarningLabel)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(pnDatabaseConfiguration, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addGap(0, 0, 0) .addComponent(pnCorrelationProperties, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addGap(0, 0, 0) .addComponent(organizationPanel, javax.swing.GroupLayout.PREFERRED_SIZE, 
javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addGap(0, 0, 0) .addComponent(casesPanel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addGap(0, 0, 0) .addComponent(tbOops, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addContainerGap()) ); diff --git a/Core/src/org/sleuthkit/autopsy/commonfilesearch/CaseDBCommonAttributeInstanceNode.java b/Core/src/org/sleuthkit/autopsy/commonfilesearch/CaseDBCommonAttributeInstanceNode.java index b4f8ee525e..504aa45b97 100644 --- a/Core/src/org/sleuthkit/autopsy/commonfilesearch/CaseDBCommonAttributeInstanceNode.java +++ b/Core/src/org/sleuthkit/autopsy/commonfilesearch/CaseDBCommonAttributeInstanceNode.java @@ -18,17 +18,16 @@ */ package org.sleuthkit.autopsy.commonfilesearch; -import java.util.List; -import org.apache.commons.lang3.StringUtils; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; import org.openide.nodes.Sheet; -import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; -import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbUtil; -import org.sleuthkit.autopsy.core.UserPreferences; +import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.datamodel.AbstractAbstractFileNode; import org.sleuthkit.autopsy.datamodel.DisplayableItemNodeVisitor; import org.sleuthkit.autopsy.datamodel.FileNode; import org.sleuthkit.autopsy.datamodel.NodeProperty; import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.ContentTag; /** * Node that wraps CaseDBCommonAttributeInstance to represent a file instance @@ -75,33 +74,25 @@ public class CaseDBCommonAttributeInstanceNode extends FileNode { @Override protected Sheet createSheet() { - Sheet sheet = new Sheet(); + Sheet sheet = super.createSheet(); Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES); - if (sheetSet == null) { - sheetSet = Sheet.createPropertiesSet(); - sheet.put(sheetSet); + Set keepProps = new HashSet<>(Arrays.asList( + NbBundle.getMessage(AbstractAbstractFileNode.class, "AbstractAbstractFileNode.nameColLbl"), + NbBundle.getMessage(AbstractAbstractFileNode.class, "AbstractAbstractFileNode.createSheet.score.name"), + NbBundle.getMessage(AbstractAbstractFileNode.class, "AbstractAbstractFileNode.createSheet.comment.name"), + NbBundle.getMessage(AbstractAbstractFileNode.class, "AbstractAbstractFileNode.createSheet.count.name"), + NbBundle.getMessage(AbstractAbstractFileNode.class, "AbstractAbstractFileNode.mimeType"))); + + for(Property p : sheetSet.getProperties()) { + if(!keepProps.contains(p.getName())){ + sheetSet.remove(p.getName()); + } } - List tags = getContentTagsFromDatabase(); - final String NO_DESCR = Bundle.CommonFilesSearchResultsViewerTable_noDescText(); - - sheetSet.put(new NodeProperty<>(Bundle.CommonFilesSearchResultsViewerTable_filesColLbl(), Bundle.CommonFilesSearchResultsViewerTable_filesColLbl(), NO_DESCR, this.getContent().getName())); - addScoreProperty(sheetSet, tags); - - CorrelationAttributeInstance correlationAttribute = null; - if (EamDbUtil.useCentralRepo() && UserPreferences.hideCentralRepoCommentsAndOccurrences()== false) { - correlationAttribute = getCorrelationAttributeInstance(); - } - addCommentProperty(sheetSet, tags, correlationAttribute); - - if 
(EamDbUtil.useCentralRepo() && UserPreferences.hideCentralRepoCommentsAndOccurrences()== false) { - addCountProperty(sheetSet, correlationAttribute); - } - sheetSet.put(new NodeProperty<>(Bundle.CommonFilesSearchResultsViewerTable_pathColLbl(), Bundle.CommonFilesSearchResultsViewerTable_pathColLbl(), NO_DESCR, this.getContent().getParentPath())); sheetSet.put(new NodeProperty<>(Bundle.CommonFilesSearchResultsViewerTable_dataSourceColLbl(), Bundle.CommonFilesSearchResultsViewerTable_dataSourceColLbl(), NO_DESCR, this.getDataSource())); - sheetSet.put(new NodeProperty<>(Bundle.CommonFilesSearchResultsViewerTable_mimeTypeColLbl(), Bundle.CommonFilesSearchResultsViewerTable_mimeTypeColLbl(), NO_DESCR, StringUtils.defaultString(this.getContent().getMIMEType()))); sheetSet.put(new NodeProperty<>(Bundle.CommonFilesSearchResultsViewerTable_caseColLbl1(), Bundle.CommonFilesSearchResultsViewerTable_caseColLbl1(), NO_DESCR, caseName)); + return sheet; } } \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/AnnotationsContentViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/AnnotationsContentViewer.java index d88b5ac2fb..72343ec616 100755 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/AnnotationsContentViewer.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/AnnotationsContentViewer.java @@ -67,19 +67,19 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data initComponents(); Utilities.configureTextPaneAsHtml(jTextPane1); } - + @Override public void setNode(Node node) { if ((node == null) || (!isSupported(node))) { resetComponent(); return; } - + StringBuilder html = new StringBuilder(); - + BlackboardArtifact artifact = node.getLookup().lookup(BlackboardArtifact.class); Content sourceFile = null; - + try { if (artifact != null) { /* @@ -100,32 +100,32 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data "Exception while trying to retrieve a Content instance from the BlackboardArtifact '%s' (id=%d).", artifact.getDisplayName(), artifact.getArtifactID()), ex); } - + if (artifact != null) { populateTagData(html, artifact, sourceFile); } else { populateTagData(html, sourceFile); } - + if (sourceFile instanceof AbstractFile) { populateCentralRepositoryData(html, artifact, (AbstractFile) sourceFile); } - + setText(html.toString()); jTextPane1.setCaretPosition(0); } - + /** * Populate the "Selected Item" sections with tag data for the supplied * content. - * + * * @param html The HTML text to update. * @param content Selected content. */ private void populateTagData(StringBuilder html, Content content) { try { SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase(); - + startSection(html, "Selected Item"); List fileTagsList = tskCase.getContentTagsByContent(content); if (fileTagsList.isEmpty()) { @@ -142,11 +142,11 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data logger.log(Level.SEVERE, "Exception while getting tags from the case database.", ex); //NON-NLS } } - + /** * Populate the "Selected Item" and "Source File" sections with tag data for * a supplied artifact. - * + * * @param html The HTML text to update. * @param artifact A selected artifact. * @param sourceFile The source content of the selected artifact. 
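Both CaseDBCommonAttributeInstanceNode (above) and MessageContentViewer (further down in this patch) switch from hand-building a property sheet to calling super.createSheet() and then stripping every column that is not on an explicit keep-list. A minimal, self-contained sketch of that pattern follows; the class name and the literal column names in the keep-list are illustrative placeholders rather than the NbBundle-resolved keys the patch actually uses.

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;
    import org.openide.nodes.Node;
    import org.openide.nodes.Sheet;

    // Sketch only: trim an inherited property sheet down to a whitelist of column names.
    class SheetFilterSketch {

        static Sheet keepOnly(Sheet sheet, Set<String> keepProps) {
            Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
            if (sheetSet != null) {
                for (Node.Property<?> prop : sheetSet.getProperties()) {
                    // Sheet.Set.remove(String) is a no-op for names that are not present.
                    if (!keepProps.contains(prop.getName())) {
                        sheetSet.remove(prop.getName());
                    }
                }
            }
            return sheet;
        }

        // Placeholder column names; the real code resolves these through NbBundle message keys.
        static Set<String> exampleKeepList() {
            return new HashSet<>(Arrays.asList("Name", "Score", "Comment", "Count", "MIME Type"));
        }
    }

Starting from the parent's sheet keeps the Score/Comment/Count population logic in one place (the AbstractAbstractFileNode hierarchy) instead of repeating the central repository lookups in each viewer node, which appears to be the motivation for this part of the change.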
@@ -154,7 +154,7 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data private void populateTagData(StringBuilder html, BlackboardArtifact artifact, Content sourceFile) { try { SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase(); - + startSection(html, "Selected Item"); List artifactTagsList = tskCase.getBlackboardArtifactTagsByArtifact(artifact); if (artifactTagsList.isEmpty()) { @@ -165,7 +165,7 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data } } endSection(html); - + if (sourceFile != null) { startSection(html, "Source File"); List fileTagsList = tskCase.getContentTagsByContent(sourceFile); @@ -184,10 +184,10 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data logger.log(Level.SEVERE, "Exception while getting tags from the case database.", ex); //NON-NLS } } - + /** * Populate the "Central Repository Comments" section with data. - * + * * @param html The HTML text to update. * @param artifact A selected artifact (can be null). * @param sourceFile A selected file, or a source file of the selected @@ -208,23 +208,24 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data if (attributeType.getId() == CorrelationAttributeInstance.FILES_TYPE_ID) { CorrelationCase correlationCase = EamDb.getInstance().getCase(Case.getCurrentCase()); instancesList.add(new CorrelationAttributeInstance( - md5, attributeType, + md5, correlationCase, CorrelationDataSource.fromTSKDataSource(correlationCase, sourceFile.getDataSource()), sourceFile.getParentPath() + sourceFile.getName(), "", - sourceFile.getKnown())); + sourceFile.getKnown(), + sourceFile.getId())); break; } } } boolean commentDataFound = false; - + for (CorrelationAttributeInstance instance : instancesList) { - List correlatedInstancesList = - EamDb.getInstance().getArtifactInstancesByTypeValue(instance.getCorrelationType(), instance.getCorrelationValue()); + List correlatedInstancesList + = EamDb.getInstance().getArtifactInstancesByTypeValue(instance.getCorrelationType(), instance.getCorrelationValue()); for (CorrelationAttributeInstance correlatedInstance : correlatedInstancesList) { if (correlatedInstance.getComment() != null && correlatedInstance.getComment().isEmpty() == false) { commentDataFound = true; @@ -232,7 +233,7 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data } } } - + if (commentDataFound == false) { addMessage(html, "There is no comment data for the selected content in the Central Repository."); } @@ -247,16 +248,16 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data /** * Set the text of the text panel. - * + * * @param text The text to set to the text panel. */ private void setText(String text) { jTextPane1.setText("" + text + ""); //NON-NLS } - + /** * Start a new data section. - * + * * @param html The HTML text to add the section to. * @param sectionName The name of the section. */ @@ -265,10 +266,10 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data .append(sectionName) .append("


"); //NON-NLS } - + /** * Add a message. - * + * * @param html The HTML text to add the message to. * @param message The message text. */ @@ -277,10 +278,10 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data .append(message) .append("


"); //NON-NLS } - + /** * Add a data table containing information about a tag. - * + * * @param html The HTML text to add the table to. * @param tag The tag whose information will be used to populate the table. */ @@ -296,11 +297,11 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data addRow(html, Bundle.AnnotationsContentViewer_tagEntryDataLabel_comment(), formatHtmlString(tag.getComment())); endTable(html); } - + /** * Add a data table containing information about a correlation attribute * instance in the Central Repository. - * + * * @param html The HTML text to add the table to. * @param attributeInstance The attribute instance whose information will be * used to populate the table. @@ -319,10 +320,10 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data addRow(html, Bundle.AnnotationsContentViewer_centralRepositoryEntryDataLabel_path(), attributeInstance.getFilePath()); endTable(html); } - + /** * Start a data table. - * + * * @param html The HTML text to add the table to. */ private void startTable(StringBuilder html) { @@ -331,7 +332,7 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data /** * Add a data row to a table. - * + * * @param html The HTML text to add the row to. * @param key The key for the left column of the data row. * @param value The value for the right column of the data row. @@ -343,10 +344,10 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data html.append(value); html.append(""); //NON-NLS } - + /** * End a data table. - * + * * @param html The HTML text on which to end a table. */ private void endTable(StringBuilder html) { @@ -355,18 +356,19 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data /** * End a data section. - * + * * @param html The HTML text on which to end a section. */ private void endSection(StringBuilder html) { html.append("
"); //NON-NLS } - + /** * Apply escape sequence to special characters. Line feed and carriage * return character combinations will be converted to HTML line breaks. - * + * * @param text The text to format. + * * @return The formatted text. */ private String formatHtmlString(String text) { @@ -428,7 +430,7 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data @Override public boolean isSupported(Node node) { BlackboardArtifact artifact = node.getLookup().lookup(BlackboardArtifact.class); - + try { if (artifact != null) { if (artifact.getSleuthkitCase().getAbstractFileById(artifact.getObjectID()) != null) { @@ -444,7 +446,7 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data "Exception while trying to retrieve a Content instance from the BlackboardArtifact '%s' (id=%d).", artifact.getDisplayName(), artifact.getArtifactID()), ex); } - + return false; } diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/MessageContentViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/MessageContentViewer.java index e1565c3b89..bf319bde0c 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/MessageContentViewer.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/MessageContentViewer.java @@ -20,6 +20,7 @@ package org.sleuthkit.autopsy.contentviewers; import java.awt.Component; import java.util.Arrays; +import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; @@ -43,6 +44,7 @@ import org.sleuthkit.autopsy.corecomponentinterfaces.DataContentViewer; import org.sleuthkit.autopsy.corecomponents.DataResultPanel; import org.sleuthkit.autopsy.corecomponents.TableFilterNode; import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.datamodel.AbstractAbstractFileNode; import org.sleuthkit.autopsy.datamodel.FileNode; import org.sleuthkit.autopsy.datamodel.NodeProperty; import org.sleuthkit.autopsy.directorytree.DataResultFilterNode; @@ -721,31 +723,23 @@ public class MessageContentViewer extends javax.swing.JPanel implements DataCont @Override protected Sheet createSheet() { - Sheet sheet = new Sheet(); + Sheet sheet = super.createSheet(); + Set keepProps = new HashSet<>(Arrays.asList( + NbBundle.getMessage(AbstractAbstractFileNode.class, "AbstractAbstractFileNode.nameColLbl"), + NbBundle.getMessage(AbstractAbstractFileNode.class, "AbstractAbstractFileNode.createSheet.score.name"), + NbBundle.getMessage(AbstractAbstractFileNode.class, "AbstractAbstractFileNode.createSheet.comment.name"), + NbBundle.getMessage(AbstractAbstractFileNode.class, "AbstractAbstractFileNode.createSheet.count.name"), + NbBundle.getMessage(AbstractAbstractFileNode.class, "AbstractAbstractFileNode.sizeColLbl"), + NbBundle.getMessage(AbstractAbstractFileNode.class, "AbstractAbstractFileNode.mimeType"), + NbBundle.getMessage(AbstractAbstractFileNode.class, "AbstractAbstractFileNode.knownColLbl"))); + + //Remove all other props except for the ones above Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES); - if (sheetSet == null) { - sheetSet = Sheet.createPropertiesSet(); - sheet.put(sheetSet); + for(Property p : sheetSet.getProperties()) { + if(!keepProps.contains(p.getName())){ + sheetSet.remove(p.getName()); + } } - List tags = getContentTagsFromDatabase(); - - AbstractFile file = getContent(); - sheetSet.put(new NodeProperty<>("Name", "Name", "Name", file.getName())); - - addScoreProperty(sheetSet, tags); - - CorrelationAttributeInstance correlationAttribute = null; - if (EamDbUtil.useCentralRepo() && 
UserPreferences.hideCentralRepoCommentsAndOccurrences()== false) { - correlationAttribute = getCorrelationAttributeInstance(); - } - addCommentProperty(sheetSet, tags, correlationAttribute); - - if (EamDbUtil.useCentralRepo() && UserPreferences.hideCentralRepoCommentsAndOccurrences()== false) { - addCountProperty(sheetSet, correlationAttribute); - } - sheetSet.put(new NodeProperty<>("Size", "Size", "Size", file.getSize())); - sheetSet.put(new NodeProperty<>("Mime Type", "Mime Type", "Mime Type", StringUtils.defaultString(file.getMIMEType()))); - sheetSet.put(new NodeProperty<>("Known", "Known", "Known", file.getKnown().getName())); return sheet; } diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/PListViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/PListViewer.java index 506084f0c3..dd73b6f6ca 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/PListViewer.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/PListViewer.java @@ -74,7 +74,7 @@ class PListViewer extends javax.swing.JPanel implements FileTypeViewer, Explorer private final Outline outline; private ExplorerManager explorerManager; - private NSDictionary rootDict; + private NSObject rootDict; /** * Creates new form PListViewer @@ -415,22 +415,35 @@ class PListViewer extends javax.swing.JPanel implements FileTypeViewer, Explorer } /** - * Parses given binary stream and extracts Plist key/value + * Parses given binary stream and extracts Plist key/value. * - * @param plistbytes + * @param plistbytes The byte array containing the Plist data. * * @return list of PropKeyValue */ private List parsePList(final byte[] plistbytes) throws IOException, PropertyListFormatException, ParseException, ParserConfigurationException, SAXException { final List plist = new ArrayList<>(); - rootDict = (NSDictionary) PropertyListParser.parse(plistbytes); + rootDict = PropertyListParser.parse(plistbytes); - final String[] keys = rootDict.allKeys(); - for (final String key : keys) { - final PropKeyValue pkv = parseProperty(key, rootDict.objectForKey(key)); - if (null != pkv) { - plist.add(pkv); + /* + * Parse the data if the root is an NSArray or NSDictionary. Anything + * else is unexpected and will be ignored. 
+ */ + if (rootDict instanceof NSArray) { + for (int i=0; i < ((NSArray)rootDict).count(); i++) { + final PropKeyValue pkv = parseProperty("", ((NSArray)rootDict).objectAtIndex(i)); + if (null != pkv) { + plist.add(pkv); + } + } + } else if (rootDict instanceof NSDictionary) { + final String[] keys = ((NSDictionary)rootDict).allKeys(); + for (final String key : keys) { + final PropKeyValue pkv = parseProperty(key, ((NSDictionary)rootDict).objectForKey(key)); + if (null != pkv) { + plist.add(pkv); + } } } diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.java index f2be8c90c4..f2db7c2532 100755 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.java @@ -24,21 +24,15 @@ import java.awt.Cursor; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.SQLException; -import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashMap; -import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.function.Consumer; import java.util.logging.Level; import javax.swing.JComboBox; import javax.swing.JFileChooser; @@ -48,11 +42,11 @@ import org.apache.commons.io.FilenameUtils; import org.openide.util.NbBundle; import org.openide.windows.WindowManager; import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.coreutils.SQLiteTableReaderException; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.autopsy.coreutils.SQLiteTableReader; /** * A file content viewer for SQLite database files. @@ -66,8 +60,14 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer { private static final Logger logger = Logger.getLogger(FileViewer.class.getName()); private final SQLiteTableView selectedTableView = new SQLiteTableView(); private AbstractFile sqliteDbFile; - private File tmpDbFile; - private Connection connection; + + private SQLiteTableReader viewReader; + + private Map row = new LinkedHashMap<>(); + private List> pageOfTableRows = new ArrayList<>(); + private List currentTableHeader = new ArrayList<>(); + private String prevTableName; + private int numRows; // num of rows in the selected table private int currPage = 0; // curr page of rows being displayed @@ -264,18 +264,18 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer { }//GEN-LAST:event_tablesDropdownListActionPerformed /** - * The action when the Export Csv button is pressed. The file chooser window will pop - * up to choose where the user wants to save the csv file. The default location is case export directory. + * The action when the Export Csv button is pressed. The file chooser window + * will pop up to choose where the user wants to save the csv file. The + * default location is case export directory. 
* * @param evt the action event */ - @NbBundle.Messages({"SQLiteViewer.csvExport.fileName.empty=Please input a file name for exporting.", - "SQLiteViewer.csvExport.title=Export to csv file", - "SQLiteViewer.csvExport.confirm.msg=Do you want to overwrite the existing file?"}) + "SQLiteViewer.csvExport.title=Export to csv file", + "SQLiteViewer.csvExport.confirm.msg=Do you want to overwrite the existing file?"}) private void exportCsvButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_exportCsvButtonActionPerformed Case openCase = Case.getCurrentCase(); - File caseDirectory = new File(openCase.getExportDirectory()); + File caseDirectory = new File(openCase.getExportDirectory()); JFileChooser fileChooser = new JFileChooser(); fileChooser.setDragEnabled(false); fileChooser.setCurrentDirectory(caseDirectory); @@ -292,14 +292,14 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer { File file = fileChooser.getSelectedFile(); if (file.exists() && FilenameUtils.getExtension(file.getName()).equalsIgnoreCase("csv")) { if (JOptionPane.YES_OPTION == JOptionPane.showConfirmDialog(this, - Bundle.SQLiteViewer_csvExport_confirm_msg(), - Bundle.SQLiteViewer_csvExport_title(), + Bundle.SQLiteViewer_csvExport_confirm_msg(), + Bundle.SQLiteViewer_csvExport_title(), JOptionPane.YES_NO_OPTION)) { } else { return; - } + } } - + exportTableToCsv(file); } }//GEN-LAST:event_exportCsvButtonActionPerformed @@ -328,6 +328,7 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer { public void setFile(AbstractFile file) { WindowManager.getDefault().getMainWindow().setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); sqliteDbFile = file; + initReader(); processSQLiteFile(); WindowManager.getDefault().getMainWindow().setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } @@ -343,16 +344,15 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer { tablesDropdownList.removeAllItems(); numEntriesField.setText(""); - // close DB connection to file - if (null != connection) { - try { - connection.close(); - connection = null; - } catch (SQLException ex) { - logger.log(Level.SEVERE, "Failed to close DB connection to file.", ex); //NON-NLS - } + try { + viewReader.close(); + } catch (SQLiteTableReaderException ex) { + //Could not successfully close the reader, nothing we can do to recover. } - + row = new LinkedHashMap<>(); + pageOfTableRows = new ArrayList<>(); + currentTableHeader = new ArrayList<>(); + viewReader = null; sqliteDbFile = null; } @@ -368,17 +368,10 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer { "SQLiteViewer.errorMessage.failedToinitJDBCDriver=The JDBC driver for SQLite could not be loaded.", "# {0} - exception message", "SQLiteViewer.errorMessage.unexpectedError=An unexpected error occurred:\n{0).",}) private void processSQLiteFile() { - - tablesDropdownList.removeAllItems(); - try { - String localDiskPath = SqliteUtil.writeAbstractFileToLocalDisk(sqliteDbFile); - SqliteUtil.findAndCopySQLiteMetaFile(sqliteDbFile); - // Load the SQLite JDBC driver, if necessary. 
- Class.forName("org.sqlite.JDBC"); //NON-NLS - connection = DriverManager.getConnection("jdbc:sqlite:" + localDiskPath); //NON-NLS + tablesDropdownList.removeAllItems(); - Collection dbTablesMap = getTables(); + Collection dbTablesMap = viewReader.getTableNames(); if (dbTablesMap.isEmpty()) { tablesDropdownList.addItem(Bundle.SQLiteViewer_comboBox_noTableEntry()); tablesDropdownList.setEnabled(false); @@ -387,46 +380,20 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer { tablesDropdownList.addItem(tableName); }); } - } catch (ClassNotFoundException ex) { - logger.log(Level.SEVERE, String.format("Failed to initialize JDBC SQLite '%s' (objId=%d)", sqliteDbFile.getName(), sqliteDbFile.getId()), ex); //NON-NLS - MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_errorMessage_failedToinitJDBCDriver()); - } catch (SQLException ex) { - logger.log(Level.SEVERE, String.format("Failed to get tables from DB file '%s' (objId=%d)", sqliteDbFile.getName(), sqliteDbFile.getId()), ex); //NON-NLS + } catch (SQLiteTableReaderException ex) { + logger.log(Level.WARNING, String.format("Unable to get table names " + + "from sqlite file [%s] with id=[%d].", sqliteDbFile.getName(), + sqliteDbFile.getId(), ex.getMessage())); MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_errorMessage_failedToQueryDatabase()); - } catch (IOException | NoCurrentCaseException | TskCoreException ex) { - logger.log(Level.SEVERE, String.format("Failed to create temp copy of DB file '%s' (objId=%d)", sqliteDbFile.getName(), sqliteDbFile.getId()), ex); //NON-NLS - MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_errorMessage_failedToExtractFile()); } } - /** - * Gets a collection of table names from the SQLite database file. - * - * @return A collection of table names - */ - private Collection getTables() throws SQLException { - Collection tableNames = new LinkedList<>(); - try (Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery( - "SELECT name FROM sqlite_master " - + " WHERE type= 'table' ")){ - while (resultSet.next()) { - tableNames.add(resultSet.getString("name")); //NON-NLS - } - } - return tableNames; - } - @NbBundle.Messages({"# {0} - tableName", "SQLiteViewer.selectTable.errorText=Error getting row count for table: {0}" }) private void selectTable(String tableName) { - - try (Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery( - "SELECT count (*) as count FROM " + "\"" + tableName + "\"")) { //NON-NLS - - numRows = resultSet.getInt("count"); + try { + numRows = viewReader.getRowCount(tableName); numEntriesField.setText(numRows + " entries"); currPage = 1; @@ -442,25 +409,19 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer { } else { exportCsvButton.setEnabled(false); nextPageButton.setEnabled(false); - - //Execute a dummy SELECT * statement so that the metadata - //contains all column names - Map columnRow; - try (ResultSet metaDataResultSet = statement.executeQuery( - "SELECT * FROM " + "\"" + tableName + "\"")) { - //Column names are not found in the metadata of the result set - //above. 
- ResultSetMetaData metaData = metaDataResultSet.getMetaData(); - columnRow = new LinkedHashMap<>(); - for(int i = 1; i < metaData.getColumnCount(); i++){ - columnRow.put(metaData.getColumnName(i), ""); - } + + currentTableHeader = new ArrayList<>(); + viewReader.read(tableName); + Map columnRow = new LinkedHashMap<>(); + for(int i = 0; i< currentTableHeader.size(); i++){ + columnRow.put(currentTableHeader.get(i), ""); } - selectedTableView.setupTable(Collections.singletonList(columnRow)); - } - } catch (SQLException ex) { - logger.log(Level.SEVERE, String.format("Failed to load table %s from DB file '%s' (objId=%d)", tableName, sqliteDbFile.getName(), sqliteDbFile.getId()), ex); //NON-NLS + } + } catch (SQLiteTableReaderException ex) { + logger.log(Level.WARNING, String.format("Failed to load table %s " //NON-NLS + + "from DB file '%s' (objId=%d)", tableName, sqliteDbFile.getName(), //NON-NLS + sqliteDbFile.getId()), ex.getMessage()); MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_selectTable_errorText(tableName)); } } @@ -468,110 +429,192 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer { @NbBundle.Messages({"# {0} - tableName", "SQLiteViewer.readTable.errorText=Error getting rows for table: {0}"}) private void readTable(String tableName, int startRow, int numRowsToRead) { - - try ( - Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery( - "SELECT * FROM " + "\"" + tableName + "\"" - + " LIMIT " + Integer.toString(numRowsToRead) - + " OFFSET " + Integer.toString(startRow - 1))) { - - List> rows = resultSetToArrayList(resultSet); - if (Objects.nonNull(rows)) { - selectedTableView.setupTable(rows); - } else { - selectedTableView.setupTable(Collections.emptyList()); + try { + //If the table name has changed, then clear our table header. SQLiteTableReader + //will also detect the table name has changed and begin reading it as if it + //were a brand new table. 
+ if (!tableName.equals(prevTableName)) { + prevTableName = tableName; } - } catch (SQLException ex) { - logger.log(Level.SEVERE, String.format("Failed to read table %s from DB file '%s' (objId=%d)", tableName, sqliteDbFile.getName(), sqliteDbFile.getId()), ex); //NON-NLS + currentTableHeader = new ArrayList<>(); + viewReader.read(tableName, numRowsToRead, startRow - 1); + selectedTableView.setupTable(pageOfTableRows); + pageOfTableRows = new ArrayList<>(); + } catch (SQLiteTableReaderException ex) { + logger.log(Level.WARNING, String.format("Failed to read table %s from DB file '%s' " //NON-NLS + + "(objId=%d) starting at row [%d] and limit [%d]", //NON-NLS + tableName, sqliteDbFile.getName(), sqliteDbFile.getId(), + startRow - 1, numRowsToRead), ex.getMessage()); MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_readTable_errorText(tableName)); } } - @NbBundle.Messages("SQLiteViewer.BlobNotShown.message=BLOB Data not shown") - private List> resultSetToArrayList(ResultSet resultSet) throws SQLException { - ResultSetMetaData metaData = resultSet.getMetaData(); - int columns = metaData.getColumnCount(); - ArrayList> rowlist = new ArrayList<>(); - while (resultSet.next()) { - Map row = new LinkedHashMap<>(columns); - for (int i = 1; i <= columns; ++i) { - if (resultSet.getObject(i) == null) { - row.put(metaData.getColumnName(i), ""); - } else { - if (metaData.getColumnTypeName(i).compareToIgnoreCase("blob") == 0) { - row.put(metaData.getColumnName(i), Bundle.SQLiteViewer_BlobNotShown_message()); - } else { - row.put(metaData.getColumnName(i), resultSet.getObject(i)); - } - } - } - rowlist.add(row); - } - - return rowlist; + /** + * Creates a new SQLiteTableReader. This class will iterate through the + * table row by row and pass each value to the correct function based on its + * data type. For our use, we want to define an action when encountering + * column names and an action for all other data types. + */ + private void initReader() { + viewReader = new SQLiteTableReader.Builder(sqliteDbFile) + .onColumnNames((columnName) -> { + currentTableHeader.add(columnName); + }) + .forAll(getForAllStrategy()).build(); } - + + /** + * For every database value we encounter on our read of the table do the + * following: 1) Get the string representation of the value 2) Collect the + * values until we have a full database row. 3) If we have the full row, + * write it to the UI. + * + * rowIndex is purely for indicating if we have read the full row. + * + * @return Consumer that will perform the actions above. When the + * SQLiteTableReader is reading, values will be passed to this + * consumer. + */ + private Consumer getForAllStrategy() { + return new Consumer() { + private int rowIndex = 0; + + @Override + public void accept(Object t) { + rowIndex++; + String objectStr = (t instanceof byte[]) ? "BLOB Data not shown" + : Objects.toString(t, ""); + + row.put(currentTableHeader.get(rowIndex - 1), objectStr); + + //If we have built up a full database row, then add it to our page + //of rows to be displayed in the UI. 
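+                        //(rowIndex is 1-based after the increment above, so it equals the
+                        //header size exactly once per row; the modulo below then resets it
+                        //to 0 before the next row begins.)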
+ if (rowIndex == currentTableHeader.size()) { + pageOfTableRows.add(row); + row = new LinkedHashMap<>(); + } + rowIndex %= currentTableHeader.size(); + } + + }; + } + + private int totalColumnCount; + @NbBundle.Messages({"SQLiteViewer.exportTableToCsv.write.errText=Failed to export table content to csv file.", - "SQLiteViewer.exportTableToCsv.FileName=File name: ", - "SQLiteViewer.exportTableToCsv.TableName=Table name: " + "SQLiteViewer.exportTableToCsv.FileName=File name: ", + "SQLiteViewer.exportTableToCsv.TableName=Table name: " }) private void exportTableToCsv(File file) { + File csvFile = new File(file.toString() + ".csv"); String tableName = (String) this.tablesDropdownList.getSelectedItem(); - try ( - Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery("SELECT * FROM " + "\"" + tableName + "\"")) { - List> currentTableRows = resultSetToArrayList(resultSet); - - if (Objects.isNull(currentTableRows) || currentTableRows.isEmpty()) { - logger.log(Level.INFO, String.format("The table %s is empty. (objId=%d)", tableName, sqliteDbFile.getId())); //NON-NLS - } else { - File csvFile; - String fileName = file.getName(); - if (FilenameUtils.getExtension(fileName).equalsIgnoreCase("csv")) { - csvFile = file; - } else { - csvFile = new File(file.toString() + ".csv"); - } - - try (FileOutputStream out = new FileOutputStream(csvFile, false)) { - - out.write((Bundle.SQLiteViewer_exportTableToCsv_FileName() + csvFile.getName() + "\n").getBytes()); - out.write((Bundle.SQLiteViewer_exportTableToCsv_TableName() + tableName + "\n").getBytes()); - // Set up the column names - Map row = currentTableRows.get(0); - StringBuffer header = new StringBuffer(); - for (Map.Entry col : row.entrySet()) { - String colName = col.getKey(); - if (header.length() > 0) { - header.append(',').append(colName); - } else { - header.append(colName); - } - } - out.write(header.append('\n').toString().getBytes()); - - for (Map maps : currentTableRows) { - StringBuffer valueLine = new StringBuffer(); - maps.values().forEach((value) -> { - if (valueLine.length() > 0) { - valueLine.append(',').append(value.toString()); - } else { - valueLine.append(value.toString()); - } - }); - out.write(valueLine.append('\n').toString().getBytes()); - } - } + try (FileOutputStream out = new FileOutputStream(csvFile, false)) { + try (SQLiteTableReader sqliteStream = new SQLiteTableReader.Builder(sqliteDbFile) + .onColumnNames(getColumnNameCSVStrategy(out)) + .forAll(getForAllCSVStrategy(out)).build()) { + totalColumnCount = sqliteStream.getColumnCount(tableName); + sqliteStream.read(tableName); } - } catch (SQLException ex) { - logger.log(Level.SEVERE, String.format("Failed to read table %s from DB file '%s' (objId=%d)", tableName, sqliteDbFile.getName(), sqliteDbFile.getId()), ex); //NON-NLS - MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_readTable_errorText(tableName)); - } catch (IOException ex) { - logger.log(Level.SEVERE, String.format("Failed to export table %s to file '%s'", tableName, file.getName()), ex); //NON-NLS + } catch (IOException | SQLiteTableReaderException | RuntimeException ex) { + logger.log(Level.WARNING, String.format("Failed to export table [%s]" + + " to CSV in sqlite file '%s' (objId=%d)", tableName, sqliteDbFile.getName(), + sqliteDbFile.getId()), ex.getMessage()); //NON-NLS MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_exportTableToCsv_write_errText()); } } - + /** + * For every column name we encounter on our read of the table do the + * following: 1) Format 
the name so that it is comma seperated 2) Write the + * value to the output stream. + * + * columnIndex is purely for keeping track of where the column name is in + * the table so the value can be correctly formatted. + * + * @param out Output stream that this database table is being written to. + * + * @return Consumer that will perform the actions above. When the + * SQLiteTableReader is reading, values will be passed to this + * consumer. + */ + private Consumer getColumnNameCSVStrategy(FileOutputStream out) { + return new Consumer() { + private int columnIndex = 0; + + @Override + public void accept(String columnName) { + columnIndex++; + + //Format the value to adhere to the format of a CSV file + if (columnIndex == 1) { + columnName = "\"" + columnName + "\""; + } else { + columnName = ",\"" + columnName + "\""; + } + if (columnIndex == totalColumnCount) { + columnName += "\n"; + } + + try { + out.write(columnName.getBytes()); + } catch (IOException ex) { + /* + * If we can no longer write to the output stream, toss a + * runtime exception to get out of iteration. We explicitly + * catch this in exportTableToCsv() above. + */ + throw new RuntimeException(ex); + } + } + }; + } + + /** + * For every database value we encounter on our read of the table do the + * following: 1) Get the string representation of the value 2) Format it so + * that it adheres to the CSV format. 3) Write it to the output file. + * + * rowIndex is purely for keeping track of positioning of the database value + * in the row, so that it can be properly formatted. + * + * @param out Output file + * + * @return Consumer that will perform the actions above. When the + * SQLiteTableReader is reading, values will be passed to this + * consumer. + */ + private Consumer getForAllCSVStrategy(FileOutputStream out) { + return new Consumer() { + private int rowIndex = 0; + + @Override + public void accept(Object tableValue) { + rowIndex++; + //Substitute string representation of blob with placeholder text. + //Automatically wrap the value in quotes in case it contains commas. + String objectStr = (tableValue instanceof byte[]) + ? "BLOB Data not shown" : Objects.toString(tableValue, ""); + objectStr = "\"" + objectStr + "\""; + + if (rowIndex > 1) { + objectStr = "," + objectStr; + } + if (rowIndex == totalColumnCount) { + objectStr += "\n"; + } + + try { + out.write(objectStr.getBytes()); + } catch (IOException ex) { + /* + * If we can no longer write to the output stream, toss a + * runtime exception to get out of iteration. We explicitly + * catch this in exportTableToCsv() above. + */ + throw new RuntimeException(ex); + } + rowIndex = rowIndex % totalColumnCount; + } + }; + } } diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/SqliteUtil.java b/Core/src/org/sleuthkit/autopsy/contentviewers/SqliteUtil.java deleted file mode 100755 index 4fc220cf0d..0000000000 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/SqliteUtil.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2018-2018 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.contentviewers; - -import java.io.File; -import java.io.IOException; -import java.util.List; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; -import org.sleuthkit.autopsy.casemodule.services.FileManager; -import org.sleuthkit.autopsy.casemodule.services.Services; -import org.sleuthkit.autopsy.datamodel.ContentUtils; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.SleuthkitCase; -import org.sleuthkit.datamodel.TskCoreException; - -/** - * Sqlite utility class. Find and copy metafiles, write sqlite abstract files to - * temp directory, and generate unique temp directory paths. - */ -final class SqliteUtil { - - private SqliteUtil() { - - } - - /** - * Overloaded implementation of - * {@link #findAndCopySQLiteMetaFile(AbstractFile, String) findAndCopySQLiteMetaFile} - * , automatically tries to copy -wal and -shm files without needing to know - * their existence. - * - * @param sqliteFile file which has -wal and -shm meta files - * - * @throws NoCurrentCaseException Case has been closed. - * @throws TskCoreException fileManager cannot find AbstractFile - * files. - * @throws IOException Issue during writing to file. - */ - public static void findAndCopySQLiteMetaFile(AbstractFile sqliteFile) - throws NoCurrentCaseException, TskCoreException, IOException { - - findAndCopySQLiteMetaFile(sqliteFile, sqliteFile.getName() + "-wal"); - findAndCopySQLiteMetaFile(sqliteFile, sqliteFile.getName() + "-shm"); - } - - /** - * Searches for a meta file associated with the give SQLite database. If - * found, it copies this file into the temp directory of the current case. - * - * @param sqliteFile file being processed - * @param metaFileName name of meta file to look for - * - * @throws NoCurrentCaseException Case has been closed. - * @throws TskCoreException fileManager cannot find AbstractFile - * files. - * @throws IOException Issue during writing to file. - */ - public static void findAndCopySQLiteMetaFile(AbstractFile sqliteFile, - String metaFileName) throws NoCurrentCaseException, TskCoreException, IOException { - - Case openCase = Case.getCurrentCaseThrows(); - SleuthkitCase sleuthkitCase = openCase.getSleuthkitCase(); - Services services = new Services(sleuthkitCase); - FileManager fileManager = services.getFileManager(); - - List metaFiles = fileManager.findFiles( - sqliteFile.getDataSource(), metaFileName, - sqliteFile.getParent().getName()); - - if (metaFiles != null) { - for (AbstractFile metaFile : metaFiles) { - writeAbstractFileToLocalDisk(metaFile); - } - } - } - - /** - * Copies the file contents into a unique path in the current case temp - * directory. 
- * - * @param file AbstractFile from the data source - * - * @return The path of the file on disk - * - * @throws IOException Exception writing file contents - * @throws NoCurrentCaseException Current case closed during file copying - */ - public static String writeAbstractFileToLocalDisk(AbstractFile file) - throws IOException, NoCurrentCaseException { - - String localDiskPath = getUniqueTempDirectoryPath(file); - File localDatabaseFile = new File(localDiskPath); - if (!localDatabaseFile.exists()) { - ContentUtils.writeToFile(file, localDatabaseFile); - } - return localDiskPath; - } - - /** - * Generates a unique local disk path that resides in the temp directory of - * the current case. - * - * @param file The database abstract file - * - * @return Unique local disk path living in the temp directory of the case - * - * @throws org.sleuthkit.autopsy.casemodule.NoCurrentCaseException - */ - public static String getUniqueTempDirectoryPath(AbstractFile file) throws NoCurrentCaseException { - return Case.getCurrentCaseThrows().getTempDirectory() - + File.separator + file.getId() + file.getName(); - } -} diff --git a/Core/src/org/sleuthkit/autopsy/core/UserPreferences.java b/Core/src/org/sleuthkit/autopsy/core/UserPreferences.java index bd55626a1b..2be9a9b447 100644 --- a/Core/src/org/sleuthkit/autopsy/core/UserPreferences.java +++ b/Core/src/org/sleuthkit/autopsy/core/UserPreferences.java @@ -74,7 +74,8 @@ public final class UserPreferences { public static final String GROUP_ITEMS_IN_TREE_BY_DATASOURCE = "GroupItemsInTreeByDataSource"; //NON-NLS public static final String SHOW_ONLY_CURRENT_USER_TAGS = "ShowOnlyCurrentUserTags"; public static final String HIDE_CENTRAL_REPO_COMMENTS_AND_OCCURRENCES = "HideCentralRepoCommentsAndOccurrences"; - + public static final String DISPLAY_TRANSLATED_NAMES = "DisplayTranslatedNames"; + // Prevent instantiation. private UserPreferences() { } @@ -254,6 +255,14 @@ public final class UserPreferences { public static void setHideCentralRepoCommentsAndOccurrences(boolean value) { preferences.putBoolean(HIDE_CENTRAL_REPO_COMMENTS_AND_OCCURRENCES, value); } + + public static void setDisplayTranslatedFileNames(boolean value) { + preferences.putBoolean(DISPLAY_TRANSLATED_NAMES, value); + } + + public static boolean displayTranslatedFileNames() { + return preferences.getBoolean(DISPLAY_TRANSLATED_NAMES, false); + } /** * Reads persisted case database connection info. diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties b/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties index 97815e1748..f07d7b911f 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties @@ -165,30 +165,33 @@ AutopsyOptionsPanel.runtimePanel.border.title=Runtime DataResultPanel.matchLabel.text=Results DataResultPanel.numberOfChildNodesLabel.text=0 DataResultPanel.descriptionLabel.text=directoryPath -ViewPreferencesPanel.selectFileLabel.text=When selecting a file: -ViewPreferencesPanel.globalSettingsPanel.border.title=Global Settings -ViewPreferencesPanel.displayTimeLabel.text=When displaying times: -ViewPreferencesPanel.hideSlackFilesLabel.text=Hide slack files in the: ViewPreferencesPanel.groupByDataSourceCheckbox.text=Group by data source -ViewPreferencesPanel.hideKnownFilesLabel.text=Hide known files (i.e. 
those in the NIST NSRL) in the: -ViewPreferencesPanel.hideOtherUsersTagsCheckbox.text=Tags area in the tree ViewPreferencesPanel.currentCaseSettingsPanel.border.title=Current Case Settings OptionsCategory_Name_View=View OptionsCategory_Keywords_View=View -ViewPreferencesPanel.useBestViewerRadioButton.toolTipText=For example, change from Hex to Media when a JPEG is selected. -ViewPreferencesPanel.useBestViewerRadioButton.text=Change to the most specific file viewer -ViewPreferencesPanel.keepCurrentViewerRadioButton.toolTipText=For example, stay in Hex view when a JPEG is selected. -ViewPreferencesPanel.keepCurrentViewerRadioButton.text=Stay on the same file viewer -ViewPreferencesPanel.useLocalTimeRadioButton.text=Use local time zone -ViewPreferencesPanel.dataSourcesHideKnownCheckbox.text=Data Sources area (the directory hierarchy) -ViewPreferencesPanel.viewsHideKnownCheckbox.text=Views area -ViewPreferencesPanel.dataSourcesHideSlackCheckbox.text=Data Sources area (the directory hierarchy) -ViewPreferencesPanel.viewsHideSlackCheckbox.text=Views area ViewPreferencesPanel.currentSessionSettingsPanel.border.title=Current Session Settings ViewPreferencesPanel.hideRejectedResultsCheckbox.text=Hide rejected results -ViewPreferencesPanel.hideOtherUsersTagsLabel.text=Hide other users' tags in the: -ViewPreferencesPanel.centralRepoLabel.text=Do not use Central Repository for: -ViewPreferencesPanel.commentsOccurencesColumnsCheckbox.text=C(omments) and O(ccurences) columns to reduce loading times -ViewPreferencesPanel.deletedFilesLimitCheckbox.text=Limit to 10,000 +ViewPreferencesPanel.translateTextLabel.text=Translate text in the: +ViewPreferencesPanel.globalSettingsPanel.border.title=Global Settings +ViewPreferencesPanel.translateNamesInTableRadioButton.text=Table ViewPreferencesPanel.deletedFilesLimitLabel.text=Limit number of deleted files displayed: +ViewPreferencesPanel.deletedFilesLimitCheckbox.text=Limit to 10,000 +ViewPreferencesPanel.commentsOccurencesColumnsCheckbox.text=C(omments) and O(ccurences) columns +ViewPreferencesPanel.centralRepoLabel.text=Do not use Central Repository for: +ViewPreferencesPanel.hideOtherUsersTagsLabel.text=Hide other users' tags in the: +ViewPreferencesPanel.hideOtherUsersTagsCheckbox.text=Tags area in the tree ViewPreferencesPanel.useAnotherTimeRadioButton.text=Use another time zone +ViewPreferencesPanel.useLocalTimeRadioButton.text=Use local time zone +ViewPreferencesPanel.displayTimeLabel.text=When displaying times: +ViewPreferencesPanel.viewsHideSlackCheckbox.text=Views area +ViewPreferencesPanel.dataSourcesHideSlackCheckbox.text=Data Sources area (the directory hierarchy) +ViewPreferencesPanel.hideSlackFilesLabel.text=Hide slack files in the: +ViewPreferencesPanel.viewsHideKnownCheckbox.text=Views area +ViewPreferencesPanel.dataSourcesHideKnownCheckbox.text=Data Sources area (the directory hierarchy) +ViewPreferencesPanel.hideKnownFilesLabel.text=Hide known files (i.e. those in the NIST NSRL) in the: +ViewPreferencesPanel.keepCurrentViewerRadioButton.toolTipText=For example, stay in Hex view when a JPEG is selected. +ViewPreferencesPanel.keepCurrentViewerRadioButton.text=Stay on the same file viewer +ViewPreferencesPanel.useBestViewerRadioButton.toolTipText=For example, change from Hex to Media when a JPEG is selected. 
+ViewPreferencesPanel.useBestViewerRadioButton.text=Change to the most specific file viewer +ViewPreferencesPanel.selectFileLabel.text=When selecting a file: +ViewPreferencesPanel.commentsOccurencesColumnWrapAroundText.text=to reduce loading times diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle_ja.properties b/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle_ja.properties index c9172f03a0..7eba327056 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle_ja.properties +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle_ja.properties @@ -121,12 +121,12 @@ DataResultPanel.matchLabel.text=\u7d50\u679c DataResultPanel.numberOfChildNodesLabel.text=0 DataResultPanel.descriptionLabel.text=\u30c7\u30a3\u30ec\u30af\u30c8\u30ea\u30d1\u30b9 ViewPreferencesPanel.selectFileLabel.text=\u30d5\u30a1\u30a4\u30eb\u3092\u9078\u629e\u3059\u308b\u5834\u5408\uff1a +ViewPreferencesPanel.useLocalTimeRadioButton.text=\u30ed\u30fc\u30ab\u30eb\u30bf\u30a4\u30e0\u30be\u30fc\u30f3\u3092\u4f7f\u7528 ViewPreferencesPanel.displayTimeLabel.text=\u6642\u9593\u3092\u8868\u793a\u3059\u308b\u5834\u5408\uff1a +ViewPreferencesPanel.viewsHideKnownCheckbox.text=\u30d3\u30e5\u30fc\u30a8\u30ea\u30a2 +ViewPreferencesPanel.dataSourcesHideKnownCheckbox.text=\u30c7\u30fc\u30bf\u30bd\u30fc\u30b9\u30a8\u30ea\u30a2\uff08\u30c7\u30a3\u30ec\u30af\u30c8\u30ea\u968e\u5c64\uff09 ViewPreferencesPanel.hideKnownFilesLabel.text=\u65e2\u77e5\u30d5\u30a1\u30a4\u30eb\uff08NIST NSRL\u5185\u306e\uff09\u3092\u6b21\u306b\u96a0\u3059\uff1a +ViewPreferencesPanel.keepCurrentViewerRadioButton.toolTipText=\u4f8b\u3048\u3070\u3001JPEG\u304c\u9078\u629e\u3055\u308c\u305f\u5834\u5408\u306b\u305d\u306e\u307e\u307eHEX\u30d3\u30e5\u30fc\u3092\u4f7f\u7528\u3002 +ViewPreferencesPanel.keepCurrentViewerRadioButton.text=\u305d\u306e\u307e\u307e\u540c\u3058\u30d5\u30a1\u30a4\u30eb\u30d3\u30e5\u30fc\u30a2\u3092\u4f7f\u7528 ViewPreferencesPanel.useBestViewerRadioButton.toolTipText=\u4f8b\u3048\u3070\u3001JPEG\u304c\u9078\u629e\u3055\u308c\u305f\u5834\u5408\u306b\u306fHEX\u304b\u3089\u30e1\u30c7\u30a3\u30a2\u306b\u5909\u66f4\u3059\u308b\u3002 ViewPreferencesPanel.useBestViewerRadioButton.text=\u6700\u3082\u5c02\u9580\u7684\u306a\u30d5\u30a1\u30a4\u30eb\u30d3\u30e5\u30fc\u30a2\u306b\u5909\u66f4 -ViewPreferencesPanel.keepCurrentViewerRadioButton.text=\u305d\u306e\u307e\u307e\u540c\u3058\u30d5\u30a1\u30a4\u30eb\u30d3\u30e5\u30fc\u30a2\u3092\u4f7f\u7528 -ViewPreferencesPanel.keepCurrentViewerRadioButton.toolTipText=\u4f8b\u3048\u3070\u3001JPEG\u304c\u9078\u629e\u3055\u308c\u305f\u5834\u5408\u306b\u305d\u306e\u307e\u307eHEX\u30d3\u30e5\u30fc\u3092\u4f7f\u7528\u3002 -ViewPreferencesPanel.useLocalTimeRadioButton.text=\u30ed\u30fc\u30ab\u30eb\u30bf\u30a4\u30e0\u30be\u30fc\u30f3\u3092\u4f7f\u7528 -ViewPreferencesPanel.dataSourcesHideKnownCheckbox.text=\u30c7\u30fc\u30bf\u30bd\u30fc\u30b9\u30a8\u30ea\u30a2\uff08\u30c7\u30a3\u30ec\u30af\u30c8\u30ea\u968e\u5c64\uff09 -ViewPreferencesPanel.viewsHideKnownCheckbox.text=\u30d3\u30e5\u30fc\u30a8\u30ea\u30a2 diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/ViewPreferencesPanel.form b/Core/src/org/sleuthkit/autopsy/corecomponents/ViewPreferencesPanel.form index 711caf3b7b..1f9dfc4ceb 100755 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/ViewPreferencesPanel.form +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/ViewPreferencesPanel.form @@ -90,73 +90,70 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - + - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + @@ -183,9 +180,11 @@ + + - - + + @@ -202,11 +201,15 @@ + + + + - - + + @@ -343,16 +346,6 @@ - - - - - - - - - - @@ -360,6 +353,17 @@ + + + + + + + + + + + @@ -399,6 +403,30 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/ViewPreferencesPanel.java b/Core/src/org/sleuthkit/autopsy/corecomponents/ViewPreferencesPanel.java index 872a7d6d6d..5678b0f7cb 100755 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/ViewPreferencesPanel.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/ViewPreferencesPanel.java @@ -29,6 +29,7 @@ import org.sleuthkit.autopsy.core.UserPreferences; import org.sleuthkit.autopsy.coreutils.TimeZoneUtils; import org.sleuthkit.autopsy.deletedFiles.DeletedFilePreferences; import org.sleuthkit.autopsy.directorytree.DirectoryTreeTopComponent; +import org.sleuthkit.autopsy.texttranslation.TextTranslationService; /** * Panel for configuring view preferences. @@ -70,9 +71,14 @@ public class ViewPreferencesPanel extends JPanel implements OptionsPanel { viewsHideSlackCheckbox.setSelected(UserPreferences.hideSlackFilesInViewsTree()); commentsOccurencesColumnsCheckbox.setEnabled(EamDbUtil.useCentralRepo()); + commentsOccurencesColumnWrapAroundText.setEnabled(EamDbUtil.useCentralRepo()); commentsOccurencesColumnsCheckbox.setSelected(UserPreferences.hideCentralRepoCommentsAndOccurrences()); deletedFilesLimitCheckbox.setSelected(DeletedFilePreferences.getDefault().getShouldLimitDeletedFiles()); + translateNamesInTableRadioButton.setSelected(UserPreferences.displayTranslatedFileNames()); + + TextTranslationService tts = TextTranslationService.getInstance(); + translateNamesInTableRadioButton.setEnabled(tts.hasProvider()); // Current Case Settings boolean caseIsOpen = Case.isCaseOpen(); @@ -99,6 +105,7 @@ public class ViewPreferencesPanel extends JPanel implements OptionsPanel { UserPreferences.setHideSlackFilesInViewsTree(viewsHideSlackCheckbox.isSelected()); UserPreferences.setShowOnlyCurrentUserTags(hideOtherUsersTagsCheckbox.isSelected()); UserPreferences.setHideCentralRepoCommentsAndOccurrences(commentsOccurencesColumnsCheckbox.isSelected()); + UserPreferences.setDisplayTranslatedFileNames(translateNamesInTableRadioButton.isSelected()); storeGroupItemsInTreeByDataSource(); @@ -147,12 +154,15 @@ public class ViewPreferencesPanel extends JPanel implements OptionsPanel { useAnotherTimeRadioButton = new javax.swing.JRadioButton(); hideOtherUsersTagsCheckbox = new javax.swing.JCheckBox(); hideOtherUsersTagsLabel = new javax.swing.JLabel(); - commentsOccurencesColumnsCheckbox = new javax.swing.JCheckBox(); centralRepoLabel = new javax.swing.JLabel(); + commentsOccurencesColumnsCheckbox = new javax.swing.JCheckBox(); deletedFilesLimitCheckbox = new javax.swing.JCheckBox(); deletedFilesLimitLabel = new javax.swing.JLabel(); jScrollPane1 = new javax.swing.JScrollPane(); timeZoneList = new javax.swing.JList<>(); + translateTextLabel = new javax.swing.JLabel(); + commentsOccurencesColumnWrapAroundText = new javax.swing.JLabel(); + translateNamesInTableRadioButton = new javax.swing.JRadioButton(); currentCaseSettingsPanel = new javax.swing.JPanel(); groupByDataSourceCheckbox = new javax.swing.JCheckBox(); currentSessionSettingsPanel = new javax.swing.JPanel(); @@ -240,15 +250,16 @@ public class ViewPreferencesPanel extends JPanel implements OptionsPanel { org.openide.awt.Mnemonics.setLocalizedText(hideOtherUsersTagsLabel, 
org.openide.util.NbBundle.getMessage(ViewPreferencesPanel.class, "ViewPreferencesPanel.hideOtherUsersTagsLabel.text")); // NOI18N + org.openide.awt.Mnemonics.setLocalizedText(centralRepoLabel, org.openide.util.NbBundle.getMessage(ViewPreferencesPanel.class, "ViewPreferencesPanel.centralRepoLabel.text")); // NOI18N + org.openide.awt.Mnemonics.setLocalizedText(commentsOccurencesColumnsCheckbox, org.openide.util.NbBundle.getMessage(ViewPreferencesPanel.class, "ViewPreferencesPanel.commentsOccurencesColumnsCheckbox.text")); // NOI18N + commentsOccurencesColumnsCheckbox.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING); commentsOccurencesColumnsCheckbox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { commentsOccurencesColumnsCheckboxActionPerformed(evt); } }); - org.openide.awt.Mnemonics.setLocalizedText(centralRepoLabel, org.openide.util.NbBundle.getMessage(ViewPreferencesPanel.class, "ViewPreferencesPanel.centralRepoLabel.text")); // NOI18N - org.openide.awt.Mnemonics.setLocalizedText(deletedFilesLimitCheckbox, org.openide.util.NbBundle.getMessage(ViewPreferencesPanel.class, "ViewPreferencesPanel.deletedFilesLimitCheckbox.text")); // NOI18N deletedFilesLimitCheckbox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { @@ -265,6 +276,17 @@ public class ViewPreferencesPanel extends JPanel implements OptionsPanel { }); jScrollPane1.setViewportView(timeZoneList); + org.openide.awt.Mnemonics.setLocalizedText(translateTextLabel, org.openide.util.NbBundle.getMessage(ViewPreferencesPanel.class, "ViewPreferencesPanel.translateTextLabel.text")); // NOI18N + + org.openide.awt.Mnemonics.setLocalizedText(commentsOccurencesColumnWrapAroundText, org.openide.util.NbBundle.getMessage(ViewPreferencesPanel.class, "ViewPreferencesPanel.commentsOccurencesColumnWrapAroundText.text")); // NOI18N + + org.openide.awt.Mnemonics.setLocalizedText(translateNamesInTableRadioButton, org.openide.util.NbBundle.getMessage(ViewPreferencesPanel.class, "ViewPreferencesPanel.translateNamesInTableRadioButton.text")); // NOI18N + translateNamesInTableRadioButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + translateNamesInTableRadioButtonActionPerformed(evt); + } + }); + javax.swing.GroupLayout globalSettingsPanelLayout = new javax.swing.GroupLayout(globalSettingsPanel); globalSettingsPanel.setLayout(globalSettingsPanelLayout); globalSettingsPanelLayout.setHorizontalGroup( @@ -273,52 +295,52 @@ public class ViewPreferencesPanel extends JPanel implements OptionsPanel { .addContainerGap() .addGroup(globalSettingsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(globalSettingsPanelLayout.createSequentialGroup() - .addGap(10, 10, 10) - .addGroup(globalSettingsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(commentsOccurencesColumnsCheckbox) - .addComponent(hideOtherUsersTagsCheckbox) - .addGroup(globalSettingsPanelLayout.createSequentialGroup() - .addComponent(deletedFilesLimitCheckbox, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) - .addGap(399, 399, 399)))) + .addComponent(centralRepoLabel) + .addGap(135, 135, 135) + .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 272, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addComponent(hideOtherUsersTagsLabel) 
.addGroup(globalSettingsPanelLayout.createSequentialGroup() .addGroup(globalSettingsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(globalSettingsPanelLayout.createSequentialGroup() - .addComponent(centralRepoLabel) - .addGap(135, 135, 135) - .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 272, javax.swing.GroupLayout.PREFERRED_SIZE)) - .addGroup(globalSettingsPanelLayout.createSequentialGroup() + .addComponent(hideKnownFilesLabel) + .addGroup(globalSettingsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addGroup(globalSettingsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(globalSettingsPanelLayout.createSequentialGroup() + .addGap(10, 10, 10) .addGroup(globalSettingsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(hideKnownFilesLabel) - .addGroup(globalSettingsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) - .addGroup(globalSettingsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(globalSettingsPanelLayout.createSequentialGroup() - .addGap(10, 10, 10) - .addGroup(globalSettingsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(dataSourcesHideSlackCheckbox) - .addComponent(viewsHideSlackCheckbox))) - .addComponent(hideSlackFilesLabel)) - .addGroup(globalSettingsPanelLayout.createSequentialGroup() - .addGap(10, 10, 10) - .addGroup(globalSettingsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(dataSourcesHideKnownCheckbox) - .addComponent(viewsHideKnownCheckbox))))) - .addGap(18, 18, 18) - .addGroup(globalSettingsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(displayTimeLabel) - .addGroup(globalSettingsPanelLayout.createSequentialGroup() - .addGap(10, 10, 10) - .addGroup(globalSettingsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(keepCurrentViewerRadioButton) - .addComponent(useBestViewerRadioButton) - .addComponent(useLocalTimeRadioButton) - .addComponent(useAnotherTimeRadioButton))) - .addComponent(selectFileLabel))) - .addComponent(hideOtherUsersTagsLabel) - .addComponent(deletedFilesLimitLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 215, javax.swing.GroupLayout.PREFERRED_SIZE)) - .addGap(0, 0, Short.MAX_VALUE))) - .addContainerGap()))) + .addComponent(dataSourcesHideSlackCheckbox) + .addComponent(viewsHideSlackCheckbox))) + .addComponent(hideSlackFilesLabel)) + .addGroup(globalSettingsPanelLayout.createSequentialGroup() + .addGap(10, 10, 10) + .addGroup(globalSettingsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(dataSourcesHideKnownCheckbox) + .addComponent(viewsHideKnownCheckbox)))) + .addGroup(globalSettingsPanelLayout.createSequentialGroup() + .addGap(10, 10, 10) + .addComponent(commentsOccurencesColumnsCheckbox)) + .addGroup(globalSettingsPanelLayout.createSequentialGroup() + .addGap(32, 32, 32) + .addComponent(commentsOccurencesColumnWrapAroundText))) + .addGap(18, 18, 18) + .addGroup(globalSettingsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(displayTimeLabel) + .addComponent(selectFileLabel) + .addComponent(translateTextLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 120, javax.swing.GroupLayout.PREFERRED_SIZE) + .addGroup(globalSettingsPanelLayout.createSequentialGroup() + .addGap(10, 10, 10) + 
.addGroup(globalSettingsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(keepCurrentViewerRadioButton) + .addComponent(useBestViewerRadioButton) + .addComponent(useLocalTimeRadioButton) + .addComponent(useAnotherTimeRadioButton) + .addComponent(translateNamesInTableRadioButton))))) + .addComponent(deletedFilesLimitLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 215, javax.swing.GroupLayout.PREFERRED_SIZE) + .addGroup(globalSettingsPanelLayout.createSequentialGroup() + .addGap(10, 10, 10) + .addGroup(globalSettingsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(hideOtherUsersTagsCheckbox) + .addComponent(deletedFilesLimitCheckbox, javax.swing.GroupLayout.DEFAULT_SIZE, 567, Short.MAX_VALUE)))) + .addContainerGap()) ); globalSettingsPanelLayout.setVerticalGroup( globalSettingsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) @@ -343,9 +365,11 @@ public class ViewPreferencesPanel extends JPanel implements OptionsPanel { .addComponent(hideOtherUsersTagsCheckbox) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(centralRepoLabel) + .addGap(3, 3, 3) + .addComponent(commentsOccurencesColumnsCheckbox, javax.swing.GroupLayout.PREFERRED_SIZE, 18, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(commentsOccurencesColumnsCheckbox) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addComponent(commentsOccurencesColumnWrapAroundText) + .addGap(11, 11, 11) .addComponent(deletedFilesLimitLabel)) .addGroup(globalSettingsPanelLayout.createSequentialGroup() .addComponent(selectFileLabel) @@ -360,10 +384,14 @@ public class ViewPreferencesPanel extends JPanel implements OptionsPanel { .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(useAnotherTimeRadioButton) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 67, javax.swing.GroupLayout.PREFERRED_SIZE))) + .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 67, javax.swing.GroupLayout.PREFERRED_SIZE) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(translateTextLabel) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(translateNamesInTableRadioButton))) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(deletedFilesLimitCheckbox, javax.swing.GroupLayout.PREFERRED_SIZE, 33, javax.swing.GroupLayout.PREFERRED_SIZE) - .addGap(0, 0, 0)) + .addComponent(deletedFilesLimitCheckbox, javax.swing.GroupLayout.PREFERRED_SIZE, 26, javax.swing.GroupLayout.PREFERRED_SIZE) + .addContainerGap(8, Short.MAX_VALUE)) ); currentCaseSettingsPanel.setBorder(javax.swing.BorderFactory.createTitledBorder(org.openide.util.NbBundle.getMessage(ViewPreferencesPanel.class, "ViewPreferencesPanel.currentCaseSettingsPanel.border.title"))); // NOI18N @@ -453,88 +481,6 @@ public class ViewPreferencesPanel extends JPanel implements OptionsPanel { ); }// //GEN-END:initComponents - private void useBestViewerRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_useBestViewerRadioButtonActionPerformed - useBestViewerRadioButton.setSelected(true); - keepCurrentViewerRadioButton.setSelected(false); - if (immediateUpdates) { - UserPreferences.setKeepPreferredContentViewer(false); - } else { - 
firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); - } - }//GEN-LAST:event_useBestViewerRadioButtonActionPerformed - - private void keepCurrentViewerRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_keepCurrentViewerRadioButtonActionPerformed - useBestViewerRadioButton.setSelected(false); - keepCurrentViewerRadioButton.setSelected(true); - if (immediateUpdates) { - UserPreferences.setKeepPreferredContentViewer(true); - } else { - firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); - } - }//GEN-LAST:event_keepCurrentViewerRadioButtonActionPerformed - - private void useLocalTimeRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_useLocalTimeRadioButtonActionPerformed - useLocalTimeRadioButton.setSelected(true); - useAnotherTimeRadioButton.setSelected(false); - timeZoneList.setEnabled(false); - if (immediateUpdates) { - UserPreferences.setDisplayTimesInLocalTime(true); - } else { - firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); - } - }//GEN-LAST:event_useLocalTimeRadioButtonActionPerformed - - private void useAnotherTimeRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_useAnotherTimeRadioButtonActionPerformed - useLocalTimeRadioButton.setSelected(false); - useAnotherTimeRadioButton.setSelected(true); - timeZoneList.setEnabled(true); - if (immediateUpdates) { - UserPreferences.setDisplayTimesInLocalTime(false); - } else { - firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); - } - }//GEN-LAST:event_useAnotherTimeRadioButtonActionPerformed - - private void dataSourcesHideKnownCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_dataSourcesHideKnownCheckboxActionPerformed - if (immediateUpdates) { - UserPreferences.setHideKnownFilesInDataSourcesTree(dataSourcesHideKnownCheckbox.isSelected()); - } else { - firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); - } - }//GEN-LAST:event_dataSourcesHideKnownCheckboxActionPerformed - - private void viewsHideKnownCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_viewsHideKnownCheckboxActionPerformed - if (immediateUpdates) { - UserPreferences.setHideKnownFilesInViewsTree(viewsHideKnownCheckbox.isSelected()); - } else { - firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); - } - }//GEN-LAST:event_viewsHideKnownCheckboxActionPerformed - - private void dataSourcesHideSlackCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_dataSourcesHideSlackCheckboxActionPerformed - if (immediateUpdates) { - UserPreferences.setHideSlackFilesInDataSourcesTree(dataSourcesHideSlackCheckbox.isSelected()); - } else { - firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); - } - }//GEN-LAST:event_dataSourcesHideSlackCheckboxActionPerformed - - private void viewsHideSlackCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_viewsHideSlackCheckboxActionPerformed - if (immediateUpdates) { - UserPreferences.setHideSlackFilesInViewsTree(viewsHideSlackCheckbox.isSelected()); - } else { - firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); - } - }//GEN-LAST:event_viewsHideSlackCheckboxActionPerformed - - private void hideOtherUsersTagsCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_hideOtherUsersTagsCheckboxActionPerformed - if (immediateUpdates) { - UserPreferences.setShowOnlyCurrentUserTags(hideOtherUsersTagsCheckbox.isSelected()); - } else { - 
firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); - } - }//GEN-LAST:event_hideOtherUsersTagsCheckboxActionPerformed - private void groupByDataSourceCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_groupByDataSourceCheckboxActionPerformed if (immediateUpdates) { storeGroupItemsInTreeByDataSource(); @@ -551,21 +497,13 @@ public class ViewPreferencesPanel extends JPanel implements OptionsPanel { } }//GEN-LAST:event_hideRejectedResultsCheckboxActionPerformed - private void commentsOccurencesColumnsCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_commentsOccurencesColumnsCheckboxActionPerformed + private void translateNamesInTableRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_translateNamesInTableRadioButtonActionPerformed if (immediateUpdates) { - UserPreferences.setHideCentralRepoCommentsAndOccurrences(commentsOccurencesColumnsCheckbox.isSelected()); + UserPreferences.setDisplayTranslatedFileNames(translateNamesInTableRadioButton.isSelected()); } else { firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); } - }//GEN-LAST:event_commentsOccurencesColumnsCheckboxActionPerformed - - private void deletedFilesLimitCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_deletedFilesLimitCheckboxActionPerformed - if (immediateUpdates) { - DeletedFilePreferences.getDefault().setShouldLimitDeletedFiles(deletedFilesLimitCheckbox.isSelected()); - } else { - firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); - } - }//GEN-LAST:event_deletedFilesLimitCheckboxActionPerformed + }//GEN-LAST:event_translateNamesInTableRadioButtonActionPerformed private void timeZoneListValueChanged(javax.swing.event.ListSelectionEvent evt) {//GEN-FIRST:event_timeZoneListValueChanged if (immediateUpdates && useAnotherTimeRadioButton.isSelected()) { @@ -575,9 +513,108 @@ public class ViewPreferencesPanel extends JPanel implements OptionsPanel { } }//GEN-LAST:event_timeZoneListValueChanged + private void deletedFilesLimitCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_deletedFilesLimitCheckboxActionPerformed + if (immediateUpdates) { + DeletedFilePreferences.getDefault().setShouldLimitDeletedFiles(deletedFilesLimitCheckbox.isSelected()); + } else { + firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); + } + }//GEN-LAST:event_deletedFilesLimitCheckboxActionPerformed + + private void commentsOccurencesColumnsCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_commentsOccurencesColumnsCheckboxActionPerformed + if (immediateUpdates) { + UserPreferences.setHideCentralRepoCommentsAndOccurrences(commentsOccurencesColumnsCheckbox.isSelected()); + } else { + firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); + } + }//GEN-LAST:event_commentsOccurencesColumnsCheckboxActionPerformed + + private void hideOtherUsersTagsCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_hideOtherUsersTagsCheckboxActionPerformed + if (immediateUpdates) { + UserPreferences.setShowOnlyCurrentUserTags(hideOtherUsersTagsCheckbox.isSelected()); + } else { + firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); + } + }//GEN-LAST:event_hideOtherUsersTagsCheckboxActionPerformed + + private void useAnotherTimeRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_useAnotherTimeRadioButtonActionPerformed + useLocalTimeRadioButton.setSelected(false); + 
useAnotherTimeRadioButton.setSelected(true); + timeZoneList.setEnabled(true); + if (immediateUpdates) { + UserPreferences.setDisplayTimesInLocalTime(false); + } else { + firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); + } + }//GEN-LAST:event_useAnotherTimeRadioButtonActionPerformed + + private void useLocalTimeRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_useLocalTimeRadioButtonActionPerformed + useLocalTimeRadioButton.setSelected(true); + useAnotherTimeRadioButton.setSelected(false); + timeZoneList.setEnabled(false); + if (immediateUpdates) { + UserPreferences.setDisplayTimesInLocalTime(true); + } else { + firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); + } + }//GEN-LAST:event_useLocalTimeRadioButtonActionPerformed + + private void viewsHideSlackCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_viewsHideSlackCheckboxActionPerformed + if (immediateUpdates) { + UserPreferences.setHideSlackFilesInViewsTree(viewsHideSlackCheckbox.isSelected()); + } else { + firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); + } + }//GEN-LAST:event_viewsHideSlackCheckboxActionPerformed + + private void dataSourcesHideSlackCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_dataSourcesHideSlackCheckboxActionPerformed + if (immediateUpdates) { + UserPreferences.setHideSlackFilesInDataSourcesTree(dataSourcesHideSlackCheckbox.isSelected()); + } else { + firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); + } + }//GEN-LAST:event_dataSourcesHideSlackCheckboxActionPerformed + + private void viewsHideKnownCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_viewsHideKnownCheckboxActionPerformed + if (immediateUpdates) { + UserPreferences.setHideKnownFilesInViewsTree(viewsHideKnownCheckbox.isSelected()); + } else { + firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); + } + }//GEN-LAST:event_viewsHideKnownCheckboxActionPerformed + + private void dataSourcesHideKnownCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_dataSourcesHideKnownCheckboxActionPerformed + if (immediateUpdates) { + UserPreferences.setHideKnownFilesInDataSourcesTree(dataSourcesHideKnownCheckbox.isSelected()); + } else { + firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); + } + }//GEN-LAST:event_dataSourcesHideKnownCheckboxActionPerformed + + private void keepCurrentViewerRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_keepCurrentViewerRadioButtonActionPerformed + useBestViewerRadioButton.setSelected(false); + keepCurrentViewerRadioButton.setSelected(true); + if (immediateUpdates) { + UserPreferences.setKeepPreferredContentViewer(true); + } else { + firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); + } + }//GEN-LAST:event_keepCurrentViewerRadioButtonActionPerformed + + private void useBestViewerRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_useBestViewerRadioButtonActionPerformed + useBestViewerRadioButton.setSelected(true); + keepCurrentViewerRadioButton.setSelected(false); + if (immediateUpdates) { + UserPreferences.setKeepPreferredContentViewer(false); + } else { + firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); + } + }//GEN-LAST:event_useBestViewerRadioButtonActionPerformed + // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JLabel centralRepoLabel; + private javax.swing.JLabel 
commentsOccurencesColumnWrapAroundText; private javax.swing.JCheckBox commentsOccurencesColumnsCheckbox; private javax.swing.JPanel currentCaseSettingsPanel; private javax.swing.JPanel currentSessionSettingsPanel; @@ -597,6 +634,8 @@ public class ViewPreferencesPanel extends JPanel implements OptionsPanel { private javax.swing.JRadioButton keepCurrentViewerRadioButton; private javax.swing.JLabel selectFileLabel; private javax.swing.JList timeZoneList; + private javax.swing.JRadioButton translateNamesInTableRadioButton; + private javax.swing.JLabel translateTextLabel; private javax.swing.JRadioButton useAnotherTimeRadioButton; private javax.swing.JRadioButton useBestViewerRadioButton; private javax.swing.JRadioButton useLocalTimeRadioButton; @@ -605,4 +644,4 @@ public class ViewPreferencesPanel extends JPanel implements OptionsPanel { private javax.swing.JCheckBox viewsHideKnownCheckbox; private javax.swing.JCheckBox viewsHideSlackCheckbox; // End of variables declaration//GEN-END:variables -} +} \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/SQLiteTableReader.java b/Core/src/org/sleuthkit/autopsy/coreutils/SQLiteTableReader.java new file mode 100755 index 0000000000..2d950b90f4 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/coreutils/SQLiteTableReader.java @@ -0,0 +1,561 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2018-2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.coreutils; + +import java.io.File; +import java.io.IOException; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.function.BooleanSupplier; +import java.util.function.Consumer; +import java.util.logging.Level; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.casemodule.services.FileManager; +import org.sleuthkit.autopsy.casemodule.services.Services; +import org.sleuthkit.autopsy.datamodel.ContentUtils; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Reads row by row through SQLite tables and performs user-defined actions on + * the row values. Table values are processed by data type. Users configure + * these actions for certain data types in the Builder. 
Example usage: + * + * SQLiteTableReader reader = new SQLiteTableReader.Builder(file) + * .onInteger((i) + * -> { System.out.println(i); }) + * .build(); + * + * reader.read(tableName); + * + * or + * + * SQLiteTableReader reader = new SQLiteTableReader.Builder(file) + * .onInteger(new Consumer() { + * @Override public void accept(Integer i) { + * System.out.println(i); + * } + * }).build(); + * + * reader.reader(tableName); + * + * Invocation of read(String tableName) reads row by row. When an Integer is + * encountered, its value will be passed to the Consumer that was defined above. + */ +public class SQLiteTableReader implements AutoCloseable { + + /** + * Builder patten for configuring SQLiteTableReader instances. + */ + public static class Builder { + + private final AbstractFile file; + + private Consumer onColumnNameAction; + private Consumer onStringAction; + private Consumer onLongAction; + private Consumer onIntegerAction; + private Consumer onFloatAction; + private Consumer onBlobAction; + private Consumer forAllAction; + + static Consumer doNothing() { + return NOOP -> {}; + } + + /** + * Creates a Builder for this abstract file. + * + * @param file + */ + public Builder(AbstractFile file) { + this.file = file; + + this.onColumnNameAction = Builder.doNothing(); + this.onStringAction = Builder.doNothing(); + this.onLongAction = Builder.doNothing(); + this.onIntegerAction = Builder.doNothing(); + this.onFloatAction = Builder.doNothing(); + this.onBlobAction = Builder.doNothing(); + this.forAllAction = Builder.doNothing(); + } + + /** + * Specify a function to do on column names. Column names will be read + * from left to right. + * + * @param action Consumer of column name strings + * + * @return Builder reference + */ + public Builder onColumnNames(Consumer action) { + this.onColumnNameAction = action; + return this; + } + + /** + * Specify a function to do when encountering a database value that is + * of java type String. + * + * @param action Consumer of strings + * + * @return Builder reference + */ + public Builder onString(Consumer action) { + this.onStringAction = action; + return this; + } + + /** + * Specify a function to do when encountering a database value that is + * of java type Integer. + * + * @param action Consumer of integer + * + * @return Builder reference + */ + public Builder onInteger(Consumer action) { + this.onIntegerAction = action; + return this; + } + + /** + * Specify a function to do when encountering a database value that is + * of java type Double. + * + * @param action Consumer of doubles + * + * @return Builder reference + */ + public Builder onFloat(Consumer action) { + this.onFloatAction = action; + return this; + } + + /** + * Specify a function to do when encountering a database value that is + * of java type Long. + * + * @param action Consumer of longs + * + * @return Builder reference + */ + public Builder onLong(Consumer action) { + this.onLongAction = action; + return this; + } + + /** + * Specify a function to do when encountering a database value that is + * of java type byte[] aka blob. + * + * @param action Consumer of blobs + * + * @return Builder reference + */ + public Builder onBlob(Consumer action) { + this.onBlobAction = action; + return this; + } + + /** + * Specify a function to do when encountering any database value, + * regardless of type. This function only captures database values, not + * column names. 
+ * + * @param action Consumer of objects + * + * @return Builder reference + */ + public Builder forAll(Consumer action) { + this.forAllAction = action; + return this; + } + + /** + * Creates a SQLiteTableReader instance given this Builder + * configuration. + * + * @return SQLiteTableReader instance + */ + public SQLiteTableReader build() { + return new SQLiteTableReader(this); + } + } + + private final AbstractFile file; + private final Builder builder; + + private static final String SELECT_ALL_QUERY = "SELECT * FROM \"%s\""; + private static final Logger logger = Logger.getLogger(SQLiteTableReader.class.getName()); + + private Connection conn; + private PreparedStatement statement; + private ResultSet queryResults; + private ResultSetMetaData currentMetadata; + + //Iteration state + private int currRowColumnIndex; + private int columnNameIndex; + private int totalColumnCount; + private boolean unfinishedRow; + private boolean liveResultSet; + private String prevTableName; + + /** + * Holds reference to the builder instance so that we can use its actions + * during iteration. + */ + private SQLiteTableReader(Builder builder) { + this.builder = builder; + this.file = builder.file; + } + + /** + * Fetches all table names from the database. + * + * @return List of all table names found while querying the sqlite_master + * table + * + * @throws SQLiteTableReaderException + */ + public List getTableNames() throws SQLiteTableReaderException { + ensureOpen(); + try (ResultSet tableNameResult = conn.createStatement() + .executeQuery("SELECT name FROM sqlite_master " + + " WHERE type= 'table' ")) { + List tableNames = new ArrayList<>(); + while (tableNameResult.next()) { + tableNames.add(tableNameResult.getString("name")); //NON-NLS + } + return tableNames; + } catch (SQLException ex) { + throw new SQLiteTableReaderException(ex); + } + } + + /** + * Fetches the row count. + * + * @param tableName Source table to count + * + * @return Count as an integer + * + * @throws SQLiteTableReaderException + */ + public int getRowCount(String tableName) throws SQLiteTableReaderException { + ensureOpen(); + try (ResultSet countResult = conn.createStatement() + .executeQuery("SELECT count (*) as count FROM " + + "\"" + tableName + "\"")) { + return countResult.getInt("count"); + } catch (SQLException ex) { + throw new SQLiteTableReaderException(ex); + } + } + + /** + * Fetches the column count of the table. + * + * @param tableName Source table to count + * + * @return Count as an integer + * + * @throws SQLiteTableReaderException + */ + public int getColumnCount(String tableName) throws SQLiteTableReaderException { + ensureOpen(); + try (ResultSet columnCount = conn.createStatement() + .executeQuery(String.format(SELECT_ALL_QUERY, tableName))) { + return columnCount.getMetaData().getColumnCount(); + } catch (SQLException ex) { + throw new SQLiteTableReaderException(ex); + } + } + + /** + * Reads column names and values from the table. Only actions that were + * configured in the Builder will be invoked during iteration. Iteration + * will stop when the table read has completed or an exception was + * encountered. + * + * @param tableName Source table to read + * + * @throws SQLiteTableReaderException + */ + public void read(String tableName) throws SQLiteTableReaderException { + readHelper(String.format(SELECT_ALL_QUERY, tableName), () -> false); + } + + /** + * Reads column names and values from the table. Only actions that were + * configured in the Builder will be invoked during iteration. 
Iteration + * will stop when the table read has completed or an exception was + * encountered. + * + * @param tableName Source table to perform a read + * @param limit Number of rows to read from the table + * @param offset Starting row to read from in the table + * + * @throws SQLiteTableReaderException + * + */ + public void read(String tableName, int limit, int offset) throws SQLiteTableReaderException { + readHelper(String.format(SELECT_ALL_QUERY, tableName) + " LIMIT " + limit + + " OFFSET " + offset, () -> false); + } + + /** + * Reads column names and values from the table. Iteration will stop when + * the condition is true. + * + * @param tableName Source table to perform a read + * @param condition Condition to stop iteration when true + * + * @throws SQLiteTableReaderException + * + */ + public void read(String tableName, BooleanSupplier condition) throws SQLiteTableReaderException { + if (Objects.isNull(prevTableName) || !prevTableName.equals(tableName)) { + prevTableName = tableName; + closeTableResources(); + } + readHelper(String.format(SELECT_ALL_QUERY, tableName), condition); + } + + /** + * Performs the result set iteration and is responsible for maintaining + * state of the read over multiple invocations. + * + * @throws SQLiteTableReaderException + */ + private void readHelper(String query, BooleanSupplier condition) throws SQLiteTableReaderException { + try { + if (!liveResultSet) { + openTableResources(query); + columnNameIndex = 0; + } + + //Process column names before reading the database table values + while (columnNameIndex < totalColumnCount) { + if (condition.getAsBoolean()) { + return; + } + builder.onColumnNameAction.accept(currentMetadata + .getColumnName(++columnNameIndex)); + } + + while (unfinishedRow || queryResults.next()) { + while (currRowColumnIndex < totalColumnCount) { + if (condition.getAsBoolean()) { + unfinishedRow = true; + return; + } + + Object item = queryResults.getObject(++currRowColumnIndex); + if (item instanceof String) { + builder.onStringAction.accept((String) item); + } else if (item instanceof Integer) { + builder.onIntegerAction.accept((Integer) item); + } else if (item instanceof Double) { + builder.onFloatAction.accept((Double) item); + } else if (item instanceof Long) { + builder.onLongAction.accept((Long) item); + } else if (item instanceof byte[]) { + builder.onBlobAction.accept((byte[]) item); + } + + builder.forAllAction.accept(item); + } + unfinishedRow = false; + //Wrap column index back around if we've reached the end of the row + currRowColumnIndex = currRowColumnIndex % totalColumnCount; + } + closeTableResources(); + } catch (SQLException ex) { + closeTableResources(); + throw new SQLiteTableReaderException(ex); + } + } + + /** + * Ensures that the underlying database connection is open. This entails + * copying the abstract file contents to temp directory, copying over any + * WAL or SHM files and getting the connection from the DriverManager. 
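The BooleanSupplier overload, together with the retained iteration state, is what makes paused and resumed reads possible (for example, to feed a paged viewer). A hedged sketch (java.util imports assumed); the 500 and 1000 cell caps and the "messages" table are arbitrary:

    List<Object> cells = new ArrayList<>();
    SQLiteTableReader pagedReader = new SQLiteTableReader.Builder(dbFile)
            .forAll(cells::add)
            .build();
    try {
        pagedReader.read("messages", () -> cells.size() >= 500);      // pauses mid-table
        // ... hand the first batch to the UI ...
        if (!pagedReader.isFinished()) {
            pagedReader.read("messages", () -> cells.size() >= 1000); // resumes where it stopped
        }
        // Stateless windowing is also available: pagedReader.read("messages", 100, 0);
    } catch (SQLiteTableReaderException ex) {
        // Handle or rethrow as appropriate for the caller.
    }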
+ * + * @throws SQLiteTableReaderException + */ + private void ensureOpen() throws SQLiteTableReaderException { + if (Objects.isNull(conn)) { + try { + Class.forName("org.sqlite.JDBC"); //NON-NLS + String localDiskPath = copyFileToTempDirectory(file, file.getId()); + + //Find and copy both WAL and SHM meta files + findAndCopySQLiteMetaFile(file, file.getName() + "-wal"); + findAndCopySQLiteMetaFile(file, file.getName() + "-shm"); + conn = DriverManager.getConnection("jdbc:sqlite:" + localDiskPath); + } catch (NoCurrentCaseException | TskCoreException | IOException + | ClassNotFoundException | SQLException ex) { + throw new SQLiteTableReaderException(ex); + } + } + } + + /** + * Searches for a meta file associated with the give SQLite database. If + * found, it copies this file into the temp directory of the current case. + * + * @param sqliteFile file being processed + * @param metaFileName name of meta file to look for + * + * @throws NoCurrentCaseException Case has been closed. + * @throws TskCoreException fileManager cannot find AbstractFile + * files. + * @throws IOException Issue during writing to file. + */ + private void findAndCopySQLiteMetaFile(AbstractFile sqliteFile, + String metaFileName) throws NoCurrentCaseException, TskCoreException, IOException { + + Case openCase = Case.getCurrentCaseThrows(); + SleuthkitCase sleuthkitCase = openCase.getSleuthkitCase(); + Services services = new Services(sleuthkitCase); + FileManager fileManager = services.getFileManager(); + + List metaFiles = fileManager.findFiles( + sqliteFile.getDataSource(), metaFileName, + sqliteFile.getParent().getName()); + + if (metaFiles != null) { + for (AbstractFile metaFile : metaFiles) { + copyFileToTempDirectory(metaFile, sqliteFile.getId()); + } + } + } + + /** + * Copies the file contents into a unique path in the current case temp + * directory. + * + * @param file AbstractFile from the data source + * @param id The input files id value + * + * @return The path of the file on disk + * + * @throws IOException Exception writing file contents + * @throws NoCurrentCaseException Current case closed during file copying + */ + private String copyFileToTempDirectory(AbstractFile file, long fileId) + throws IOException, NoCurrentCaseException { + + String localDiskPath = Case.getCurrentCaseThrows().getTempDirectory() + + File.separator + fileId + file.getName(); + File localDatabaseFile = new File(localDiskPath); + if (!localDatabaseFile.exists()) { + ContentUtils.writeToFile(file, localDatabaseFile); + } + return localDiskPath; + } + + /** + * Executes the query and assigns resource references to instance variables. + * + * @param query Input query to execute + * + * @throws SQLiteTableReaderException + */ + private void openTableResources(String query) throws SQLiteTableReaderException { + try { + ensureOpen(); + statement = conn.prepareStatement(query); + queryResults = statement.executeQuery(); + currentMetadata = queryResults.getMetaData(); + totalColumnCount = currentMetadata.getColumnCount(); + liveResultSet = true; + } catch (SQLException ex) { + throw new SQLiteTableReaderException(ex); + } + } + + /** + * Ensures both the statement and the result set for a table are closed. 
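Since ensureOpen() copies the database and any -wal/-shm companions into the case temp directory and then holds a live JDBC connection, callers are expected to release those resources; because the class is AutoCloseable, try-with-resources is the natural shape. A sketch with a hypothetical "history" table:

    try (SQLiteTableReader scopedReader = new SQLiteTableReader.Builder(dbFile)
            .onString(System.out::println)
            .build()) {
        scopedReader.read("history");
    } catch (SQLiteTableReaderException ex) {
        // Both read() and close() report failures through this exception.
    }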
+ */ + private void closeTableResources() { + try { + if (Objects.nonNull(statement)) { + statement.close(); + } + if (Objects.nonNull(queryResults)) { + queryResults.close(); + } + liveResultSet = false; + } catch (SQLException ex) { + logger.log(Level.SEVERE, "Failed to close table resources", ex); + } + } + + /** + * Closes all resources attached to the database file. + * + * @throws SQLiteTableReaderException + */ + @Override + public void close() throws SQLiteTableReaderException { + try { + if (Objects.nonNull(conn)) { + conn.close(); + } + } catch (SQLException ex) { + throw new SQLiteTableReaderException(ex); + } + } + + /** + * Provides status of the current read operation. + * + * @return + */ + public boolean isFinished() { + return !liveResultSet; + } + + /** + * Last ditch effort to close the connections during garbage collection. + * + * @throws Throwable + */ + @Override + protected void finalize() throws Throwable { + try { + close(); + } catch (SQLiteTableReaderException ex) { + logger.log(Level.SEVERE, "Failed to close reader in finalizer", ex); + } + super.finalize(); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/SQLiteTableReaderException.java b/Core/src/org/sleuthkit/autopsy/coreutils/SQLiteTableReaderException.java new file mode 100755 index 0000000000..63f907cc36 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/coreutils/SQLiteTableReaderException.java @@ -0,0 +1,44 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2018-2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.coreutils; + +/** + * Provides a system exception for the SQLiteTableReader class. + */ +public class SQLiteTableReaderException extends Exception { + + /** + * Accepts both a message and a parent exception. + * + * @param msg Message detailing the cause + * @param parentEx Parent exception + */ + public SQLiteTableReaderException(String msg, Throwable parentEx) { + super(msg, parentEx); + } + + /** + * Accepts only a parent exception. 
+ * + * @param parentEx Parent exception + */ + public SQLiteTableReaderException(Throwable parentEx) { + super(parentEx); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/AbstractAbstractFileNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/AbstractAbstractFileNode.java index 5f8ea3d63f..9a9c52ab39 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/AbstractAbstractFileNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/AbstractAbstractFileNode.java @@ -18,16 +18,22 @@ */ package org.sleuthkit.autopsy.datamodel; +import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; +import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; import java.util.logging.Level; import java.util.stream.Collectors; +import org.apache.commons.io.FilenameUtils; import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.Pair; import org.openide.nodes.Children; import org.openide.nodes.Sheet; import org.openide.util.NbBundle; @@ -43,13 +49,18 @@ import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil; import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb; import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException; import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbUtil; +import org.sleuthkit.autopsy.core.UserPreferences; +import org.sleuthkit.autopsy.corecomponents.DataResultViewerTable; +import org.sleuthkit.autopsy.corecomponents.DataResultViewerTable.HasCommentStatus; import org.sleuthkit.autopsy.corecomponents.DataResultViewerTable.Score; import org.sleuthkit.autopsy.coreutils.Logger; -import static org.sleuthkit.autopsy.datamodel.AbstractAbstractFileNode.AbstractFilePropertyType.*; import static org.sleuthkit.autopsy.datamodel.Bundle.*; -import org.sleuthkit.autopsy.corecomponents.DataResultViewerTable.HasCommentStatus; +import static org.sleuthkit.autopsy.datamodel.AbstractAbstractFileNode.AbstractFilePropertyType.*; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.ingest.ModuleContentEvent; +import org.sleuthkit.autopsy.texttranslation.NoServiceProviderException; +import org.sleuthkit.autopsy.texttranslation.TextTranslationService; +import org.sleuthkit.autopsy.texttranslation.TranslationException; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.Content; @@ -71,6 +82,9 @@ public abstract class AbstractAbstractFileNode extends A private static final Set CASE_EVENTS_OF_INTEREST = EnumSet.of(Case.Events.CURRENT_CASE, Case.Events.CONTENT_TAG_ADDED, Case.Events.CONTENT_TAG_DELETED, Case.Events.CR_COMMENT_CHANGED); + private static final ExecutorService translationPool; + private static final Integer MAX_POOL_SIZE = 10; + /** * @param abstractFile file to wrap */ @@ -90,6 +104,13 @@ public abstract class AbstractAbstractFileNode extends A Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, weakPcl); } + static { + //Initialize this pool only once! This will be used by every instance of AAFN + //to do their heavy duty SCO column and translation updates. 
+ translationPool = Executors.newFixedThreadPool(MAX_POOL_SIZE, + new ThreadFactoryBuilder().setNameFormat("translation-task-thread-%d").build()); + } + /** * The finalizer removes event listeners as the BlackboardArtifactNode is * being garbage collected. Yes, we know that finalizers are considered to @@ -110,6 +131,16 @@ public abstract class AbstractAbstractFileNode extends A Case.removeEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, weakPcl); } + /** + * Event signals to indicate the background tasks have completed processing. + * Currently, we have one property task in the background: + * + * 1) Retreiving the translation of the file name + */ + enum NodeSpecificEvents { + TRANSLATION_AVAILABLE, + } + private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> { String eventType = evt.getPropertyName(); @@ -146,24 +177,51 @@ public abstract class AbstractAbstractFileNode extends A // case was closed. Remove listeners so that we don't get called with a stale case handle removeListeners(); } + /* + * No need to do any asynchrony around tag added, deleted or CR + * change events, they are so infrequent and user driven that we can + * just keep a simple blocking approach, where we go out to the + * database ourselves. + */ } else if (eventType.equals(Case.Events.CONTENT_TAG_ADDED.toString())) { ContentTagAddedEvent event = (ContentTagAddedEvent) evt; if (event.getAddedTag().getContent().equals(content)) { - updateSheet(); + List tags = getContentTagsFromDatabase(); + Pair scorePropAndDescr = getScorePropertyAndDescription(tags); + Score value = scorePropAndDescr.getLeft(); + String descr = scorePropAndDescr.getRight(); + CorrelationAttributeInstance attribute = getCorrelationAttributeInstance(); + updateSheet(new NodeProperty<>(SCORE.toString(),SCORE.toString(),descr,value), + new NodeProperty<>(COMMENT.toString(),COMMENT.toString(),NO_DESCR,getCommentProperty(tags, attribute)) + ); } } else if (eventType.equals(Case.Events.CONTENT_TAG_DELETED.toString())) { ContentTagDeletedEvent event = (ContentTagDeletedEvent) evt; if (event.getDeletedTagInfo().getContentID() == content.getId()) { - updateSheet(); + List tags = getContentTagsFromDatabase(); + Pair scorePropAndDescr = getScorePropertyAndDescription(tags); + Score value = scorePropAndDescr.getLeft(); + String descr = scorePropAndDescr.getRight(); + CorrelationAttributeInstance attribute = getCorrelationAttributeInstance(); + updateSheet(new NodeProperty<>(SCORE.toString(), SCORE.toString(),descr,value), + new NodeProperty<>(COMMENT.toString(), COMMENT.toString(),NO_DESCR,getCommentProperty(tags, attribute)) + ); } } else if (eventType.equals(Case.Events.CR_COMMENT_CHANGED.toString())) { CommentChangedEvent event = (CommentChangedEvent) evt; if (event.getContentID() == content.getId()) { - updateSheet(); + List tags = getContentTagsFromDatabase(); + CorrelationAttributeInstance attribute = getCorrelationAttributeInstance(); + updateSheet(new NodeProperty<>(COMMENT.toString(), COMMENT.toString(),NO_DESCR,getCommentProperty(tags, attribute))); } + /* + * Data that was being computed in the background task. Kicked off by a + * call to createSheet(). + */ + } else if (eventType.equals(NodeSpecificEvents.TRANSLATION_AVAILABLE.toString())) { + updateSheet(new NodeProperty<>(TRANSLATION.toString(),TRANSLATION.toString(),NO_DESCR,evt.getNewValue())); } }; - /** * We pass a weak reference wrapper around the listener to the event * publisher. 
This allows Netbeans to delete the node when the user @@ -174,11 +232,70 @@ public abstract class AbstractAbstractFileNode extends A */ private final PropertyChangeListener weakPcl = WeakListeners.propertyChange(pcl, null); - private void updateSheet() { - this.setSheet(createSheet()); + /** + * Updates the values of the properties in the current property sheet with + * the new properties being passed in. Only if that property exists in the + * current sheet will it be applied. That way, we allow for subclasses to + * add their own (or omit some!) properties and we will not accidentally + * disrupt their UI. + * + * Race condition if not synchronized. Only one update should be applied at + * a time. + * + * @param newProps New file property instances to be updated in the current + * sheet. + */ + private synchronized void updateSheet(NodeProperty... newProps) { + //Refresh ONLY those properties in the sheet currently. Subclasses may have + //only added a subset of our properties or their own props. Let's keep their UI correct. + Sheet visibleSheet = this.getSheet(); + Sheet.Set visibleSheetSet = visibleSheet.get(Sheet.PROPERTIES); + Property[] visibleProps = visibleSheetSet.getProperties(); + for(NodeProperty newProp: newProps) { + for(int i = 0; i < visibleProps.length; i++) { + if(visibleProps[i].getName().equals(newProp.getName())) { + visibleProps[i] = newProp; + } + } + } + visibleSheetSet.put(visibleProps); + visibleSheet.put(visibleSheetSet); + //setSheet() will notify Netbeans to update this node in the UI. + this.setSheet(visibleSheet); + } + + /* + * This is called when the node is first initialized. Any new updates or + * changes happen by directly manipulating the sheet. That means we can fire + * off background events everytime this method is called and not worry about + * duplicated jobs. + */ + @Override + protected synchronized Sheet createSheet() { + Sheet sheet = new Sheet(); + Sheet.Set sheetSet = Sheet.createPropertiesSet(); + sheet.put(sheetSet); + + //This will fire off fresh background tasks. + List> newProperties = getProperties(); + newProperties.forEach((property) -> { + sheetSet.put(property); + }); + + /* + * Submit the translation task ASAP. Keep all weak references so + * this task doesn't block the ability of this node to be GC'd. 
+ */ + translationPool.submit(new TranslationTask(new WeakReference<>(this), weakPcl)); + + return sheet; } @NbBundle.Messages({"AbstractAbstractFileNode.nameColLbl=Name", + "AbstractAbstractFileNode.translateFileName=Translated Name", + "AbstractAbstractFileNode.createSheet.score.name=S", + "AbstractAbstractFileNode.createSheet.comment.name=C", + "AbstractAbstractFileNode.createSheet.count.name=O", "AbstractAbstractFileNode.locationColLbl=Location", "AbstractAbstractFileNode.modifiedTimeColLbl=Modified Time", "AbstractAbstractFileNode.changeTimeColLbl=Change Time", @@ -202,6 +319,10 @@ public abstract class AbstractAbstractFileNode extends A public enum AbstractFilePropertyType { NAME(AbstractAbstractFileNode_nameColLbl()), + TRANSLATION(AbstractAbstractFileNode_translateFileName()), + SCORE(AbstractAbstractFileNode_createSheet_score_name()), + COMMENT(AbstractAbstractFileNode_createSheet_comment_name()), + OCCURRENCES(AbstractAbstractFileNode_createSheet_count_name()), LOCATION(AbstractAbstractFileNode_locationColLbl()), MOD_TIME(AbstractAbstractFileNode_modifiedTimeColLbl()), CHANGED_TIME(AbstractAbstractFileNode_changeTimeColLbl()), @@ -235,12 +356,270 @@ public abstract class AbstractAbstractFileNode extends A } } + /** + * Creates and populates a list of properties for this nodes property sheet. + */ + private List> getProperties() { + List> properties = new ArrayList<>(); + properties.add(new NodeProperty<>(NAME.toString(), NAME.toString(), NO_DESCR, getContentDisplayName(content))); + /* + * Initialize an empty place holder value. At the bottom, we kick off a + * background task that promises to update these values. + */ + + if (UserPreferences.displayTranslatedFileNames()) { + properties.add(new NodeProperty<>(TRANSLATION.toString(), TRANSLATION.toString(), NO_DESCR, "")); + } + + //SCO column prereq info.. 
+ List tags = getContentTagsFromDatabase(); + CorrelationAttributeInstance attribute = getCorrelationAttributeInstance(); + + Pair scoreAndDescription = getScorePropertyAndDescription(tags); + properties.add(new NodeProperty<>(SCORE.toString(), SCORE.toString(), scoreAndDescription.getRight(), scoreAndDescription.getLeft())); + DataResultViewerTable.HasCommentStatus comment = getCommentProperty(tags, attribute); + properties.add(new NodeProperty<>(COMMENT.toString(), COMMENT.toString(), NO_DESCR, comment)); + if (!UserPreferences.hideCentralRepoCommentsAndOccurrences()) { + Pair countAndDescription = getCountPropertyAndDescription(attribute); + properties.add(new NodeProperty<>(OCCURRENCES.toString(), OCCURRENCES.toString(), countAndDescription.getRight(), countAndDescription.getLeft())); + } + properties.add(new NodeProperty<>(LOCATION.toString(), LOCATION.toString(), NO_DESCR, getContentPath(content))); + properties.add(new NodeProperty<>(MOD_TIME.toString(), MOD_TIME.toString(), NO_DESCR, ContentUtils.getStringTime(content.getMtime(), content))); + properties.add(new NodeProperty<>(CHANGED_TIME.toString(), CHANGED_TIME.toString(), NO_DESCR, ContentUtils.getStringTime(content.getCtime(), content))); + properties.add(new NodeProperty<>(ACCESS_TIME.toString(), ACCESS_TIME.toString(), NO_DESCR, ContentUtils.getStringTime(content.getAtime(), content))); + properties.add(new NodeProperty<>(CREATED_TIME.toString(), CREATED_TIME.toString(), NO_DESCR, ContentUtils.getStringTime(content.getCrtime(), content))); + properties.add(new NodeProperty<>(SIZE.toString(), SIZE.toString(), NO_DESCR, content.getSize())); + properties.add(new NodeProperty<>(FLAGS_DIR.toString(), FLAGS_DIR.toString(), NO_DESCR, content.getDirFlagAsString())); + properties.add(new NodeProperty<>(FLAGS_META.toString(), FLAGS_META.toString(), NO_DESCR, content.getMetaFlagsAsString())); + properties.add(new NodeProperty<>(MODE.toString(), MODE.toString(), NO_DESCR, content.getModesAsString())); + properties.add(new NodeProperty<>(USER_ID.toString(), USER_ID.toString(), NO_DESCR, content.getUid())); + properties.add(new NodeProperty<>(GROUP_ID.toString(), GROUP_ID.toString(), NO_DESCR, content.getGid())); + properties.add(new NodeProperty<>(META_ADDR.toString(), META_ADDR.toString(), NO_DESCR, content.getMetaAddr())); + properties.add(new NodeProperty<>(ATTR_ADDR.toString(), ATTR_ADDR.toString(), NO_DESCR, content.getAttrType().getValue() + "-" + content.getAttributeId())); + properties.add(new NodeProperty<>(TYPE_DIR.toString(), TYPE_DIR.toString(), NO_DESCR, content.getDirType().getLabel())); + properties.add(new NodeProperty<>(TYPE_META.toString(), TYPE_META.toString(), NO_DESCR, content.getMetaType().toString())); + properties.add(new NodeProperty<>(KNOWN.toString(), KNOWN.toString(), NO_DESCR, content.getKnown().getName())); + properties.add(new NodeProperty<>(MD5HASH.toString(), MD5HASH.toString(), NO_DESCR, StringUtils.defaultString(content.getMd5Hash()))); + properties.add(new NodeProperty<>(ObjectID.toString(), ObjectID.toString(), NO_DESCR, content.getId())); + properties.add(new NodeProperty<>(MIMETYPE.toString(), MIMETYPE.toString(), NO_DESCR, StringUtils.defaultString(content.getMIMEType()))); + properties.add(new NodeProperty<>(EXTENSION.toString(), EXTENSION.toString(), NO_DESCR, content.getNameExtension())); + + return properties; + } + + /** + * Used by subclasses of AbstractAbstractFileNode to add the tags property + * to their sheets. 
+ * + * @param sheetSet the modifiable Sheet.Set returned by + * Sheet.get(Sheet.PROPERTIES) + * + * @deprecated + */ + @NbBundle.Messages("AbstractAbstractFileNode.tagsProperty.displayName=Tags") + @Deprecated + protected void addTagProperty(Sheet.Set sheetSet) { + List tags = getContentTagsFromDatabase(); + sheetSet.put(new NodeProperty<>("Tags", AbstractAbstractFileNode_tagsProperty_displayName(), + NO_DESCR, tags.stream().map(t -> t.getName().getDisplayName()) + .distinct() + .collect(Collectors.joining(", ")))); + } + + /** + * Gets a comma-separated values list of the names of the hash sets + * currently identified as including a given file. + * + * @param file The file. + * + * @return The CSV list of hash set names. + * + * @deprecated + */ + @Deprecated + protected static String getHashSetHitsCsvList(AbstractFile file) { + try { + return StringUtils.join(file.getHashSetNames(), ", "); + } catch (TskCoreException tskCoreException) { + logger.log(Level.WARNING, "Error getting hashset hits: ", tskCoreException); //NON-NLS + return ""; + } + } + + @NbBundle.Messages({ + "AbstractAbstractFileNode.createSheet.count.displayName=O", + "AbstractAbstractFileNode.createSheet.count.noCentralRepo.description=Central repository was not enabled when this column was populated", + "AbstractAbstractFileNode.createSheet.count.hashLookupNotRun.description=Hash lookup had not been run on this file when the column was populated", + "# {0} - occuranceCount", + "AbstractAbstractFileNode.createSheet.count.description=There were {0} datasource(s) found with occurances of the correlation value"}) + Pair getCountPropertyAndDescription(CorrelationAttributeInstance attribute) { + Long count = -1L; //The column renderer will not display negative values, negative value used when count unavailble to preserve sorting + String description = Bundle.AbstractAbstractFileNode_createSheet_count_noCentralRepo_description(); + try { + //don't perform the query if there is no correlation value + if (attribute != null && StringUtils.isNotBlank(attribute.getCorrelationValue())) { + count = EamDb.getInstance().getCountUniqueCaseDataSourceTuplesHavingTypeValue(attribute.getCorrelationType(), attribute.getCorrelationValue()); + description = Bundle.AbstractAbstractFileNode_createSheet_count_description(count); + } else if (attribute != null) { + description = Bundle.AbstractAbstractFileNode_createSheet_count_hashLookupNotRun_description(); + } + } catch (EamDbException ex) { + logger.log(Level.WARNING, "Error getting count of datasources with correlation attribute", ex); + } catch (CorrelationAttributeNormalizationException ex) { + logger.log(Level.WARNING, "Unable to normalize data to get count of datasources with correlation attribute", ex); + } + + return Pair.of(count, description); + } + + @NbBundle.Messages({ + "AbstractAbstractFileNode.createSheet.score.displayName=S", + "AbstractAbstractFileNode.createSheet.notableFile.description=File recognized as notable.", + "AbstractAbstractFileNode.createSheet.interestingResult.description=File has interesting result associated with it.", + "AbstractAbstractFileNode.createSheet.taggedFile.description=File has been tagged.", + "AbstractAbstractFileNode.createSheet.notableTaggedFile.description=File tagged with notable tag.", + "AbstractAbstractFileNode.createSheet.noScore.description=No score"}) + Pair getScorePropertyAndDescription(List tags) { + DataResultViewerTable.Score score = DataResultViewerTable.Score.NO_SCORE; + String description = ""; + if (content.getKnown() == 
TskData.FileKnown.BAD) { + score = DataResultViewerTable.Score.NOTABLE_SCORE; + description = Bundle.AbstractAbstractFileNode_createSheet_notableFile_description(); + } + try { + if (score == DataResultViewerTable.Score.NO_SCORE && !content.getArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT).isEmpty()) { + score = DataResultViewerTable.Score.INTERESTING_SCORE; + description = Bundle.AbstractAbstractFileNode_createSheet_interestingResult_description(); + } + } catch (TskCoreException ex) { + logger.log(Level.WARNING, "Error getting artifacts for file: " + content.getName(), ex); + } + if (!tags.isEmpty() && (score == DataResultViewerTable.Score.NO_SCORE || score == DataResultViewerTable.Score.INTERESTING_SCORE)) { + score = DataResultViewerTable.Score.INTERESTING_SCORE; + description = Bundle.AbstractAbstractFileNode_createSheet_taggedFile_description(); + for (ContentTag tag : tags) { + if (tag.getName().getKnownStatus() == TskData.FileKnown.BAD) { + score = DataResultViewerTable.Score.NOTABLE_SCORE; + description = Bundle.AbstractAbstractFileNode_createSheet_notableTaggedFile_description(); + break; + } + } + } + return Pair.of(score, description); + } + + @NbBundle.Messages({ + "AbstractAbstractFileNode.createSheet.comment.displayName=C"}) + HasCommentStatus getCommentProperty(List tags, CorrelationAttributeInstance attribute) { + + DataResultViewerTable.HasCommentStatus status = !tags.isEmpty() ? DataResultViewerTable.HasCommentStatus.TAG_NO_COMMENT : DataResultViewerTable.HasCommentStatus.NO_COMMENT; + + for (ContentTag tag : tags) { + if (!StringUtils.isBlank(tag.getComment())) { + //if the tag is null or empty or contains just white space it will indicate there is not a comment + status = DataResultViewerTable.HasCommentStatus.TAG_COMMENT; + break; + } + } + if (attribute != null && !StringUtils.isBlank(attribute.getComment())) { + if (status == DataResultViewerTable.HasCommentStatus.TAG_COMMENT) { + status = DataResultViewerTable.HasCommentStatus.CR_AND_TAG_COMMENTS; + } else { + status = DataResultViewerTable.HasCommentStatus.CR_COMMENT; + } + } + return status; + } + + /** + * Translates this nodes content name. Doesn't attempt translation if + * the name is in english or if there is now translation service available. + */ + String getTranslatedFileName() { + //If already in complete English, don't translate. + if (content.getName().matches("^\\p{ASCII}+$")) { + return ""; + } + TextTranslationService tts = TextTranslationService.getInstance(); + if (tts.hasProvider()) { + //Seperate out the base and ext from the contents file name. + String base = FilenameUtils.getBaseName(content.getName()); + try { + String translation = tts.translate(base); + String ext = FilenameUtils.getExtension(content.getName()); + + //If we have no extension, then we shouldn't add the . + String extensionDelimiter = (ext.isEmpty()) ? "" : "."; + + //Talk directly to this nodes pcl, fire an update when the translation + //is complete. 
+ if (!translation.isEmpty()) { + return translation + extensionDelimiter + ext; + } + } catch (NoServiceProviderException noServiceEx) { + logger.log(Level.WARNING, "Translate unsuccessful because no TextTranslator " + + "implementation was provided.", noServiceEx.getMessage()); + } catch (TranslationException noTranslationEx) { + logger.log(Level.WARNING, "Could not successfully translate file name " + + content.getName(), noTranslationEx.getMessage()); + } + } + return ""; + } + + /** + * Get all tags from the case database that are associated with the file + * + * @return a list of tags that are associated with the file + */ + List getContentTagsFromDatabase() { + List tags = new ArrayList<>(); + try { + tags.addAll(Case.getCurrentCaseThrows().getServices().getTagsManager().getContentTagsByContent(content)); + } catch (TskCoreException | NoCurrentCaseException ex) { + logger.log(Level.SEVERE, "Failed to get tags for content " + content.getName(), ex); + } + return tags; + } + + CorrelationAttributeInstance getCorrelationAttributeInstance() { + CorrelationAttributeInstance attribute = null; + if (EamDbUtil.useCentralRepo()) { + attribute = EamArtifactUtil.getInstanceFromContent(content); + } + return attribute; + } + + static String getContentPath(AbstractFile file) { + try { + return file.getUniquePath(); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Except while calling Content.getUniquePath() on " + file.getName(), ex); //NON-NLS + return ""; //NON-NLS + } + } + + static String getContentDisplayName(AbstractFile file) { + String name = file.getName(); + switch (name) { + case "..": + return DirectoryNode.DOTDOTDIR; + case ".": + return DirectoryNode.DOTDIR; + default: + return name; + } + } + /** * Fill map with AbstractFile properties * * @param map map with preserved ordering, where property names/values * are put * @param content The content to get properties for. + * + * TODO JIRA-4421: Deprecate this method and resolve warnings that appear + * in other locations. */ static public void fillPropertyMap(Map map, AbstractFile content) { map.put(NAME.toString(), getContentDisplayName(content)); @@ -265,196 +644,4 @@ public abstract class AbstractAbstractFileNode extends A map.put(MIMETYPE.toString(), StringUtils.defaultString(content.getMIMEType())); map.put(EXTENSION.toString(), content.getNameExtension()); } - - /** - * Get all tags from the case database that are associated with the file - * - * @return a list of tags that are associated with the file - */ - protected final List getContentTagsFromDatabase() { - List tags = new ArrayList<>(); - try { - tags.addAll(Case.getCurrentCaseThrows().getServices().getTagsManager().getContentTagsByContent(content)); - } catch (TskCoreException | NoCurrentCaseException ex) { - logger.log(Level.SEVERE, "Failed to get tags for content " + content.getName(), ex); - } - return tags; - } - - protected final CorrelationAttributeInstance getCorrelationAttributeInstance() { - CorrelationAttributeInstance correlationAttribute = null; - if (EamDbUtil.useCentralRepo()) { - correlationAttribute = EamArtifactUtil.getInstanceFromContent(content); - } - return correlationAttribute; - } - - /** - * Used by subclasses of AbstractAbstractFileNode to add the comment - * property to their sheets. 
- * - * @param sheetSet the modifiable Sheet.Set returned by - * Sheet.get(Sheet.PROPERTIES) - * @param tags the list of tags associated with the file - * @param attribute the correlation attribute associated with this file, - * null if central repo is not enabled - */ - @NbBundle.Messages({"AbstractAbstractFileNode.createSheet.comment.name=C", - "AbstractAbstractFileNode.createSheet.comment.displayName=C"}) - protected final void addCommentProperty(Sheet.Set sheetSet, List tags, CorrelationAttributeInstance attribute) { - - HasCommentStatus status = tags.size() > 0 ? HasCommentStatus.TAG_NO_COMMENT : HasCommentStatus.NO_COMMENT; - - for (ContentTag tag : tags) { - if (!StringUtils.isBlank(tag.getComment())) { - //if the tag is null or empty or contains just white space it will indicate there is not a comment - status = HasCommentStatus.TAG_COMMENT; - break; - } - } - if (attribute != null && !StringUtils.isBlank(attribute.getComment())) { - if (status == HasCommentStatus.TAG_COMMENT) { - status = HasCommentStatus.CR_AND_TAG_COMMENTS; - } else { - status = HasCommentStatus.CR_COMMENT; - } - } - sheetSet.put(new NodeProperty<>(AbstractAbstractFileNode_createSheet_comment_name(), AbstractAbstractFileNode_createSheet_comment_displayName(), NO_DESCR, - status)); - } - - /** - * Used by subclasses of AbstractAbstractFileNode to add the Score property - * to their sheets. - * - * @param sheetSet the modifiable Sheet.Set returned by - * Sheet.get(Sheet.PROPERTIES) - * @param tags the list of tags associated with the file - */ - @NbBundle.Messages({"AbstractAbstractFileNode.createSheet.score.name=S", - "AbstractAbstractFileNode.createSheet.score.displayName=S", - "AbstractAbstractFileNode.createSheet.notableFile.description=File recognized as notable.", - "AbstractAbstractFileNode.createSheet.interestingResult.description=File has interesting result associated with it.", - "AbstractAbstractFileNode.createSheet.taggedFile.description=File has been tagged.", - "AbstractAbstractFileNode.createSheet.notableTaggedFile.description=File tagged with notable tag.", - "AbstractAbstractFileNode.createSheet.noScore.description=No score"}) - protected final void addScoreProperty(Sheet.Set sheetSet, List tags) { - Score score = Score.NO_SCORE; - String description = Bundle.AbstractAbstractFileNode_createSheet_noScore_description(); - if (content.getKnown() == TskData.FileKnown.BAD) { - score = Score.NOTABLE_SCORE; - description = Bundle.AbstractAbstractFileNode_createSheet_notableFile_description(); - } - try { - if (score == Score.NO_SCORE && !content.getArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT).isEmpty()) { - score = Score.INTERESTING_SCORE; - description = Bundle.AbstractAbstractFileNode_createSheet_interestingResult_description(); - } - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Error getting artifacts for file: " + content.getName(), ex); - } - if (tags.size() > 0 && (score == Score.NO_SCORE || score == Score.INTERESTING_SCORE)) { - score = Score.INTERESTING_SCORE; - description = Bundle.AbstractAbstractFileNode_createSheet_taggedFile_description(); - for (ContentTag tag : tags) { - if (tag.getName().getKnownStatus() == TskData.FileKnown.BAD) { - score = Score.NOTABLE_SCORE; - description = Bundle.AbstractAbstractFileNode_createSheet_notableTaggedFile_description(); - break; - } - } - } - sheetSet.put(new NodeProperty<>(Bundle.AbstractAbstractFileNode_createSheet_score_name(), Bundle.AbstractAbstractFileNode_createSheet_score_displayName(), description, score)); 
- } - - @NbBundle.Messages({"AbstractAbstractFileNode.createSheet.count.name=O", - "AbstractAbstractFileNode.createSheet.count.displayName=O", - "AbstractAbstractFileNode.createSheet.count.noCentralRepo.description=Central repository was not enabled when this column was populated", - "AbstractAbstractFileNode.createSheet.count.hashLookupNotRun.description=Hash lookup had not been run on this file when the column was populated", - "# {0} - occuranceCount", - "AbstractAbstractFileNode.createSheet.count.description=There were {0} datasource(s) found with occurances of the correlation value"}) - protected final void addCountProperty(Sheet.Set sheetSet, CorrelationAttributeInstance attribute) { - Long count = -1L; //The column renderer will not display negative values, negative value used when count unavailble to preserve sorting - String description = Bundle.AbstractAbstractFileNode_createSheet_count_noCentralRepo_description(); - try { - //don't perform the query if there is no correlation value - if (attribute != null && StringUtils.isNotBlank(attribute.getCorrelationValue())) { - count = EamDb.getInstance().getCountUniqueCaseDataSourceTuplesHavingTypeValue(attribute.getCorrelationType(), attribute.getCorrelationValue()); - description = Bundle.AbstractAbstractFileNode_createSheet_count_description(count); - } else if (attribute != null) { - description = Bundle.AbstractAbstractFileNode_createSheet_count_hashLookupNotRun_description(); - } - } catch (EamDbException ex) { - logger.log(Level.WARNING, "Error getting count of datasources with correlation attribute", ex); - } catch (CorrelationAttributeNormalizationException ex) { - logger.log(Level.WARNING, "Unable to normalize data to get count of datasources with correlation attribute", ex); - } - - sheetSet.put( - new NodeProperty<>(Bundle.AbstractAbstractFileNode_createSheet_count_name(), Bundle.AbstractAbstractFileNode_createSheet_count_displayName(), description, count)); - } - - /** - * Used by subclasses of AbstractAbstractFileNode to add the tags property - * to their sheets. - * - * @param sheetSet the modifiable Sheet.Set returned by - * Sheet.get(Sheet.PROPERTIES) - * @deprecated - */ - @NbBundle.Messages("AbstractAbstractFileNode.tagsProperty.displayName=Tags") - @Deprecated - protected void addTagProperty(Sheet.Set sheetSet) { - List tags = new ArrayList<>(); - try { - tags.addAll(Case.getCurrentCaseThrows().getServices().getTagsManager().getContentTagsByContent(content)); - } catch (TskCoreException | NoCurrentCaseException ex) { - logger.log(Level.SEVERE, "Failed to get tags for content " + content.getName(), ex); - } - sheetSet.put(new NodeProperty<>("Tags", AbstractAbstractFileNode_tagsProperty_displayName(), - NO_DESCR, tags.stream().map(t -> t.getName().getDisplayName()) - .distinct() - .collect(Collectors.joining(", ")))); - } - - private static String getContentPath(AbstractFile file) { - try { - return file.getUniquePath(); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Except while calling Content.getUniquePath() on " + file, ex); //NON-NLS - return ""; //NON-NLS - } - } - - static String getContentDisplayName(AbstractFile file) { - String name = file.getName(); - switch (name) { - case "..": - return DirectoryNode.DOTDOTDIR; - - case ".": - return DirectoryNode.DOTDIR; - default: - return name; - } - } - - /** - * Gets a comma-separated values list of the names of the hash sets - * currently identified as including a given file. - * - * @param file The file. - * - * @return The CSV list of hash set names. 
- * @deprecated - */ - @Deprecated - protected static String getHashSetHitsCsvList(AbstractFile file) { - try { - return StringUtils.join(file.getHashSetNames(), ", "); - } catch (TskCoreException tskCoreException) { - logger.log(Level.WARNING, "Error getting hashset hits: ", tskCoreException); //NON-NLS - return ""; - } - } -} +} \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/AbstractFsContentNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/AbstractFsContentNode.java index ef8e1d024e..e4af5e24ef 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/AbstractFsContentNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/AbstractFsContentNode.java @@ -18,17 +18,10 @@ */ package org.sleuthkit.autopsy.datamodel; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; import org.openide.nodes.Sheet; import org.openide.util.NbBundle; -import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; -import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbUtil; -import org.sleuthkit.autopsy.core.UserPreferences; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.ContentTag; /** * Abstract class that implements the commonality between File and Directory @@ -58,7 +51,7 @@ public abstract class AbstractFsContentNode extends Abst */ AbstractFsContentNode(T content, boolean directoryBrowseMode) { super(content); - this.setDisplayName(AbstractAbstractFileNode.getContentDisplayName(content)); + this.setDisplayName(getContentDisplayName(content)); this.directoryBrowseMode = directoryBrowseMode; } @@ -71,37 +64,6 @@ public abstract class AbstractFsContentNode extends Abst protected Sheet createSheet() { Sheet sheet = super.createSheet(); Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES); - if (sheetSet == null) { - sheetSet = Sheet.createPropertiesSet(); - sheet.put(sheetSet); - } - List tags = getContentTagsFromDatabase(); - Map map = new LinkedHashMap<>(); - fillPropertyMap(map, getContent()); - final String NO_DESCR = Bundle.AbstractFsContentNode_noDesc_text(); - //add the name property before the comment property to ensure it is first column - sheetSet.put(new NodeProperty<>(AbstractFilePropertyType.NAME.toString(), - AbstractFilePropertyType.NAME.toString(), - NO_DESCR, - getName())); - - addScoreProperty(sheetSet, tags); - - //add the comment property before the propertyMap to ensure it is early in column order - CorrelationAttributeInstance correlationAttribute = null; - if (EamDbUtil.useCentralRepo() && UserPreferences.hideCentralRepoCommentsAndOccurrences()== false) { - correlationAttribute = getCorrelationAttributeInstance(); - } - addCommentProperty(sheetSet, tags, correlationAttribute); - - if (EamDbUtil.useCentralRepo() && UserPreferences.hideCentralRepoCommentsAndOccurrences()== false) { - addCountProperty(sheetSet, correlationAttribute); - } - - for (AbstractFilePropertyType propType : AbstractFilePropertyType.values()) { - final String propString = propType.toString(); - sheetSet.put(new NodeProperty<>(propString, propString, NO_DESCR, map.get(propString))); - } if (directoryBrowseMode) { sheetSet.put(new NodeProperty<>(HIDE_PARENT, HIDE_PARENT, HIDE_PARENT, HIDE_PARENT)); } diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactNode.java index 9a322446bc..7269a09e2c 100644 --- 
a/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/BlackboardArtifactNode.java @@ -320,7 +320,7 @@ public class BlackboardArtifactNode extends AbstractContentNode { } } - @Override - protected Sheet createSheet() { - Sheet sheet = super.createSheet(); - Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES); - if (sheetSet == null) { - sheetSet = Sheet.createPropertiesSet(); - sheet.put(sheetSet); - } - - List tags = getContentTagsFromDatabase(); - - Map map = new LinkedHashMap<>(); - fillPropertyMap(map); - - sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "LayoutFileNode.createSheet.name.name"), - NbBundle.getMessage(this.getClass(), "LayoutFileNode.createSheet.name.displayName"), - NbBundle.getMessage(this.getClass(), "LayoutFileNode.createSheet.name.desc"), - getName())); - - addScoreProperty(sheetSet, tags); - - CorrelationAttributeInstance correlationAttribute = null; - if (EamDbUtil.useCentralRepo() && UserPreferences.hideCentralRepoCommentsAndOccurrences() == false) { - correlationAttribute = getCorrelationAttributeInstance(); - } - addCommentProperty(sheetSet, tags, correlationAttribute); - - if (EamDbUtil.useCentralRepo() && UserPreferences.hideCentralRepoCommentsAndOccurrences() == false) { - addCountProperty(sheetSet, correlationAttribute); - } - final String NO_DESCR = NbBundle.getMessage(this.getClass(), "LayoutFileNode.createSheet.noDescr.text"); - for (Map.Entry entry : map.entrySet()) { - sheetSet.put(new NodeProperty<>(entry.getKey(), entry.getKey(), NO_DESCR, entry.getValue())); - } - - return sheet; - } - - @Override public T accept(ContentNodeVisitor visitor) { return visitor.visit(this); } @@ -151,10 +106,6 @@ public class LayoutFileNode extends AbstractAbstractFileNode { return actionsList.toArray(new Action[actionsList.size()]); } - void fillPropertyMap(Map map) { - AbstractAbstractFileNode.fillPropertyMap(map, getContent()); - } - @Override public String getItemType() { return getClass().getName(); diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/LocalDirectoryNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/LocalDirectoryNode.java index f4146e7a55..3e72ae92aa 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/LocalDirectoryNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/LocalDirectoryNode.java @@ -18,15 +18,6 @@ */ package org.sleuthkit.autopsy.datamodel; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import org.openide.nodes.Sheet; -import org.openide.util.NbBundle; -import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; -import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbUtil; -import org.sleuthkit.autopsy.core.UserPreferences; -import org.sleuthkit.datamodel.ContentTag; import org.sleuthkit.datamodel.LocalDirectory; /** @@ -46,51 +37,6 @@ public class LocalDirectoryNode extends SpecialDirectoryNode { } - @Override - @NbBundle.Messages({ - "LocalDirectoryNode.createSheet.name.name=Name", - "LocalDirectoryNode.createSheet.name.displayName=Name", - "LocalDirectoryNode.createSheet.name.desc=no description", - "LocalDirectoryNode.createSheet.noDesc=no description"}) - protected Sheet createSheet() { - Sheet sheet = super.createSheet(); - Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES); - if (sheetSet == null) { - sheetSet = Sheet.createPropertiesSet(); - sheet.put(sheetSet); - } - - List tags = getContentTagsFromDatabase(); - sheetSet.put(new 
NodeProperty<>(Bundle.LocalDirectoryNode_createSheet_name_name(), - Bundle.LocalDirectoryNode_createSheet_name_displayName(), - Bundle.LocalDirectoryNode_createSheet_name_desc(), - getName())); - - addScoreProperty(sheetSet, tags); - - CorrelationAttributeInstance correlationAttribute = null; - if (EamDbUtil.useCentralRepo() && UserPreferences.hideCentralRepoCommentsAndOccurrences() == false) { - correlationAttribute = getCorrelationAttributeInstance(); - } - addCommentProperty(sheetSet, tags, correlationAttribute); - - if (EamDbUtil.useCentralRepo() && UserPreferences.hideCentralRepoCommentsAndOccurrences() == false) { - addCountProperty(sheetSet, correlationAttribute); - } - // At present, a LocalDirectory will never be a datasource - the top level of a logical - // file set is a VirtualDirectory - Map map = new LinkedHashMap<>(); - fillPropertyMap(map, getContent()); - - final String NO_DESCR = Bundle.LocalDirectoryNode_createSheet_noDesc(); - for (Map.Entry entry : map.entrySet()) { - sheetSet.put(new NodeProperty<>(entry.getKey(), entry.getKey(), NO_DESCR, entry.getValue())); - } - - return sheet; - } - - @Override public T accept(ContentNodeVisitor visitor) { return visitor.visit(this); } diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/LocalFileNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/LocalFileNode.java index 0f69666454..5c83708fec 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/LocalFileNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/LocalFileNode.java @@ -22,19 +22,13 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; -import java.util.LinkedHashMap; import java.util.List; -import java.util.Map; import java.util.logging.Level; import javax.swing.Action; -import org.openide.nodes.Sheet; import org.openide.util.NbBundle; import org.openide.util.Utilities; import org.sleuthkit.autopsy.actions.AddContentTagAction; import org.sleuthkit.autopsy.actions.DeleteFileContentTagAction; -import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; -import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbUtil; -import org.sleuthkit.autopsy.core.UserPreferences; import org.sleuthkit.autopsy.coreutils.ContextMenuExtensionPoint; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.directorytree.ExternalViewerAction; @@ -44,7 +38,6 @@ import org.sleuthkit.autopsy.directorytree.ViewContextAction; import org.sleuthkit.autopsy.modules.embeddedfileextractor.ExtractArchiveWithPasswordAction; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.ContentTag; import org.sleuthkit.datamodel.TskCoreException; /** @@ -68,43 +61,6 @@ public class LocalFileNode extends AbstractAbstractFileNode { } - @Override - protected Sheet createSheet() { - Sheet sheet = super.createSheet(); - Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES); - if (sheetSet == null) { - sheetSet = Sheet.createPropertiesSet(); - sheet.put(sheetSet); - } - List tags = getContentTagsFromDatabase(); - Map map = new LinkedHashMap<>(); - fillPropertyMap(map, getContent()); - - sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "LocalFileNode.createSheet.name.name"), - NbBundle.getMessage(this.getClass(), "LocalFileNode.createSheet.name.displayName"), - NbBundle.getMessage(this.getClass(), "LocalFileNode.createSheet.name.desc"), - getName())); - - addScoreProperty(sheetSet, tags); - - CorrelationAttributeInstance 
correlationAttribute = null; - if (EamDbUtil.useCentralRepo() && UserPreferences.hideCentralRepoCommentsAndOccurrences() == false) { - correlationAttribute = getCorrelationAttributeInstance(); - } - addCommentProperty(sheetSet, tags, correlationAttribute); - - if (EamDbUtil.useCentralRepo() && UserPreferences.hideCentralRepoCommentsAndOccurrences() == false) { - addCountProperty(sheetSet, correlationAttribute); - } - final String NO_DESCR = NbBundle.getMessage(this.getClass(), "LocalFileNode.createSheet.noDescr.text"); - for (Map.Entry entry : map.entrySet()) { - sheetSet.put(new NodeProperty<>(entry.getKey(), entry.getKey(), NO_DESCR, entry.getValue())); - } - - return sheet; - } - - @Override public Action[] getActions(boolean context) { List actionsList = new ArrayList<>(); actionsList.addAll(Arrays.asList(super.getActions(true))); diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/TranslationTask.java b/Core/src/org/sleuthkit/autopsy/datamodel/TranslationTask.java new file mode 100755 index 0000000000..7589fbbf1e --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/datamodel/TranslationTask.java @@ -0,0 +1,59 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2018-2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.datamodel; + +import java.beans.PropertyChangeEvent; +import java.beans.PropertyChangeListener; +import java.lang.ref.WeakReference; +import org.sleuthkit.autopsy.events.AutopsyEvent; + +/** + * Completes the tasks needed to populate the Translation columns in the + * background so that the UI is not blocked while waiting for responses from the + * translation service. Once the event is done, it fires a PropertyChangeEvent + * to let the AbstractAbstractFileNode know it's time to update. 
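To make the event contract concrete, a hypothetical listener (same package, since NodeSpecificEvents is package-private) receiving what this task publishes could look like the following; the real consumer is the node's own weak PropertyChangeListener shown earlier, and java.beans imports are assumed.

    PropertyChangeListener exampleListener = evt -> {
        if (AbstractAbstractFileNode.NodeSpecificEvents.TRANSLATION_AVAILABLE.toString()
                .equals(evt.getPropertyName())) {
            String translatedName = (String) evt.getNewValue(); // the old value is sent as null
            System.out.println("Translated name available: " + translatedName);
        }
    };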
+ */ +class TranslationTask implements Runnable { + + private final WeakReference> weakNodeRef; + private final PropertyChangeListener listener; + + public TranslationTask(WeakReference> weakContentRef, PropertyChangeListener listener) { + this.weakNodeRef = weakContentRef; + this.listener = listener; + } + + @Override + public void run() { + AbstractAbstractFileNode fileNode = weakNodeRef.get(); + //Check for stale reference + if (fileNode == null) { + return; + } + + String translatedFileName = fileNode.getTranslatedFileName(); + if (!translatedFileName.isEmpty() && listener != null) { + //Only fire if the result is meaningful and the listener is not a stale reference + listener.propertyChange(new PropertyChangeEvent( + AutopsyEvent.SourceType.LOCAL.toString(), + AbstractAbstractFileNode.NodeSpecificEvents.TRANSLATION_AVAILABLE.toString(), + null, translatedFileName)); + } + } +} diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/VirtualDirectoryNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/VirtualDirectoryNode.java index 719bc3420a..19e8950d33 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/VirtualDirectoryNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/VirtualDirectoryNode.java @@ -21,18 +21,13 @@ package org.sleuthkit.autopsy.datamodel; import java.sql.ResultSet; import java.sql.SQLException; import java.util.LinkedHashMap; -import java.util.List; import java.util.Map; import java.util.logging.Level; import org.openide.nodes.Sheet; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; -import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; -import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbUtil; -import org.sleuthkit.autopsy.core.UserPreferences; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.datamodel.ContentTag; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.VirtualDirectory; @@ -78,39 +73,18 @@ public class VirtualDirectoryNode extends SpecialDirectoryNode { "VirtualDirectoryNode.createSheet.deviceId.displayName=Device ID", "VirtualDirectoryNode.createSheet.deviceId.desc=Device ID of the image"}) protected Sheet createSheet() { - Sheet sheet = super.createSheet(); - Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES); - if (sheetSet == null) { - sheetSet = Sheet.createPropertiesSet(); + //Do a special strategy for virtual directories.. 
+ if(this.content.isDataSource()){ + Sheet sheet = new Sheet(); + Sheet.Set sheetSet = Sheet.createPropertiesSet(); sheet.put(sheetSet); - } - List tags = getContentTagsFromDatabase(); - - sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "VirtualDirectoryNode.createSheet.name.name"), + + sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "VirtualDirectoryNode.createSheet.name.name"), NbBundle.getMessage(this.getClass(), "VirtualDirectoryNode.createSheet.name.displayName"), NbBundle.getMessage(this.getClass(), "VirtualDirectoryNode.createSheet.name.desc"), getName())); - if (!this.content.isDataSource()) { - addScoreProperty(sheetSet, tags); - - CorrelationAttributeInstance correlationAttribute = null; - if (EamDbUtil.useCentralRepo() && UserPreferences.hideCentralRepoCommentsAndOccurrences() == false) { - correlationAttribute = getCorrelationAttributeInstance(); - } - addCommentProperty(sheetSet, tags, correlationAttribute); - - if (EamDbUtil.useCentralRepo() && UserPreferences.hideCentralRepoCommentsAndOccurrences() == false) { - addCountProperty(sheetSet, correlationAttribute); - } - Map map = new LinkedHashMap<>(); - fillPropertyMap(map, getContent()); - - final String NO_DESCR = NbBundle.getMessage(this.getClass(), "VirtualDirectoryNode.createSheet.noDesc"); - for (Map.Entry entry : map.entrySet()) { - sheetSet.put(new NodeProperty<>(entry.getKey(), entry.getKey(), NO_DESCR, entry.getValue())); - } - } else { + sheetSet.put(new NodeProperty<>(Bundle.VirtualDirectoryNode_createSheet_type_name(), Bundle.VirtualDirectoryNode_createSheet_type_displayName(), Bundle.VirtualDirectoryNode_createSheet_type_desc(), @@ -141,10 +115,11 @@ public class VirtualDirectoryNode extends SpecialDirectoryNode { } catch (SQLException | TskCoreException | NoCurrentCaseException ex) { logger.log(Level.SEVERE, "Failed to get device id for the following image: " + this.content.getId(), ex); } - + return sheet; } - return sheet; + //Otherwise default to the AAFN createSheet method. 
+ return super.createSheet(); } @Override diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeTopComponent.java b/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeTopComponent.java index 97bc006441..4868106217 100644 --- a/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeTopComponent.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeTopComponent.java @@ -171,6 +171,7 @@ public final class DirectoryTreeTopComponent extends TopComponent implements Dat case UserPreferences.HIDE_KNOWN_FILES_IN_DATA_SRCS_TREE: case UserPreferences.HIDE_SLACK_FILES_IN_DATA_SRCS_TREE: case UserPreferences.HIDE_CENTRAL_REPO_COMMENTS_AND_OCCURRENCES: + case UserPreferences.DISPLAY_TRANSLATED_NAMES: case UserPreferences.KEEP_PREFERRED_VIEWER: refreshContentTreeSafe(); break; diff --git a/Core/src/org/sleuthkit/autopsy/images/network-wifi.png b/Core/src/org/sleuthkit/autopsy/images/network-wifi.png new file mode 100644 index 0000000000..5d7cb76ca1 Binary files /dev/null and b/Core/src/org/sleuthkit/autopsy/images/network-wifi.png differ diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java index 41758a0ec0..48fe8b5400 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java +++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java @@ -21,9 +21,6 @@ package org.sleuthkit.autopsy.modules.embeddedfileextractor; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; -import java.io.OutputStream; -import java.nio.file.Files; -import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -92,7 +89,7 @@ class SevenZipExtractor { private String moduleDirAbsolute; private Blackboard blackboard; - + private ProgressHandle progress; private int numItems; private String currentArchiveName; @@ -551,7 +548,7 @@ class SevenZipExtractor { numItems = inArchive.getNumberOfItems(); progress.start(numItems); progressStarted = true; - + //setup the archive local root folder final String uniqueArchiveFileName = FileUtil.escapeFileName(EmbeddedFileExtractorIngestModule.getUniqueName(archiveFile)); try { @@ -677,7 +674,7 @@ class SevenZipExtractor { inArchive.extract(extractionIndices, false, archiveCallBack); unpackSuccessful &= archiveCallBack.wasSuccessful(); - + archiveDetailsMap = null; @@ -799,140 +796,57 @@ class SevenZipExtractor { .mapToInt(Integer::intValue) .toArray(); } - + /** - * Stream used to unpack the archive to local file + * UnpackStream used by the SevenZipBindings to do archive extraction. A memory + * leak exists in the SevenZip library that will not let go of the streams until + * the entire archive extraction is complete. Instead of creating a new UnpackStream + * for every file in the archive, instead we just rebase our EncodedFileOutputStream pointer + * for every new file. 
*/ - private abstract static class UnpackStream implements ISequentialOutStream { + private final static class UnpackStream implements ISequentialOutStream { - private OutputStream output; + private EncodedFileOutputStream output; private String localAbsPath; - - UnpackStream(String localAbsPath) { + private int bytesWritten; + + UnpackStream(String localAbsPath) throws IOException { + this.output = new EncodedFileOutputStream(new FileOutputStream(localAbsPath), TskData.EncodingType.XOR1); this.localAbsPath = localAbsPath; - try { - output = new EncodedFileOutputStream(new FileOutputStream(localAbsPath), TskData.EncodingType.XOR1); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error writing extracted file: " + localAbsPath, ex); //NON-NLS - } - + this.bytesWritten = 0; + } + + public void setNewOutputStream(String localAbsPath) throws IOException { + this.output.close(); + this.output = new EncodedFileOutputStream(new FileOutputStream(localAbsPath), TskData.EncodingType.XOR1); + this.localAbsPath = localAbsPath; + this.bytesWritten = 0; } - - public abstract long getSize(); - - OutputStream getOutput() { - return output; + + public int getSize() { + return bytesWritten; } - - String getLocalAbsPath() { - return localAbsPath; - } - - public void close() { - if (output != null) { - try { - output.flush(); - output.close(); - output = null; - } catch (IOException e) { - logger.log(Level.SEVERE, "Error closing unpack stream for file: {0}", localAbsPath); //NON-NLS - } - } - } - } - - /** - * Stream used to unpack the archive of unknown size to local file - */ - private static class UnknownSizeUnpackStream extends UnpackStream { - - private long freeDiskSpace; - private boolean outOfSpace = false; - private long bytesWritten = 0; - - UnknownSizeUnpackStream(String localAbsPath, long freeDiskSpace) { - super(localAbsPath); - this.freeDiskSpace = freeDiskSpace; - } - - @Override - public long getSize() { - return this.bytesWritten; - } - + @Override public int write(byte[] bytes) throws SevenZipException { try { - // If the content size is unknown, cautiously write to disk. - // Write only if byte array is less than 80% of the current - // free disk space. - if (freeDiskSpace == IngestMonitor.DISK_FREE_SPACE_UNKNOWN || bytes.length < 0.8 * freeDiskSpace) { - getOutput().write(bytes); - // NOTE: this method is called multiple times for a - // single extractSlow() call. Update bytesWritten and - // freeDiskSpace after every write operation. 
- this.bytesWritten += bytes.length; - this.freeDiskSpace -= bytes.length; - } else { - this.outOfSpace = true; - logger.log(Level.INFO, NbBundle.getMessage( - SevenZipExtractor.class, - "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.noSpace.msg")); - throw new SevenZipException( - NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.noSpace.msg")); - } + output.write(bytes); + this.bytesWritten += bytes.length; } catch (IOException ex) { throw new SevenZipException( - NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.exception.msg", - getLocalAbsPath()), ex); + NbBundle.getMessage(SevenZipExtractor.class, + "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.exception.msg", + localAbsPath), ex); } return bytes.length; } - - @Override - public void close() { - if (getOutput() != null) { - try { - getOutput().flush(); - getOutput().close(); - if (this.outOfSpace) { - Files.delete(Paths.get(getLocalAbsPath())); - } - } catch (IOException e) { - logger.log(Level.SEVERE, "Error closing unpack stream for file: {0}", getLocalAbsPath()); //NON-NLS - } - } - } - } - - /** - * Stream used to unpack the archive of known size to local file - */ - private static class KnownSizeUnpackStream extends UnpackStream { - - private long size; - - KnownSizeUnpackStream(String localAbsPath, long size) { - super(localAbsPath); - this.size = size; - } - - @Override - public long getSize() { - return this.size; - } - - @Override - public int write(byte[] bytes) throws SevenZipException { - try { - getOutput().write(bytes); - } catch (IOException ex) { - throw new SevenZipException( - NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.exception.msg", - getLocalAbsPath()), ex); - } - return bytes.length; + + public void close() throws IOException { + try(EncodedFileOutputStream out = output) { + out.flush(); + } } + } /** @@ -979,7 +893,6 @@ class SevenZipExtractor { private final ProgressHandle progressHandle; private int inArchiveItemIndex; - private final long freeDiskSpace; private long createTimeInSeconds; private long modTimeInSeconds; @@ -996,7 +909,6 @@ class SevenZipExtractor { String password, long freeDiskSpace) { this.inArchive = inArchive; - this.freeDiskSpace = freeDiskSpace; this.progressHandle = progressHandle; this.archiveFile = archiveFile; this.archiveDetailsMap = archiveDetailsMap; @@ -1029,17 +941,24 @@ class SevenZipExtractor { return null; } - final Long archiveItemSize = (Long) inArchive.getProperty( - inArchiveItemIndex, PropID.SIZE); final String localAbsPath = archiveDetailsMap.get( inArchiveItemIndex).getLocalAbsPath(); - - if (archiveItemSize != null) { - unpackStream = new SevenZipExtractor.KnownSizeUnpackStream( - localAbsPath, archiveItemSize); - } else { - unpackStream = new SevenZipExtractor.UnknownSizeUnpackStream( - localAbsPath, freeDiskSpace); + + //If the Unpackstream has been allocated, then set the Outputstream + //to another file rather than creating a new unpack stream. The 7Zip + //binding has a memory leak, so creating new unpack streams will not be + //dereferenced. As a fix, we create one UnpackStream, and mutate its state, + //so that there only exists one 8192 byte buffer in memory per archive. 
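The comment above describes the workaround for the 7-Zip binding retaining every stream handed to it: keep a single long-lived stream object and repoint it at each new entry instead of allocating one per file. A stand-alone sketch of that rebasing pattern, with hypothetical names (this is not the patch's API, which wraps EncodedFileOutputStream):

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

class ReusableEntryWriter {

    private OutputStream out;

    void openEntry(String path) throws IOException {
        if (out != null) {
            out.close();                      // finish the previous entry first
        }
        out = new FileOutputStream(path);     // repoint the same writer at the new file
    }

    void write(byte[] bytes) throws IOException {
        out.write(bytes);
    }

    void close() throws IOException {
        if (out != null) {
            out.close();
        }
    }
}

Because the library only ever sees one writer object per archive, only one output buffer stays reachable for the duration of the extraction.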
+ try { + if (unpackStream != null) { + unpackStream.setNewOutputStream(localAbsPath); + } else { + unpackStream = new UnpackStream(localAbsPath); + } + } catch (IOException ex) { + logger.log(Level.WARNING, String.format("Error opening or setting new stream " //NON-NLS + + "for archive file at %s", localAbsPath), ex.getMessage()); //NON-NLS + return null; } return unpackStream; @@ -1068,18 +987,18 @@ class SevenZipExtractor { : writeTime.getTime() / 1000; accessTimeInSeconds = accessTime == null ? 0L : accessTime.getTime() / 1000; - + progressHandle.progress(archiveFile.getName() + ": " + (String) inArchive.getProperty(inArchiveItemIndex, PropID.PATH), inArchiveItemIndex); - + } /** * Updates the unpackedNode data in the tree after the archive has been * expanded to local disk. * - * @param result - ExtractOperationResult + * @param result - ExtractOperationResult * * @throws SevenZipException */ @@ -1111,7 +1030,11 @@ class SevenZipExtractor { !(Boolean) inArchive.getProperty(inArchiveItemIndex, PropID.IS_FOLDER), 0L, createTimeInSeconds, accessTimeInSeconds, modTimeInSeconds, localRelPath); - unpackStream.close(); + try { + unpackStream.close(); + } catch (IOException e) { + logger.log(Level.WARNING, "Error closing unpack stream for file: {0}", localAbsPath); //NON-NLS + } } @Override @@ -1222,9 +1145,9 @@ class SevenZipExtractor { */ List getRootFileObjects() { List ret = new ArrayList<>(); - for (UnpackedNode child : rootNode.getChildren()) { + rootNode.getChildren().forEach((child) -> { ret.add(child.getFile()); - } + }); return ret; } @@ -1236,17 +1159,17 @@ class SevenZipExtractor { */ List getAllFileObjects() { List ret = new ArrayList<>(); - for (UnpackedNode child : rootNode.getChildren()) { + rootNode.getChildren().forEach((child) -> { getAllFileObjectsRec(ret, child); - } + }); return ret; } private void getAllFileObjectsRec(List list, UnpackedNode parent) { list.add(parent.getFile()); - for (UnpackedNode child : parent.getChildren()) { + parent.getChildren().forEach((child) -> { getAllFileObjectsRec(list, child); - } + }); } /** diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java b/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java index 81c4c9ac72..13a5078658 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java @@ -271,6 +271,9 @@ class ReportHTML implements TableReportModule { case TSK_ACCOUNT: in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/report/images/accounts.png"); //NON-NLS break; + case TSK_WIFI_NETWORK: + in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/report/images/network-wifi.png"); //NON-NLS + break; default: logger.log(Level.WARNING, "useDataTypeIcon: unhandled artifact type = {0}", dataType); //NON-NLS in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/report/images/star.png"); //NON-NLS diff --git a/Core/src/org/sleuthkit/autopsy/texttranslation/NoServiceProviderException.java b/Core/src/org/sleuthkit/autopsy/texttranslation/NoServiceProviderException.java new file mode 100755 index 0000000000..79590fcfd9 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/texttranslation/NoServiceProviderException.java @@ -0,0 +1,30 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2018-2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.texttranslation; + +/** + * Exception to indicate that no Service Provider could be found during the + * Lookup action. + */ +public class NoServiceProviderException extends Exception { + + public NoServiceProviderException(String msg) { + super(msg); + } +} \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/texttranslation/TextTranslationService.java b/Core/src/org/sleuthkit/autopsy/texttranslation/TextTranslationService.java new file mode 100755 index 0000000000..9e9432248b --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/texttranslation/TextTranslationService.java @@ -0,0 +1,77 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2018-2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.texttranslation; + +import java.util.Optional; + +import org.openide.util.Lookup; + +/** + * Performs a lookup for a TextTranslator service provider and if present, + * will use this provider to run translation on the input. + */ +public final class TextTranslationService { + + private final static TextTranslationService tts = new TextTranslationService(); + + private final Optional translator; + + private TextTranslationService(){ + //Perform look up for Text Translation implementations ONLY ONCE during + //class loading. + translator = Optional.ofNullable(Lookup.getDefault() + .lookup(TextTranslator.class)); + } + + public static TextTranslationService getInstance() { + return tts; + } + + /** + * Translates the input string using whichever TextTranslator Service Provider + * was found during lookup. + * + * @param input Input string to be translated + * + * @return Translation string + * + * @throws NoServiceProviderException Failed to find a Translation service + * provider + * @throws TranslationException System exception for classes to use + * when specific translation + * implementations fail + */ + public String translate(String input) throws NoServiceProviderException, TranslationException { + if (translator.isPresent()) { + return translator.get().translate(input); + } + throw new NoServiceProviderException( + "Could not find a TextTranslator service provider"); + } + + /** + * Returns if a TextTranslator lookup successfully found an implementing + * class. 
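For context, TextTranslationService resolves its provider exactly once via the NetBeans Lookup, so an implementation only needs to be registered as a service to be picked up. The patch does not ship a concrete translator; the sketch below is a hypothetical provider, and registration through @ServiceProvider is the assumed standard Lookup mechanism rather than something shown in this change:

import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.texttranslation.TextTranslator;
import org.sleuthkit.autopsy.texttranslation.TranslationException;

@ServiceProvider(service = TextTranslator.class)
public class UpperCaseTranslator implements TextTranslator {

    @Override
    public String translate(String input) throws TranslationException {
        if (input == null) {
            throw new TranslationException("Nothing to translate");
        }
        return input.toUpperCase(); // stand-in for a real translation backend
    }
}

With such a provider on the classpath, callers go through TextTranslationService.getInstance().translate(...) and handle NoServiceProviderException when nothing is registered.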
+ * + * @return + */ + public boolean hasProvider() { + return translator.isPresent(); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/texttranslation/TextTranslator.java b/Core/src/org/sleuthkit/autopsy/texttranslation/TextTranslator.java new file mode 100755 index 0000000000..2e6b6bbd07 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/texttranslation/TextTranslator.java @@ -0,0 +1,29 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2018-2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.texttranslation; + +/** + * Interface for creating text translators. Implementing classes will be picked + * up and run by the Text Translation Service. + */ +public interface TextTranslator { + + String translate(String input) throws TranslationException; + +} diff --git a/Core/src/org/sleuthkit/autopsy/texttranslation/TranslationException.java b/Core/src/org/sleuthkit/autopsy/texttranslation/TranslationException.java new file mode 100755 index 0000000000..9c03f322dd --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/texttranslation/TranslationException.java @@ -0,0 +1,51 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2018-2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.texttranslation; + +/** + * Provides a system exception for the Text Translation errors + */ +public class TranslationException extends Exception { + + /** + * Constructs a new exception with null as its message. + */ + public TranslationException() { + super(); + } + + /** + * Constructs a new exception with the specified message. + * + * @param message The message. + */ + public TranslationException(String message) { + super(message); + } + + /** + * Constructs a new exception with the specified message and cause. + * + * @param message The message. + * @param cause The cause. 
+ */ + public TranslationException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java index 666a2cd02d..862713a807 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java @@ -22,6 +22,8 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.beans.PropertyChangeListener; +import java.sql.SQLException; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -68,6 +70,7 @@ import org.sleuthkit.autopsy.imagegallery.datamodel.grouping.GroupManager; import org.sleuthkit.autopsy.imagegallery.datamodel.grouping.GroupViewState; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction; @@ -204,8 +207,8 @@ public final class ImageGalleryController { // if we just turned on listening and a single-user case is open and that case is not up to date, then rebuild it // For multiuser cases, we defer DB rebuild till the user actually opens Image Gallery if (isEnabled && !wasPreviouslyEnabled - && isDataSourcesTableStale() - && (Case.getCurrentCaseThrows().getCaseType() == CaseType.SINGLE_USER_CASE)) { + && isDataSourcesTableStale() + && (Case.getCurrentCaseThrows().getCaseType() == CaseType.SINGLE_USER_CASE)) { //populate the db this.rebuildDB(); } @@ -324,6 +327,7 @@ public final class ImageGalleryController { groupManager.reset(); shutDownDBExecutor(); + drawableDB.close(); dbExecutor = getNewDBExecutor(); } @@ -384,6 +388,51 @@ public final class ImageGalleryController { } + /** + * Returns a map of all data source object ids, along with their DB build + * status. + * + * This includes any data sources already in the table, and any data sources + * that might have been added to the case, but are not in the datasources + * table. + * + * @return map of data source object ids and their Db build status. + */ + public Map getAllDataSourcesDrawableDBStatus() { + + Map dataSourceStatusMap = new HashMap<>(); + + // no current case open to check + if ((null == getDatabase()) || (null == getSleuthKitCase())) { + return dataSourceStatusMap; + } + + try { + Map knownDataSourceIds = getDatabase().getDataSourceDbBuildStatus(); + + List dataSources = getSleuthKitCase().getDataSources(); + Set caseDataSourceIds = new HashSet<>(); + dataSources.stream().map(DataSource::getId).forEach(caseDataSourceIds::add); + + // collect all data sources already in the table + knownDataSourceIds.entrySet().stream().forEach((Map.Entry t) -> { + dataSourceStatusMap.put(t.getKey(), t.getValue()); + }); + + // collect any new data sources in the case. 
+ caseDataSourceIds.forEach((Long id) -> { + if (!knownDataSourceIds.containsKey(id)) { + dataSourceStatusMap.put(id, DrawableDbBuildStatusEnum.UNKNOWN); + } + }); + + return dataSourceStatusMap; + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Image Gallery failed to get data source DB status.", ex); + return dataSourceStatusMap; + } + } + public boolean hasTooManyFiles(DataSource datasource) throws TskCoreException { String whereClause = (datasource == null) ? "1 = 1" @@ -393,6 +442,30 @@ public final class ImageGalleryController { } + /** + * Checks if the given data source has any files with no mimetype + * + * @param datasource + * + * @return true if the datasource has any files with no mime type + * + * @throws TskCoreException + */ + public boolean hasFilesWithNoMimetype(Content datasource) throws TskCoreException { + + // There are some special files/attributes in the root folder, like $BadClus:$Bad and $Security:$SDS + // The IngestTasksScheduler does not push them down to the ingest modules, + // and hence they do not have any assigned mimetype + String whereClause = "data_source_obj_id = " + datasource.getId() + + " AND ( meta_type = " + TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG.getValue() + ")" + + " AND ( mime_type IS NULL )" + + " AND ( meta_addr >= 32 ) " + + " AND ( parent_path <> '/' )" + + " AND ( name NOT like '$%:%' )"; + + return sleuthKitCase.countFilesWhere(whereClause) > 0; + } + synchronized private void shutDownDBExecutor() { if (dbExecutor != null) { dbExecutor.shutdownNow(); @@ -526,10 +599,11 @@ public final class ImageGalleryController { } } + /** - * Abstract base class for tasks associated with a file in the database + * task that updates one file in database with results from ingest */ - static abstract class FileTask extends BackgroundTask { + static class UpdateFileTask extends BackgroundTask { private final AbstractFile file; private final DrawableDB taskDB; @@ -541,22 +615,12 @@ public final class ImageGalleryController { public AbstractFile getFile() { return file; } - - FileTask(AbstractFile f, DrawableDB taskDB) { + + UpdateFileTask(AbstractFile f, DrawableDB taskDB) { super(); this.file = f; this.taskDB = taskDB; } - } - - /** - * task that updates one file in database with results from ingest - */ - static class UpdateFileTask extends FileTask { - - UpdateFileTask(AbstractFile f, DrawableDB taskDB) { - super(f, taskDB); - } /** * Update a file in the database @@ -566,41 +630,12 @@ public final class ImageGalleryController { try { DrawableFile drawableFile = DrawableFile.create(getFile(), true, false); getTaskDB().updateFile(drawableFile); - } catch (NullPointerException ex) { - // This is one of the places where we get many errors if the case is closed during processing. - // We don't want to print out a ton of exceptions if this is the case. 
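The getAllDataSourcesDrawableDBStatus method above merges two sources of truth: statuses already recorded in the drawables database and data sources present in the case but not yet recorded, which default to UNKNOWN. A compact sketch of that merge, with illustrative names and a String stand-in for the enum:

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

class StatusMerge {

    // Start from what the drawables DB already knows, then add any case data
    // source it has not seen yet with an UNKNOWN placeholder status.
    static Map<Long, String> merge(Map<Long, String> knownStatuses, Set<Long> caseDataSourceIds) {
        Map<Long, String> merged = new HashMap<>(knownStatuses);
        for (Long id : caseDataSourceIds) {
            merged.putIfAbsent(id, "UNKNOWN"); // stand-in for DrawableDbBuildStatusEnum.UNKNOWN
        }
        return merged;
    }
}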
- if (Case.isCaseOpen()) { - Logger.getLogger(UpdateFileTask.class.getName()).log(Level.SEVERE, "Error in UpdateFile task"); //NON-NLS - } + } catch (TskCoreException | SQLException ex) { + Logger.getLogger(UpdateFileTask.class.getName()).log(Level.SEVERE, "Error in update file task", ex); //NON-NLS } } } - /** - * task that updates one file in database with results from ingest - */ - static class RemoveFileTask extends FileTask { - - RemoveFileTask(AbstractFile f, DrawableDB taskDB) { - super(f, taskDB); - } - - /** - * Update a file in the database - */ - @Override - public void run() { - try { - getTaskDB().removeFile(getFile().getId()); - } catch (NullPointerException ex) { - // This is one of the places where we get many errors if the case is closed during processing. - // We don't want to print out a ton of exceptions if this is the case. - if (Case.isCaseOpen()) { - Logger.getLogger(RemoveFileTask.class.getName()).log(Level.SEVERE, "Case was closed out from underneath RemoveFile task"); //NON-NLS - } - } - } - } /** * Base abstract class for various methods of copying image files data, for @@ -613,13 +648,13 @@ public final class ImageGalleryController { static private final String FILE_EXTENSION_CLAUSE = "(extension LIKE '" //NON-NLS - + String.join("' OR extension LIKE '", FileTypeUtils.getAllSupportedExtensions()) //NON-NLS - + "') "; + + String.join("' OR extension LIKE '", FileTypeUtils.getAllSupportedExtensions()) //NON-NLS + + "') "; static private final String MIMETYPE_CLAUSE = "(mime_type LIKE '" //NON-NLS - + String.join("' OR mime_type LIKE '", FileTypeUtils.getAllSupportedMimeTypes()) //NON-NLS - + "') "; + + String.join("' OR mime_type LIKE '", FileTypeUtils.getAllSupportedMimeTypes()) //NON-NLS + + "') "; private final String DRAWABLE_QUERY; private final String DATASOURCE_CLAUSE; @@ -642,14 +677,14 @@ public final class ImageGalleryController { DRAWABLE_QUERY = DATASOURCE_CLAUSE - + " AND ( meta_type = " + TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG.getValue() + ")" - + " AND ( " - + //grab files with supported extension + + " AND ( meta_type = " + TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG.getValue() + ")" + + " AND ( " + + //grab files with supported extension FILE_EXTENSION_CLAUSE - //grab files with supported mime-types - + " OR " + MIMETYPE_CLAUSE //NON-NLS - //grab files with image or video mime-types even if we don't officially support them - + " OR mime_type LIKE 'video/%' OR mime_type LIKE 'image/%' )"; //NON-NLS + //grab files with supported mime-types + + " OR " + MIMETYPE_CLAUSE //NON-NLS + //grab files with image or video mime-types even if we don't officially support them + + " OR mime_type LIKE 'video/%' OR mime_type LIKE 'image/%' )"; //NON-NLS } /** @@ -695,7 +730,8 @@ public final class ImageGalleryController { //do in transaction drawableDbTransaction = taskDB.beginTransaction(); - /* We are going to periodically commit the CaseDB transaction + /* + * We are going to periodically commit the CaseDB transaction * and sleep so that the user can have Autopsy do other stuff * while these bulk tasks are ongoing. */ @@ -744,27 +780,34 @@ public final class ImageGalleryController { taskDB.commitTransaction(drawableDbTransaction, true); drawableDbTransaction = null; - } catch (TskCoreException | InterruptedException ex) { - progressHandle.progress(Bundle.BulkTask_stopCopy_status()); - logger.log(Level.WARNING, "Stopping copy to drawable db task. 
Failed to transfer all database contents", ex); //NON-NLS - MessageNotifyUtil.Notify.warn(Bundle.BulkTask_errPopulating_errMsg(), ex.getMessage()); - cleanup(false); - return; - } finally { - if (null != drawableDbTransaction) { - taskDB.rollbackTransaction(drawableDbTransaction); - } + } catch (TskCoreException | SQLException | InterruptedException ex) { if (null != caseDbTransaction) { try { caseDbTransaction.rollback(); } catch (TskCoreException ex2) { - logger.log(Level.SEVERE, "Error in trying to rollback transaction", ex2); //NON-NLS + logger.log(Level.SEVERE, String.format("Failed to roll back case db transaction after error: %s", ex.getMessage()), ex2); //NON-NLS } } - progressHandle.finish(); - if (taskCompletionStatus) { - taskDB.insertOrUpdateDataSource(dataSourceObjId, DrawableDB.DrawableDbBuildStatusEnum.COMPLETE); + if (null != drawableDbTransaction) { + try { + taskDB.rollbackTransaction(drawableDbTransaction); + } catch (SQLException ex2) { + logger.log(Level.SEVERE, String.format("Failed to roll back drawables db transaction after error: %s", ex.getMessage()), ex2); //NON-NLS + } } + progressHandle.progress(Bundle.BulkTask_stopCopy_status()); + logger.log(Level.WARNING, "Stopping copy to drawable db task. Failed to transfer all database contents", ex); //NON-NLS + MessageNotifyUtil.Notify.warn(Bundle.BulkTask_errPopulating_errMsg(), ex.getMessage()); + cleanup(false); + } finally { + progressHandle.finish(); + + DrawableDB.DrawableDbBuildStatusEnum datasourceDrawableDBStatus + = (taskCompletionStatus) + ? DrawableDB.DrawableDbBuildStatusEnum.COMPLETE + : DrawableDB.DrawableDbBuildStatusEnum.DEFAULT; + taskDB.insertOrUpdateDataSource(dataSourceObjId, datasourceDrawableDBStatus); + updateMessage(""); updateProgress(-1.0); } @@ -831,36 +874,4 @@ public final class ImageGalleryController { } } - /** - * Copy files from a newly added data source into the DB. Get all "drawable" - * files, based on extension and mime-type. After ingest we use file type id - * module and if necessary jpeg/png signature matching to add/remove files - */ - @NbBundle.Messages({"PrePopulateDataSourceFiles.committingDb.status=committing image/video database"}) - static class PrePopulateDataSourceFiles extends BulkTransferTask { - - /** - * @param dataSourceObjId The object ID of the DataSource that is being - * pre-populated into the DrawableDB. - * @param controller The controller for this task. 
- */ - PrePopulateDataSourceFiles(long dataSourceObjId, ImageGalleryController controller) { - super(dataSourceObjId, controller); - } - - @Override - protected void cleanup(boolean success) { - } - - @Override - void processFile(final AbstractFile f, DrawableDB.DrawableTransaction tr, CaseDbTransaction caseDBTransaction) { - taskDB.insertBasicFileData(DrawableFile.create(f, false, false), tr, caseDBTransaction); - } - - @Override - @NbBundle.Messages({"PrePopulateDataSourceFiles.prepopulatingDb.status=prepopulating image/video database",}) - ProgressHandle getInitialProgressHandle() { - return ProgressHandle.createHandle(Bundle.PrePopulateDataSourceFiles_prepopulatingDb_status(), this); - } - } } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryModule.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryModule.java index c0336b76b1..5341e0be27 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryModule.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryModule.java @@ -43,6 +43,8 @@ import org.sleuthkit.autopsy.ingest.IngestManager.IngestJobEvent; import static org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.DATA_ADDED; import static org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.FILE_DONE; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.autopsy.ingest.events.DataSourceAnalysisCompletedEvent; +import org.sleuthkit.autopsy.ingest.events.DataSourceAnalysisStartedEvent; import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -187,14 +189,7 @@ public class ImageGalleryModule { if (isDrawableAndNotKnown(file)) { con.queueDBTask(new ImageGalleryController.UpdateFileTask(file, controller.getDatabase())); } - // Remove it from the DB if it is no longer relevant, but had the correct extension - else if (FileTypeUtils.getAllSupportedExtensions().contains(file.getNameExtension())) { - /* Doing this check results in fewer tasks queued - * up, and faster completion of db update. This file - * would have gotten scooped up in initial grab, but - * actually we don't need it */ - con.queueDBTask(new ImageGalleryController.RemoveFileTask(file, controller.getDatabase())); - } + } catch (FileTypeDetector.FileTypeDetectorInitException ex) { logger.log(Level.SEVERE, "Unable to determine if file is drawable and not known. 
Not making any changes to DB", ex); //NON-NLS MessageNotifyUtil.Notify.error("Image Gallery Error", @@ -281,8 +276,8 @@ public class ImageGalleryModule { //For a data source added on the local node, prepopulate all file data to drawable database if (((AutopsyEvent) evt).getSourceType() == AutopsyEvent.SourceType.LOCAL) { Content newDataSource = (Content) evt.getNewValue(); - if (con.isListeningEnabled()) { - con.queueDBTask(new ImageGalleryController.PrePopulateDataSourceFiles(newDataSource.getId(), controller)); + if (con.isListeningEnabled()) { + controller.getDatabase().insertOrUpdateDataSource(newDataSource.getId(), DrawableDB.DrawableDbBuildStatusEnum.DEFAULT); } } break; @@ -326,40 +321,68 @@ public class ImageGalleryModule { @Override public void propertyChange(PropertyChangeEvent evt) { IngestJobEvent eventType = IngestJobEvent.valueOf(evt.getPropertyName()); - if (eventType != IngestJobEvent.DATA_SOURCE_ANALYSIS_COMPLETED - || ((AutopsyEvent) evt).getSourceType() != AutopsyEvent.SourceType.REMOTE) { - return; - } - // A remote node added a new data source and just finished ingest on it. - //drawable db is stale, and if ImageGallery is open, ask user what to do - + try { - ImageGalleryController con = getController(); - con.setStale(true); - if (con.isListeningEnabled()) { - SwingUtilities.invokeLater(() -> { - if (ImageGalleryTopComponent.isImageGalleryOpen()) { - int showAnswer = JOptionPane.showConfirmDialog(ImageGalleryTopComponent.getTopComponent(), - Bundle.ImageGalleryController_dataSourceAnalyzed_confDlg_msg(), - Bundle.ImageGalleryController_dataSourceAnalyzed_confDlg_title(), - JOptionPane.YES_NO_CANCEL_OPTION, JOptionPane.WARNING_MESSAGE); - - switch (showAnswer) { - case JOptionPane.YES_OPTION: - con.rebuildDB(); - break; - case JOptionPane.NO_OPTION: - case JOptionPane.CANCEL_OPTION: - default: - break; //do nothing - } + ImageGalleryController controller = getController(); + + if (eventType == IngestJobEvent.DATA_SOURCE_ANALYSIS_STARTED) { + + if (((AutopsyEvent) evt).getSourceType() == AutopsyEvent.SourceType.LOCAL) { + if (controller.isListeningEnabled()) { + DataSourceAnalysisStartedEvent dataSourceAnalysisStartedEvent = (DataSourceAnalysisStartedEvent) evt; + Content dataSource = dataSourceAnalysisStartedEvent.getDataSource(); + + controller.getDatabase().insertOrUpdateDataSource(dataSource.getId(), DrawableDB.DrawableDbBuildStatusEnum.IN_PROGRESS); } - }); + } + } else if (eventType == IngestJobEvent.DATA_SOURCE_ANALYSIS_COMPLETED) { + + if (((AutopsyEvent) evt).getSourceType() == AutopsyEvent.SourceType.LOCAL) { + if (controller.isListeningEnabled()) { + DataSourceAnalysisCompletedEvent dataSourceAnalysisCompletedEvent = (DataSourceAnalysisCompletedEvent) evt; + Content dataSource = dataSourceAnalysisCompletedEvent.getDataSource(); + + DrawableDB.DrawableDbBuildStatusEnum datasourceDrawableDBStatus = + controller.hasFilesWithNoMimetype(dataSource) ? + DrawableDB.DrawableDbBuildStatusEnum.DEFAULT : + DrawableDB.DrawableDbBuildStatusEnum.COMPLETE; + + controller.getDatabase().insertOrUpdateDataSource(dataSource.getId(), datasourceDrawableDBStatus); + } + return; + } + + if (((AutopsyEvent) evt).getSourceType() == AutopsyEvent.SourceType.REMOTE) { + // A remote node added a new data source and just finished ingest on it. 
+ //drawable db is stale, and if ImageGallery is open, ask user what to do + controller.setStale(true); + if (controller.isListeningEnabled()) { + SwingUtilities.invokeLater(() -> { + if (ImageGalleryTopComponent.isImageGalleryOpen()) { + int showAnswer = JOptionPane.showConfirmDialog(ImageGalleryTopComponent.getTopComponent(), + Bundle.ImageGalleryController_dataSourceAnalyzed_confDlg_msg(), + Bundle.ImageGalleryController_dataSourceAnalyzed_confDlg_title(), + JOptionPane.YES_NO_CANCEL_OPTION, JOptionPane.WARNING_MESSAGE); + + switch (showAnswer) { + case JOptionPane.YES_OPTION: + controller.rebuildDB(); + break; + case JOptionPane.NO_OPTION: + case JOptionPane.CANCEL_OPTION: + default: + break; //do nothing + } + } + }); + } + } } - } catch (NoCurrentCaseException ex) { - logger.log(Level.SEVERE, "Attempted to access ImageGallery with no case open.", ex); //NON-NLS + } + catch (NoCurrentCaseException ex) { + logger.log(Level.SEVERE, "Attempted to access ImageGallery with no case open.", ex); //NON-NLS } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error getting ImageGalleryController.", ex); //NON-NLS + logger.log(Level.SEVERE, "Error getting ImageGalleryController.", ex); //NON-NLS } } } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryTopComponent.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryTopComponent.java index e51f27cdef..927dc6b70e 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryTopComponent.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryTopComponent.java @@ -18,13 +18,10 @@ */ package org.sleuthkit.autopsy.imagegallery; -import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Optional; -import java.util.function.Consumer; import java.util.logging.Level; import java.util.stream.Collectors; import javafx.application.Platform; @@ -55,10 +52,8 @@ import javafx.stage.Modality; import javax.swing.SwingUtilities; import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; import static org.apache.commons.lang3.ObjectUtils.notEqual; -import org.apache.commons.lang3.StringUtils; import org.openide.explorer.ExplorerManager; import org.openide.explorer.ExplorerUtils; -import org.openide.util.Exceptions; import org.openide.util.Lookup; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; @@ -232,8 +227,8 @@ public final class ImageGalleryTopComponent extends TopComponent implements Expl @SuppressWarnings(value = "unchecked") ComboBox> comboBox = (ComboBox>) datasourceDialog.getDialogPane().lookup(".combo-box"); //set custom cell renderer - comboBox.setCellFactory((ListView> param) -> new DataSourceCell(dataSourcesTooManyFiles)); - comboBox.setButtonCell(new DataSourceCell(dataSourcesTooManyFiles)); + comboBox.setCellFactory((ListView> param) -> new DataSourceCell(dataSourcesTooManyFiles, controller.getAllDataSourcesDrawableDBStatus())); + comboBox.setButtonCell(new DataSourceCell(dataSourcesTooManyFiles, controller.getAllDataSourcesDrawableDBStatus())); DataSource dataSource = datasourceDialog.showAndWait().orElse(Optional.empty()).orElse(null); try { diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/OpenAction.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/OpenAction.java index 81431b771e..5a921e6130 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/OpenAction.java +++ 
b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/OpenAction.java @@ -19,11 +19,10 @@ package org.sleuthkit.autopsy.imagegallery.actions; import com.google.common.util.concurrent.ListenableFuture; -import static com.google.common.util.concurrent.MoreExecutors.listeningDecorator; import java.awt.Component; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; -import static java.util.concurrent.Executors.newSingleThreadExecutor; +import java.util.Map; import java.util.logging.Level; import javafx.application.Platform; import javafx.scene.control.Alert; @@ -32,7 +31,6 @@ import javafx.scene.control.CheckBox; import javafx.scene.control.Label; import javafx.scene.layout.VBox; import javafx.stage.Modality; -import javax.annotation.concurrent.ThreadSafe; import javax.swing.ImageIcon; import javax.swing.JButton; import javax.swing.JMenuItem; @@ -50,11 +48,12 @@ import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.core.Installer; import org.sleuthkit.autopsy.core.RuntimeProperties; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.imagegallery.ImageGalleryController; import org.sleuthkit.autopsy.imagegallery.ImageGalleryModule; import org.sleuthkit.autopsy.imagegallery.ImageGalleryPreferences; import org.sleuthkit.autopsy.imagegallery.ImageGalleryTopComponent; +import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableDB; +import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableDB.DrawableDbBuildStatusEnum; import org.sleuthkit.autopsy.imagegallery.gui.GuiUtils; import org.sleuthkit.autopsy.imagegallery.utils.TaskUtils; import static org.sleuthkit.autopsy.imagegallery.utils.TaskUtils.addFXCallback; @@ -70,6 +69,8 @@ import org.sleuthkit.datamodel.TskCoreException; "OpenAction.stale.confDlg.msg=The image / video database may be out of date. " + "Do you want to update and listen for further ingest results?\n" + "Choosing 'yes' will update the database and enable listening to future ingests.", + "OpenAction.notAnalyzedDlg.msg=No image/video files available to display yet.\n" + + "Please run FileType and EXIF ingest modules.", "OpenAction.stale.confDlg.title=Image Gallery"}) public final class OpenAction extends CallableSystemAction { @@ -184,17 +185,43 @@ public final class OpenAction extends CallableSystemAction { } private void checkDBStale(ImageGalleryController controller) { - //check if db is stale on throw away bg thread and then react back on jfx thread. - ListenableFuture staleFuture = TaskUtils.getExecutorForClass(OpenAction.class) - .submit(controller::isDataSourcesTableStale); - addFXCallback(staleFuture, - dbIsStale -> { + + ListenableFuture> dataSourceStatusMapFuture = TaskUtils.getExecutorForClass(OpenAction.class) + .submit(controller::getAllDataSourcesDrawableDBStatus); + + addFXCallback(dataSourceStatusMapFuture, + dataSourceStatusMap -> { + + boolean dbIsStale = false; + for (Map.Entry entry : dataSourceStatusMap.entrySet()) { + DrawableDbBuildStatusEnum status = entry.getValue(); + if (DrawableDbBuildStatusEnum.COMPLETE != status) { + dbIsStale = true; + } + } + //back on fx thread. 
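The staleness rule implemented by the loop above is simply "stale if any data source's drawables DB status is not COMPLETE". The same decision can be read as a one-liner; this is a behavioral sketch only, reusing the enum introduced by this patch:

import java.util.Map;
import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableDB;

class StaleCheck {

    static boolean isStale(Map<Long, DrawableDB.DrawableDbBuildStatusEnum> statusByDataSource) {
        return statusByDataSource.values().stream()
                .anyMatch(status -> status != DrawableDB.DrawableDbBuildStatusEnum.COMPLETE);
    }
}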
if (false == dbIsStale) { //drawable db is not stale, just open it openTopComponent(); } else { - //drawable db is stale, ask what to do + + // If there is only one datasource and it's in DEFAULT State - + // ingest modules need to be run on the data source + if (dataSourceStatusMap.size()== 1) { + Map.Entry entry = dataSourceStatusMap.entrySet().iterator().next(); + if (entry.getValue() == DrawableDbBuildStatusEnum.DEFAULT ) { + Alert alert = new Alert(Alert.AlertType.WARNING, Bundle.OpenAction_notAnalyzedDlg_msg(), ButtonType.OK); + alert.setTitle(Bundle.OpenAction_stale_confDlg_title()); + alert.initModality(Modality.APPLICATION_MODAL); + + alert.showAndWait(); + return; + } + } + + //drawable db is stale, + //ask what to do Alert alert = new Alert(Alert.AlertType.WARNING, Bundle.OpenAction_stale_confDlg_msg(), ButtonType.YES, ButtonType.NO, ButtonType.CANCEL); diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableDB.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableDB.java index 8c27f6cce5..06f88aca93 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableDB.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableDB.java @@ -80,9 +80,8 @@ import org.sleuthkit.datamodel.TskDataException; import org.sqlite.SQLiteJDBCLoader; /** - * This class is the public interface to the Image Gallery SQLite database. This - * class borrows a lot of ideas and techniques (for good or ill) from - * SleuthkitCase + * Provides access to the drawables database and selected tables in the case + * database. */ public final class DrawableDB { @@ -98,42 +97,42 @@ public final class DrawableDB { private static final String GROUPS_TABLENAME = "image_gallery_groups"; //NON-NLS private static final String GROUPS_SEEN_TABLENAME = "image_gallery_groups_seen"; //NON-NLS - private final PreparedStatement insertHashSetStmt; + private PreparedStatement insertHashSetStmt; - private final List preparedStatements = new ArrayList<>(); + private List preparedStatements = new ArrayList<>(); - private final PreparedStatement removeFileStmt; + private PreparedStatement removeFileStmt; - private final PreparedStatement selectHashSetStmt; + private PreparedStatement selectHashSetStmt; - private final PreparedStatement selectHashSetNamesStmt; + private PreparedStatement selectHashSetNamesStmt; - private final PreparedStatement insertHashHitStmt; + private PreparedStatement insertHashHitStmt; - private final PreparedStatement removeHashHitStmt; + private PreparedStatement removeHashHitStmt; - private final PreparedStatement updateDataSourceStmt; + private PreparedStatement updateDataSourceStmt; - private final PreparedStatement updateFileStmt; - private final PreparedStatement insertFileStmt; + private PreparedStatement updateFileStmt; + private PreparedStatement insertFileStmt; - private final PreparedStatement pathGroupStmt; + private PreparedStatement pathGroupStmt; - private final PreparedStatement nameGroupStmt; + private PreparedStatement nameGroupStmt; - private final PreparedStatement created_timeGroupStmt; + private PreparedStatement created_timeGroupStmt; - private final PreparedStatement modified_timeGroupStmt; + private PreparedStatement modified_timeGroupStmt; - private final PreparedStatement makeGroupStmt; + private PreparedStatement makeGroupStmt; - private final PreparedStatement modelGroupStmt; + private PreparedStatement modelGroupStmt; - private final PreparedStatement analyzedGroupStmt; + private 
PreparedStatement analyzedGroupStmt; - private final PreparedStatement hashSetGroupStmt; + private PreparedStatement hashSetGroupStmt; - private final PreparedStatement pathGroupFilterByDataSrcStmt; + private PreparedStatement pathGroupFilterByDataSrcStmt; /** * map from {@link DrawableAttribute} to the {@link PreparedStatement} that @@ -146,11 +145,12 @@ public final class DrawableDB { private final Path dbPath; - volatile private Connection con; + @GuardedBy("DBLock") + private Connection con; private final ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock(true); //use fairness policy - private final Lock DBLock = rwLock.writeLock(); //using exclusing lock for all db ops for now + private final Lock DBLock = rwLock.writeLock(); // Currently serializing everything with one database connection // caches to make inserts / updates faster private Cache groupCache = CacheBuilder.newBuilder().expireAfterWrite(5, TimeUnit.MINUTES).build(); @@ -160,8 +160,7 @@ public final class DrawableDB { private Set hasHashCache = new HashSet<>(); // obj id of files with hash set hits private Set hasExifCache = new HashSet<>(); // obj id of files with EXIF (make/model) private int cacheBuildCount = 0; // number of tasks taht requested the caches be built - - + static {//make sure sqlite driver is loaded // possibly redundant try { Class.forName("org.sqlite.JDBC"); @@ -174,80 +173,69 @@ public final class DrawableDB { /** * Enum to track Image gallery db rebuild status for a data source + * + * DO NOT add in the middle. */ public enum DrawableDbBuildStatusEnum { UNKNOWN, /// no known status - IN_PROGRESS, /// drawable db rebuild has been started for the data source - COMPLETE; /// drawable db rebuild is complete for the data source + IN_PROGRESS, /// ingest or db rebuild is in progress + COMPLETE, /// All files in the data source have had file type detected + DEFAULT; /// Not all files in the data source have had file type detected } - //////////////general database logic , mostly borrowed from sleuthkitcase - /** - * Lock to protect against concurrent write accesses to case database and to - * block readers while database is in write transaction. Should be utilized - * by all db code where underlying storage supports max. 1 concurrent writer - * MUST always call dbWriteUnLock() as early as possible, in the same thread - * where dbWriteLock() was called - */ - public void dbWriteLock() { - //Logger.getLogger("LOCK").log(Level.INFO, "Locking " + rwLock.toString()); + private void dbWriteLock() { DBLock.lock(); } - /** - * Release previously acquired write lock acquired in this thread using - * dbWriteLock(). Call in "finally" block to ensure the lock is always - * released. - */ - public void dbWriteUnlock() { - //Logger.getLogger("LOCK").log(Level.INFO, "UNLocking " + rwLock.toString()); + private void dbWriteUnlock() { DBLock.unlock(); } /** - * Lock to protect against read while it is in a write transaction state. - * Supports multiple concurrent readers if there is no writer. MUST always - * call dbReadUnLock() as early as possible, in the same thread where - * dbReadLock() was called. - */ - void dbReadLock() { - DBLock.lock(); - } - - /** - * Release previously acquired read lock acquired in this thread using - * dbReadLock(). Call in "finally" block to ensure the lock is always - * released. 
- */ - void dbReadUnlock() { - DBLock.unlock(); - } - - /** - * @param dbPath the path to the db file + * Constructs an object that provides access to the drawables database and + * selected tables in the case database. If the specified drawables database + * does not already exist, it is created. * - * @throws SQLException if there is problem creating or configuring the db + * @param dbPath The path to the drawables database file. + * @param controller The controller for the IMage Gallery tool. + * + * @throws IOException The database directory could not be created. + * @throws SQLException The drawables database could not be created or + * opened. + * @throws TskCoreException The drawables database or the case database + * could not be correctly initialized for Image + * Gallery use. */ - private DrawableDB(Path dbPath, ImageGalleryController controller) throws TskCoreException, SQLException, IOException { + private DrawableDB(Path dbPath, ImageGalleryController controller) throws IOException, SQLException, TskCoreException { this.dbPath = dbPath; this.controller = controller; - this.tskCase = controller.getSleuthKitCase(); - this.groupManager = controller.getGroupManager(); - Files.createDirectories(dbPath.getParent()); - if (initializeDBSchema()) { + tskCase = this.controller.getSleuthKitCase(); + groupManager = this.controller.getGroupManager(); + Files.createDirectories(this.dbPath.getParent()); + dbWriteLock(); + try { + con = DriverManager.getConnection("jdbc:sqlite:" + dbPath.toString()); //NON-NLS + if (!initializeDBSchema() || !prepareStatements() || !initializeStandardGroups() || !initializeImageList()) { + close(); + throw new TskCoreException("Failed to initialize drawables database for Image Gallery use"); //NON-NLS + } + } finally { + dbWriteUnlock(); + } + } + + private boolean prepareStatements() { + try { updateFileStmt = prepareStatement( "INSERT OR REPLACE INTO drawable_files (obj_id, data_source_obj_id, path, name, created_time, modified_time, make, model, analyzed) " //NON-NLS + "VALUES (?,?,?,?,?,?,?,?,?)"); //NON-NLS insertFileStmt = prepareStatement( "INSERT OR IGNORE INTO drawable_files (obj_id, data_source_obj_id, path, name, created_time, modified_time, make, model, analyzed) " //NON-NLS + "VALUES (?,?,?,?,?,?,?,?,?)"); //NON-NLS - updateDataSourceStmt = prepareStatement( "INSERT OR REPLACE INTO datasources (ds_obj_id, drawable_db_build_status) " //NON-NLS + " VALUES (?,?)"); //NON-NLS - removeFileStmt = prepareStatement("DELETE FROM drawable_files WHERE obj_id = ?"); //NON-NLS - pathGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE path = ? ", DrawableAttribute.PATH); //NON-NLS nameGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE name = ? ", DrawableAttribute.NAME); //NON-NLS created_timeGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE created_time = ? ", DrawableAttribute.CREATED_TIME); //NON-NLS @@ -256,39 +244,38 @@ public final class DrawableDB { modelGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE model = ? 
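The "DO NOT add in the middle" warning on DrawableDbBuildStatusEnum deserves one sentence of justification: if the status is persisted by ordinal (an assumption here; the persistence scheme is not visible in this hunk), inserting or reordering constants silently remaps every value already written to the datasources table. A tiny demonstration of why the constant order matters under that assumption:

public class EnumOrdinalDemo {

    enum Status { UNKNOWN, IN_PROGRESS, COMPLETE, DEFAULT }

    public static void main(String[] args) {
        int stored = Status.IN_PROGRESS.ordinal();   // value written to the table (assumed scheme)
        Status readBack = Status.values()[stored];   // inserting a constant before IN_PROGRESS would change this
        System.out.println(readBack);                // prints IN_PROGRESS
    }
}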
", DrawableAttribute.MODEL); //NON-NLS analyzedGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE analyzed = ?", DrawableAttribute.ANALYZED); //NON-NLS hashSetGroupStmt = prepareStatement("SELECT drawable_files.obj_id AS obj_id, analyzed FROM drawable_files , hash_sets , hash_set_hits WHERE drawable_files.obj_id = hash_set_hits.obj_id AND hash_sets.hash_set_id = hash_set_hits.hash_set_id AND hash_sets.hash_set_name = ?", DrawableAttribute.HASHSET); //NON-NLS - - //add other xyzFilterByDataSrc prepared statments as we add support for filtering by DS to other groups pathGroupFilterByDataSrcStmt = prepareFilterByDataSrcStatement("SELECT obj_id , analyzed FROM drawable_files WHERE path = ? AND data_source_obj_id = ?", DrawableAttribute.PATH); - selectHashSetNamesStmt = prepareStatement("SELECT DISTINCT hash_set_name FROM hash_sets"); //NON-NLS insertHashSetStmt = prepareStatement("INSERT OR IGNORE INTO hash_sets (hash_set_name) VALUES (?)"); //NON-NLS selectHashSetStmt = prepareStatement("SELECT hash_set_id FROM hash_sets WHERE hash_set_name = ?"); //NON-NLS - insertHashHitStmt = prepareStatement("INSERT OR IGNORE INTO hash_set_hits (hash_set_id, obj_id) VALUES (?,?)"); //NON-NLS removeHashHitStmt = prepareStatement("DELETE FROM hash_set_hits WHERE obj_id = ?"); //NON-NLS + return true; + } catch (TskCoreException | SQLException ex) { + logger.log(Level.SEVERE, "Failed to prepare all statements", ex); //NON-NLS + return false; + } + } - CaseDbTransaction caseDbTransaction = null; - try { - caseDbTransaction = tskCase.beginTransaction(); - for (DhsImageCategory cat : DhsImageCategory.values()) { - insertGroup(cat.getDisplayName(), DrawableAttribute.CATEGORY, caseDbTransaction); - } - caseDbTransaction.commit(); - caseDbTransaction = null; - } - finally { - if (null != caseDbTransaction) { - try { - caseDbTransaction.rollback(); - } catch (TskCoreException ex2) { - logger.log(Level.SEVERE, "Error in trying to rollback transaction", ex2); - } + private boolean initializeStandardGroups() { + CaseDbTransaction caseDbTransaction = null; + try { + caseDbTransaction = tskCase.beginTransaction(); + for (DhsImageCategory cat : DhsImageCategory.values()) { + insertGroup(cat.getDisplayName(), DrawableAttribute.CATEGORY, caseDbTransaction); + } + caseDbTransaction.commit(); + return true; + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Failed to insert standard groups", ex); //NON-NLS + if (null != caseDbTransaction) { + try { + caseDbTransaction.rollback(); + } catch (TskCoreException ex2) { + logger.log(Level.SEVERE, "Failed to roll back case DB transaction", ex2); } } - - initializeImageList(); - } else { - throw new TskCoreException("Failed to initialize Image Gallery db schema"); + return false; } } @@ -303,10 +290,20 @@ public final class DrawableDB { * * @throws SQLException if unable to prepare the statement */ - private PreparedStatement prepareStatement(String stmtString) throws SQLException { - PreparedStatement prepareStatement = con.prepareStatement(stmtString); - preparedStatements.add(prepareStatement); - return prepareStatement; + private PreparedStatement prepareStatement(String stmtString) throws TskCoreException, SQLException { + dbWriteLock(); + try { + if (isClosed()) { + throw new TskCoreException("The drawables database is closed"); + } + PreparedStatement statement = con.prepareStatement(stmtString); + preparedStatements.add(statement); + return statement; + } catch (SQLException ex) { + throw new SQLException(String.format("Error 
preparing statement %s", stmtString, ex)); + } finally { + dbWriteUnlock(); + } } /** @@ -322,13 +319,12 @@ public final class DrawableDB { * * @throws SQLExceptionif unable to prepare the statement */ - private PreparedStatement prepareStatement(String stmtString, DrawableAttribute attr) throws SQLException { - PreparedStatement prepareStatement = prepareStatement(stmtString); + private PreparedStatement prepareStatement(String stmtString, DrawableAttribute attr) throws TskCoreException, SQLException { + PreparedStatement statement = prepareStatement(stmtString); if (attr != null) { - groupStatementMap.put(attr, prepareStatement); + groupStatementMap.put(attr, statement); } - - return prepareStatement; + return statement; } /** @@ -344,13 +340,12 @@ public final class DrawableDB { * * @throws SQLExceptionif unable to prepare the statement */ - private PreparedStatement prepareFilterByDataSrcStatement(String stmtString, DrawableAttribute attr) throws SQLException { - PreparedStatement prepareStatement = prepareStatement(stmtString); + private PreparedStatement prepareFilterByDataSrcStatement(String stmtString, DrawableAttribute attr) throws TskCoreException, SQLException { + PreparedStatement statement = prepareStatement(stmtString); if (attr != null) { - groupStatementFilterByDataSrcMap.put(attr, prepareStatement); + groupStatementFilterByDataSrcMap.put(attr, statement); } - - return prepareStatement; + return statement; } private void setQueryParams(PreparedStatement statement, GroupKey groupKey) throws SQLException { @@ -358,7 +353,7 @@ public final class DrawableDB { statement.setObject(1, groupKey.getValue()); if (groupKey.getDataSource().isPresent() - && (groupKey.getAttribute() == DrawableAttribute.PATH)) { + && (groupKey.getAttribute() == DrawableAttribute.PATH)) { statement.setObject(2, groupKey.getDataSourceObjId()); } } @@ -377,95 +372,96 @@ public final class DrawableDB { */ public static DrawableDB getDrawableDB(ImageGalleryController controller) throws TskCoreException { Path dbPath = ImageGalleryModule.getModuleOutputDir(controller.getAutopsyCase()).resolve("drawable.db"); - boolean hasDataSourceObjIdColumn = hasDataSourceObjIdColumn(dbPath); try { - if (hasDataSourceObjIdColumn == false) { - Files.deleteIfExists(dbPath); - } + deleteDatabaseIfOlderVersion(dbPath); + } catch (SQLException ex) { + throw new TskCoreException("Failed to check for obsolete drawables database schema", ex); //NON-NLS } catch (IOException ex) { - throw new TskCoreException("Error deleting old database", ex); //NON-NLS + throw new TskCoreException("Failed to delete obsolete drawables database", ex); //NON-NLS } try { - return new DrawableDB(dbPath, controller); //NON-NLS - } catch (SQLException ex) { - throw new TskCoreException("SQL error creating database connection", ex); //NON-NLS + return new DrawableDB(dbPath, controller); } catch (IOException ex) { - throw new TskCoreException("Error creating database connection", ex); //NON-NLS + throw new TskCoreException("Failed to create drawables database directory", ex); //NON-NLS + } catch (SQLException ex) { + throw new TskCoreException("Failed to create/open the drawables database", ex); //NON-NLS } } - /** - * Check if the db at the given path has the data_source_obj_id column. If - * the db doesn't exist or doesn't even have the drawable_files table, this - * method returns false. - * - * NOTE: This method makes an ad-hoc connection to db, which has the side - * effect of creating the drawable.db file if it didn't already exist. 
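Throughout the reworked DrawableDB, statement preparation follows the same discipline: take the single exclusive lock, verify the connection is still open, then touch it, and release the lock in a finally block. A distilled sketch of that guard under those assumptions (class and member names are hypothetical; the patch throws TskCoreException rather than SQLException when closed):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

class GuardedConnection {

    private final Lock writeLock = new ReentrantReadWriteLock(true).writeLock();
    private Connection con; // null once closed

    PreparedStatement prepare(String sql) throws SQLException {
        writeLock.lock();
        try {
            if (con == null) {
                throw new SQLException("Database is closed");
            }
            return con.prepareStatement(sql);
        } finally {
            writeLock.unlock();
        }
    }
}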
- */ - private static boolean hasDataSourceObjIdColumn(Path dbPath) throws TskCoreException { - - try (Connection con = DriverManager.getConnection("jdbc:sqlite:" + dbPath.toString()); //NON-NLS - Statement stmt = con.createStatement();) { - boolean tableExists = false; - try (ResultSet results = stmt.executeQuery("SELECT name FROM sqlite_master WHERE type='table'");) {//NON-NLS - while (results.next()) { - if ("drawable_files".equals(results.getString("name"))) { - tableExists = true; - break; + private static void deleteDatabaseIfOlderVersion(Path dbPath) throws SQLException, IOException { + if (Files.exists(dbPath)) { + boolean hasDrawableFilesTable = false; + boolean hasDataSourceIdColumn = false; + try (Connection con = DriverManager.getConnection("jdbc:sqlite:" + dbPath.toString())) { + Statement stmt = con.createStatement(); + try (ResultSet tableQueryResults = stmt.executeQuery("SELECT name FROM sqlite_master WHERE type='table'")) { //NON-NLS + while (tableQueryResults.next()) { + if ("drawable_files".equals(tableQueryResults.getString("name"))) { + hasDrawableFilesTable = true; + break; + } + } + } + if (hasDrawableFilesTable) { + try (ResultSet results = stmt.executeQuery("PRAGMA table_info('drawable_files')")) { + while (results.next()) { + if ("data_source_obj_id".equals(results.getString("name"))) { + hasDataSourceIdColumn = true; + break; + } + } } } } - if (false == tableExists) { - return false; + if (!hasDrawableFilesTable || !hasDataSourceIdColumn) { + Files.delete(dbPath); } - try (ResultSet results = stmt.executeQuery("PRAGMA table_info('drawable_files')");) { //NON-NLS - while (results.next()) { - if ("data_source_obj_id".equals(results.getString("name"))) { - return true; - } - } - } - } catch (SQLException ex) { - throw new TskCoreException("SQL error checking database compatibility", ex); //NON-NLS } - return false; } private void setPragmas() throws SQLException { - - //this should match Sleuthkit db setupt - try (Statement statement = con.createStatement()) { - //reduce i/o operations, we have no OS crash recovery anyway - statement.execute("PRAGMA synchronous = OFF;"); //NON-NLS - //allow to query while in transaction - no need read locks - statement.execute("PRAGMA read_uncommitted = True;"); //NON-NLS - - //TODO: do we need this? - statement.execute("PRAGMA foreign_keys = ON"); //NON-NLS - - //TODO: test this - statement.execute("PRAGMA journal_mode = MEMORY"); //NON-NLS -// - //we don't use this feature, so turn it off for minimal speed up on queries - //this is deprecated and not recomended - statement.execute("PRAGMA count_changes = OFF;"); //NON-NLS - //this made a big difference to query speed - statement.execute("PRAGMA temp_store = MEMORY"); //NON-NLS - //this made a modest improvement in query speeds - statement.execute("PRAGMA cache_size = 50000"); //NON-NLS - //we never delete anything so... - statement.execute("PRAGMA auto_vacuum = 0"); //NON-NLS - } - + dbWriteLock(); try { - logger.log(Level.INFO, String.format("sqlite-jdbc version %s loaded in %s mode", //NON-NLS - SQLiteJDBCLoader.getVersion(), SQLiteJDBCLoader.isNativeMode() - ? 
"native" : "pure-java")); //NON-NLS - } catch (Exception exception) { - logger.log(Level.WARNING, "exception while checking sqlite-jdbc version and mode", exception); //NON-NLS - } + if (isClosed()) { + throw new SQLException("The drawables database is closed"); + } + //this should match Sleuthkit db setupt + try (Statement statement = con.createStatement()) { + //reduce i/o operations, we have no OS crash recovery anyway + statement.execute("PRAGMA synchronous = OFF;"); //NON-NLS + //allow to query while in transaction - no need read locks + statement.execute("PRAGMA read_uncommitted = True;"); //NON-NLS + + //TODO: do we need this? + statement.execute("PRAGMA foreign_keys = ON"); //NON-NLS + + //TODO: test this + statement.execute("PRAGMA journal_mode = MEMORY"); //NON-NLS + + //we don't use this feature, so turn it off for minimal speed up on queries + //this is deprecated and not recomended + statement.execute("PRAGMA count_changes = OFF;"); //NON-NLS + //this made a big difference to query speed + statement.execute("PRAGMA temp_store = MEMORY"); //NON-NLS + //this made a modest improvement in query speeds + statement.execute("PRAGMA cache_size = 50000"); //NON-NLS + //we never delete anything so... + statement.execute("PRAGMA auto_vacuum = 0"); //NON-NLS + } + + try { + logger.log(Level.INFO, String.format("sqlite-jdbc version %s loaded in %s mode", //NON-NLS + SQLiteJDBCLoader.getVersion(), SQLiteJDBCLoader.isNativeMode() + ? "native" : "pure-java")); //NON-NLS + } catch (Exception exception) { + logger.log(Level.SEVERE, "exception while checking sqlite-jdbc version and mode", exception); //NON-NLS + } + + } finally { + dbWriteUnlock(); + } } /** @@ -475,176 +471,208 @@ public final class DrawableDB { * existing table */ private boolean initializeDBSchema() { + dbWriteLock(); try { if (isClosed()) { - openDBCon(); + logger.log(Level.SEVERE, "The drawables database is closed"); //NON-NLS + return false; } - setPragmas(); - } catch (SQLException ex) { - logger.log(Level.SEVERE, "problem accessing database", ex); //NON-NLS - return false; + try { + setPragmas(); + } catch (SQLException ex) { + logger.log(Level.SEVERE, "Failed to set pragmas", ex); //NON-NLS + return false; + } + + /* + * Create tables in the drawables database. 
+ */ + try (Statement stmt = con.createStatement()) { + try { + String sql = "CREATE TABLE IF NOT EXISTS datasources " //NON-NLS + + "( id INTEGER PRIMARY KEY, " //NON-NLS + + " ds_obj_id integer UNIQUE NOT NULL, " + + " drawable_db_build_status VARCHAR(128) )"; //NON-NLS + stmt.execute(sql); + } catch (SQLException ex) { + logger.log(Level.SEVERE, "Failed to create datasources table", ex); //NON-NLS + return false; + } + + try { + String sql = "CREATE TABLE if not exists drawable_files " //NON-NLS + + "( obj_id INTEGER PRIMARY KEY, " //NON-NLS + + " data_source_obj_id INTEGER NOT NULL, " + + " path VARCHAR(255), " //NON-NLS + + " name VARCHAR(255), " //NON-NLS + + " created_time integer, " //NON-NLS + + " modified_time integer, " //NON-NLS + + " make VARCHAR(255), " //NON-NLS + + " model VARCHAR(255), " //NON-NLS + + " analyzed integer DEFAULT 0)"; //NON-NLS + stmt.execute(sql); + } catch (SQLException ex) { + logger.log(Level.SEVERE, "Failed to create drawable_files table", ex); //NON-NLS + return false; + } + + try { + String sql = "CREATE TABLE if not exists hash_sets " //NON-NLS + + "( hash_set_id INTEGER primary key," //NON-NLS + + " hash_set_name VARCHAR(255) UNIQUE NOT NULL)"; //NON-NLS + stmt.execute(sql); + } catch (SQLException ex) { + logger.log(Level.SEVERE, "Failed to create hash_sets table", ex); //NON-NLS + return false; + } + + try { + String sql = "CREATE TABLE if not exists hash_set_hits " //NON-NLS + + "(hash_set_id INTEGER REFERENCES hash_sets(hash_set_id) not null, " //NON-NLS + + " obj_id INTEGER REFERENCES drawable_files(obj_id) not null, " //NON-NLS + + " PRIMARY KEY (hash_set_id, obj_id))"; //NON-NLS + stmt.execute(sql); + } catch (SQLException ex) { + logger.log(Level.SEVERE, "Failed to create hash_set_hits table", ex); //NON-NLS + return false; + } + + try { + String sql = "CREATE INDEX if not exists path_idx ON drawable_files(path)"; //NON-NLS + stmt.execute(sql); + } catch (SQLException ex) { + logger.log(Level.WARNING, "Failed to create path_idx", ex); //NON-NLS + } + + try { + String sql = "CREATE INDEX if not exists name_idx ON drawable_files(name)"; //NON-NLS + stmt.execute(sql); + } catch (SQLException ex) { + logger.log(Level.WARNING, "Failed to create name_idx", ex); //NON-NLS + } + + try { + String sql = "CREATE INDEX if not exists make_idx ON drawable_files(make)"; //NON-NLS + stmt.execute(sql); + } catch (SQLException ex) { + logger.log(Level.WARNING, "Failed to create make_idx", ex); //NON-NLS + } + + try { + String sql = "CREATE INDEX if not exists model_idx ON drawable_files(model)"; //NON-NLS + stmt.execute(sql); + } catch (SQLException ex) { + logger.log(Level.WARNING, "Failed to create model_idx", ex); //NON-NLS + } + + try { + String sql = "CREATE INDEX if not exists analyzed_idx ON drawable_files(analyzed)"; //NON-NLS + stmt.execute(sql); + } catch (SQLException ex) { + logger.log(Level.WARNING, "Failed to create analyzed_idx", ex); //NON-NLS + } + } catch (SQLException ex) { + logger.log(Level.SEVERE, "Failed to create statement", ex); //NON-NLS + return false; + } + + /* + * Create tables in the case database. + */ + String autogenKeyType = (DbType.POSTGRESQL == tskCase.getDatabaseType()) ? 
"BIGSERIAL" : "INTEGER"; + try { + String tableSchema + = "( group_id " + autogenKeyType + " PRIMARY KEY, " //NON-NLS + + " data_source_obj_id integer DEFAULT 0, " + + " value VARCHAR(255) not null, " //NON-NLS + + " attribute VARCHAR(255) not null, " //NON-NLS + + " UNIQUE(data_source_obj_id, value, attribute) )"; //NON-NLS + + tskCase.getCaseDbAccessManager().createTable(GROUPS_TABLENAME, tableSchema); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create %s table in case database", GROUPS_TABLENAME), ex); //NON-NLS + return false; + } + try { + + String tableSchema + = "( id " + autogenKeyType + " PRIMARY KEY, " //NON-NLS + + " group_id integer not null, " //NON-NLS + + " examiner_id integer not null, " //NON-NLS + + " seen integer DEFAULT 0, " //NON-NLS + + " UNIQUE(group_id, examiner_id)," + + " FOREIGN KEY(group_id) REFERENCES " + GROUPS_TABLENAME + "(group_id)," + + " FOREIGN KEY(examiner_id) REFERENCES tsk_examiners(examiner_id)" + + " )"; //NON-NLS + + tskCase.getCaseDbAccessManager().createTable(GROUPS_SEEN_TABLENAME, tableSchema); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create %s table in case database", GROUPS_SEEN_TABLENAME), ex); //NON-NLS + return false; + } + + return true; + + } finally { + dbWriteUnlock(); } - - try (Statement stmt = con.createStatement()) { - String sql = "CREATE TABLE IF NOT EXISTS datasources " //NON-NLS - + "( id INTEGER PRIMARY KEY, " //NON-NLS - + " ds_obj_id integer UNIQUE NOT NULL, " - + " drawable_db_build_status VARCHAR(128) )"; //NON-NLS - stmt.execute(sql); - } catch (SQLException ex) { - logger.log(Level.SEVERE, "problem creating datasources table", ex); //NON-NLS - return false; - } - - try (Statement stmt = con.createStatement()) { - String sql = "CREATE TABLE if not exists drawable_files " //NON-NLS - + "( obj_id INTEGER PRIMARY KEY, " //NON-NLS - + " data_source_obj_id INTEGER NOT NULL, " - + " path VARCHAR(255), " //NON-NLS - + " name VARCHAR(255), " //NON-NLS - + " created_time integer, " //NON-NLS - + " modified_time integer, " //NON-NLS - + " make VARCHAR(255), " //NON-NLS - + " model VARCHAR(255), " //NON-NLS - + " analyzed integer DEFAULT 0)"; //NON-NLS - stmt.execute(sql); - } catch (SQLException ex) { - logger.log(Level.SEVERE, "problem creating drawable_files table", ex); //NON-NLS - return false; - } - - String autogenKeyType = (DbType.POSTGRESQL == tskCase.getDatabaseType()) ? 
"BIGSERIAL" : "INTEGER"; - - // The image_gallery_groups table is created in the Case Database - try { - String tableSchema - = "( group_id " + autogenKeyType + " PRIMARY KEY, " //NON-NLS - + " data_source_obj_id integer DEFAULT 0, " - + " value VARCHAR(255) not null, " //NON-NLS - + " attribute VARCHAR(255) not null, " //NON-NLS - + " UNIQUE(data_source_obj_id, value, attribute) )"; //NON-NLS - - tskCase.getCaseDbAccessManager().createTable(GROUPS_TABLENAME, tableSchema); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "problem creating groups table", ex); //NON-NLS - return false; - } - - // The image_gallery_groups_seen table is created in the Case Database - try { - - String tableSchema - = "( id " + autogenKeyType + " PRIMARY KEY, " //NON-NLS - + " group_id integer not null, " //NON-NLS - + " examiner_id integer not null, " //NON-NLS - + " seen integer DEFAULT 0, " //NON-NLS - + " UNIQUE(group_id, examiner_id)," - + " FOREIGN KEY(group_id) REFERENCES " + GROUPS_TABLENAME + "(group_id)," - + " FOREIGN KEY(examiner_id) REFERENCES tsk_examiners(examiner_id)" - + " )"; //NON-NLS - - tskCase.getCaseDbAccessManager().createTable(GROUPS_SEEN_TABLENAME, tableSchema); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "problem creating image_gallery_groups_seen table", ex); //NON-NLS - return false; - } - - try (Statement stmt = con.createStatement()) { - String sql = "CREATE TABLE if not exists hash_sets " //NON-NLS - + "( hash_set_id INTEGER primary key," //NON-NLS - + " hash_set_name VARCHAR(255) UNIQUE NOT NULL)"; //NON-NLS - stmt.execute(sql); - } catch (SQLException ex) { - logger.log(Level.SEVERE, "problem creating hash_sets table", ex); //NON-NLS - return false; - } - - try (Statement stmt = con.createStatement()) { - String sql = "CREATE TABLE if not exists hash_set_hits " //NON-NLS - + "(hash_set_id INTEGER REFERENCES hash_sets(hash_set_id) not null, " //NON-NLS - + " obj_id INTEGER REFERENCES drawable_files(obj_id) not null, " //NON-NLS - + " PRIMARY KEY (hash_set_id, obj_id))"; //NON-NLS - stmt.execute(sql); - } catch (SQLException ex) { - logger.log(Level.SEVERE, "problem creating hash_set_hits table", ex); //NON-NLS - return false; - } - - try (Statement stmt = con.createStatement()) { - String sql = "CREATE INDEX if not exists path_idx ON drawable_files(path)"; //NON-NLS - stmt.execute(sql); - } catch (SQLException ex) { - logger.log(Level.WARNING, "problem creating path_idx", ex); //NON-NLS - } - - try (Statement stmt = con.createStatement()) { - String sql = "CREATE INDEX if not exists name_idx ON drawable_files(name)"; //NON-NLS - stmt.execute(sql); - } catch (SQLException ex) { - logger.log(Level.WARNING, "problem creating name_idx", ex); //NON-NLS - } - - try (Statement stmt = con.createStatement()) { - String sql = "CREATE INDEX if not exists make_idx ON drawable_files(make)"; //NON-NLS - stmt.execute(sql); - } catch (SQLException ex) { - logger.log(Level.WARNING, "problem creating make_idx", ex); //NON-NLS - } - - try (Statement stmt = con.createStatement()) { - String sql = "CREATE INDEX if not exists model_idx ON drawable_files(model)"; //NON-NLS - stmt.execute(sql); - } catch (SQLException ex) { - logger.log(Level.WARNING, "problem creating model_idx", ex); //NON-NLS - } - - try (Statement stmt = con.createStatement()) { - String sql = "CREATE INDEX if not exists analyzed_idx ON drawable_files(analyzed)"; //NON-NLS - stmt.execute(sql); - } catch (SQLException ex) { - logger.log(Level.WARNING, "problem creating analyzed_idx", ex); //NON-NLS - } - 
- return true; } @Override - public void finalize() throws Throwable { + protected void finalize() throws Throwable { + /* + * This finalizer is a safety net for freeing this resource. See + * "Effective Java" by Joshua Block, Item #7. + */ + dbWriteLock(); try { - closeDBCon(); + if (!isClosed()) { + logger.log(Level.SEVERE, "Closing drawable.db in finalizer, this should never be necessary"); //NON-NLS + try { + close(); + } finally { + super.finalize(); + } + } } finally { - super.finalize(); + dbWriteUnlock(); } } - public void closeDBCon() { - if (con != null) { - try { - closeStatements(); - con.close(); - } catch (SQLException ex) { - logger.log(Level.WARNING, "Failed to close connection to drawable.db", ex); //NON-NLS - } - } - con = null; - } - - public void openDBCon() { + public void close() { + dbWriteLock(); try { - if (con == null || con.isClosed()) { - con = DriverManager.getConnection("jdbc:sqlite:" + dbPath.toString()); //NON-NLS + if (!isClosed()) { + logger.log(Level.INFO, "Closing the drawable.db"); //NON-NLS + for (PreparedStatement pStmt : preparedStatements) { + try { + pStmt.close(); + } catch (SQLException ex) { + logger.log(Level.SEVERE, String.format("Failed to close prepared statement %s for drawable.db", pStmt.toString()), ex); //NON-NLS + } + } + try { + con.close(); + } catch (SQLException ex) { + logger.log(Level.SEVERE, "Failed to close connection to drawable.db", ex); //NON-NLS + } } - } catch (SQLException ex) { - logger.log(Level.WARNING, "Failed to open connection to drawable.db", ex); //NON-NLS + } finally { + con = null; + dbWriteUnlock(); } } - public boolean isClosed() throws SQLException { - if (con == null) { - return true; + private boolean isClosed() { + dbWriteLock(); + try { + return ((con == null) || (con.isClosed())); + } catch (SQLException unused) { + return false; + } finally { + dbWriteUnlock(); } - return con.isClosed(); } /** @@ -681,7 +709,7 @@ public final class DrawableDB { public Set getHashSetNames() { Set names = new HashSet<>(); // "SELECT DISTINCT hash_set_name FROM hash_sets" - dbReadLock(); + dbWriteLock(); try (ResultSet rs = selectHashSetNamesStmt.executeQuery();) { while (rs.next()) { names.add(rs.getString(HASH_SET_NAME)); @@ -689,7 +717,7 @@ public final class DrawableDB { } catch (SQLException sQLException) { logger.log(Level.WARNING, "failed to get hash set names", sQLException); //NON-NLS } finally { - dbReadUnlock(); + dbWriteUnlock(); } return names; } @@ -697,7 +725,7 @@ public final class DrawableDB { static private String getGroupIdQuery(GroupKey groupKey) { // query to find the group id from attribute/value return String.format(" SELECT group_id FROM " + GROUPS_TABLENAME - + " WHERE attribute = \'%s\' AND value = \'%s\' AND data_source_obj_id = %d", + + " WHERE attribute = \'%s\' AND value = \'%s\' AND data_source_obj_id = %d", SleuthkitCase.escapeSingleQuotes(groupKey.getAttribute().attrName.toString()), SleuthkitCase.escapeSingleQuotes(groupKey.getValueDisplayName()), (groupKey.getAttribute() == DrawableAttribute.PATH) ? groupKey.getDataSourceObjId() : 0); @@ -742,14 +770,14 @@ public final class DrawableDB { } } } - // Callback to process result of seen query +// Callback to process result of seen query GroupSeenQueryResultProcessor queryResultProcessor = new GroupSeenQueryResultProcessor(); try { String groupSeenQueryStmt = "COUNT(*) as count FROM " + GROUPS_SEEN_TABLENAME - + " WHERE seen = 1 " - + " AND group_id in ( " + getGroupIdQuery(groupKey) + ")" - + (examinerId > 0 ? 
" AND examiner_id = " + examinerId : "");// query to find the group id from attribute/value + + " WHERE seen = 1 " + + " AND group_id in ( " + getGroupIdQuery(groupKey) + ")" + + (examinerId > 0 ? " AND examiner_id = " + examinerId : "");// query to find the group id from attribute/value tskCase.getCaseDbAccessManager().select(groupSeenQueryStmt, queryResultProcessor); return queryResultProcessor.get(); @@ -775,7 +803,7 @@ public final class DrawableDB { // query to find the group id from attribute/value String innerQuery = String.format("( SELECT group_id FROM " + GROUPS_TABLENAME - + " WHERE attribute = \'%s\' AND value = \'%s\' and data_source_obj_id = %d )", + + " WHERE attribute = \'%s\' AND value = \'%s\' and data_source_obj_id = %d )", SleuthkitCase.escapeSingleQuotes(groupKey.getAttribute().attrName.toString()), SleuthkitCase.escapeSingleQuotes(groupKey.getValueDisplayName()), groupKey.getAttribute() == DrawableAttribute.PATH ? groupKey.getDataSourceObjId() : 0); @@ -790,76 +818,97 @@ public final class DrawableDB { } - public boolean removeFile(long id) { - DrawableTransaction trans = beginTransaction(); - boolean removeFile = removeFile(id, trans); - commitTransaction(trans, true); - return removeFile; + /** + * Removes a file from the drawables databse. + * + * @param id The object id of the file. + * + * @return True or false. + * + * @throws TskCoreException + * @throws SQLException + */ + public void removeFile(long id) throws TskCoreException, SQLException { + DrawableTransaction trans = null; + try { + trans = beginTransaction(); + removeFile(id, trans); + commitTransaction(trans, true); + } catch (TskCoreException | SQLException ex) { + if (null != trans) { + try { + rollbackTransaction(trans); + } catch (SQLException ex2) { + logger.log(Level.SEVERE, String.format("Failed to roll back drawables db transaction after error: %s", ex.getMessage()), ex2); //NON-NLS + } + } + throw ex; + } } - public void updateFile(DrawableFile f) { + public void updateFile(DrawableFile f) throws TskCoreException, SQLException { DrawableTransaction trans = null; CaseDbTransaction caseDbTransaction = null; - try { trans = beginTransaction(); caseDbTransaction = tskCase.beginTransaction(); updateFile(f, trans, caseDbTransaction); caseDbTransaction.commit(); - caseDbTransaction = null; commitTransaction(trans, true); - trans = null; - - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error updating file", ex); //NON-NLS - } - finally { + } catch (TskCoreException | SQLException ex) { if (null != caseDbTransaction) { try { caseDbTransaction.rollback(); } catch (TskCoreException ex2) { - logger.log(Level.SEVERE, "Error in trying to rollback transaction", ex2); //NON-NLS + logger.log(Level.SEVERE, String.format("Failed to roll back case db transaction after error: %s", ex.getMessage()), ex2); //NON-NLS } } if (null != trans) { - rollbackTransaction(trans); + try { + rollbackTransaction(trans); + } catch (SQLException ex2) { + logger.log(Level.SEVERE, String.format("Failed to roll back drawables db transaction after error: %s", ex.getMessage()), ex2); //NON-NLS + } } + throw ex; } - } /** - * Insert basic file data (no groups) into the DB during pre-population phase + * Insert basic file data (no groups) into the DB during pre-population + * phase + * * @param f * @param tr - * @param caseDbTransaction + * @param caseDbTransaction */ public void insertBasicFileData(DrawableFile f, DrawableTransaction tr, CaseDbTransaction caseDbTransaction) { insertOrUpdateFile(f, tr, caseDbTransaction, 
false); } /** - * Update an existing entry (or make a new one) into the DB that includes group information. - * Called when a file has been analyzed or during a bulk rebuild - * + * Update an existing entry (or make a new one) into the DB that includes + * group information. Called when a file has been analyzed or during a bulk + * rebuild + * * @param f * @param tr - * @param caseDbTransaction + * @param caseDbTransaction */ public void updateFile(DrawableFile f, DrawableTransaction tr, CaseDbTransaction caseDbTransaction) { insertOrUpdateFile(f, tr, caseDbTransaction, true); } - - + /** * Populate caches based on current state of Case DB */ public void buildFileMetaDataCache() { - - synchronized (cacheLock) { + + synchronized (cacheLock) { cacheBuildCount++; - if (areCachesLoaded == true) + if (areCachesLoaded == true) { return; + } try { // get tags @@ -911,54 +960,61 @@ public final class DrawableDB { areCachesLoaded = true; } } - + /** * Add a file to cache of files that have EXIF data + * * @param objectID ObjId of file with EXIF */ public void addExifCache(long objectID) { synchronized (cacheLock) { // bail out if we are not maintaining caches - if (cacheBuildCount == 0) + if (cacheBuildCount == 0) { return; + } hasExifCache.add(objectID); } } - + /** * Add a file to cache of files that have hash set hits + * * @param objectID ObjId of file with hash set */ public void addHashSetCache(long objectID) { synchronized (cacheLock) { // bail out if we are not maintaining caches - if (cacheBuildCount == 0) + if (cacheBuildCount == 0) { return; + } hasHashCache.add(objectID); } } - + /** * Add a file to cache of files that have tags + * * @param objectID ObjId of file with tags */ public void addTagCache(long objectID) { - synchronized (cacheLock) { + synchronized (cacheLock) { // bail out if we are not maintaining caches - if (cacheBuildCount == 0) + if (cacheBuildCount == 0) { return; + } hasTagCache.add(objectID); - } + } } - + /** * Free the cached case DB data */ public void freeFileMetaDataCache() { synchronized (cacheLock) { // dont' free these if there is another task still using them - if (--cacheBuildCount > 0) + if (--cacheBuildCount > 0) { return; + } areCachesLoaded = false; hasTagCache.clear(); @@ -976,26 +1032,27 @@ public final class DrawableDB { * //TODO: this is a kinda weird design, is their a better way? //TODO: * implement batch version -jm * - * @param f The file to insert. - * @param tr a transaction to use, must not be null + * @param f The file to insert. + * @param tr a transaction to use, must not be null * @param caseDbTransaction - * @param addGroups True if groups for file should be inserted into db too + * @param addGroups True if groups for file should be inserted into + * db too */ private void insertOrUpdateFile(DrawableFile f, @Nonnull DrawableTransaction tr, @Nonnull CaseDbTransaction caseDbTransaction, boolean addGroups) { PreparedStatement stmt; - - if (tr.isClosed()) { + + if (tr.isCompleted()) { throw new IllegalArgumentException("can't update database with closed transaction"); } - + // assume that we are doing an update if we are adding groups - i.e. not pre-populating if (addGroups) { stmt = updateFileStmt; } else { stmt = insertFileStmt; } - + // get data from caches. 
Default to true and force the DB lookup if we don't have caches boolean hasExif = true; boolean hasHashSet = true; @@ -1007,7 +1064,7 @@ public final class DrawableDB { hasTag = hasTagCache.contains(f.getId()); } } - + // if we are going to just add basic data, then mark flags that we do not have metadata to prevent lookups if (addGroups == false) { hasExif = false; @@ -1033,13 +1090,13 @@ public final class DrawableDB { } stmt.setBoolean(9, f.isAnalyzed()); stmt.executeUpdate(); - + // Update the list of file IDs in memory addImageFileToList(f.getId()); // update the groups if we are not doing pre-populating if (addGroups) { - + // Update the hash set tables if (hasHashSet) { try { @@ -1073,8 +1130,7 @@ public final class DrawableDB { // skip attributes that we do not have data for if ((attr == DrawableAttribute.TAGS) && (hasTag == false)) { continue; - } - else if ((attr == DrawableAttribute.MAKE || attr == DrawableAttribute.MODEL) && (hasExif == false)) { + } else if ((attr == DrawableAttribute.MAKE || attr == DrawableAttribute.MODEL) && (hasExif == false)) { continue; } Collection> vals = attr.getValue(f); @@ -1082,8 +1138,7 @@ public final class DrawableDB { if ((null != val) && (val.toString().isEmpty() == false)) { if (attr == DrawableAttribute.PATH) { insertGroup(f.getAbstractFile().getDataSource().getId(), val.toString(), attr, caseDbTransaction); - } - else { + } else { insertGroup(val.toString(), attr, caseDbTransaction); } } @@ -1118,25 +1173,20 @@ public final class DrawableDB { */ public Map getDataSourceDbBuildStatus() throws TskCoreException { Statement statement = null; - ResultSet rs = null; Map map = new HashMap<>(); - dbReadLock(); + dbWriteLock(); try { + if (isClosed()) { + throw new TskCoreException("The drawables database is closed"); + } statement = con.createStatement(); - rs = statement.executeQuery("SELECT ds_obj_id, drawable_db_build_status FROM datasources "); //NON-NLS + ResultSet rs = statement.executeQuery("SELECT ds_obj_id, drawable_db_build_status FROM datasources "); //NON-NLS while (rs.next()) { map.put(rs.getLong("ds_obj_id"), DrawableDbBuildStatusEnum.valueOf(rs.getString("drawable_db_build_status"))); } } catch (SQLException e) { throw new TskCoreException("SQLException while getting data source object ids", e); } finally { - if (rs != null) { - try { - rs.close(); - } catch (SQLException ex) { - logger.log(Level.SEVERE, "Error closing resultset", ex); //NON-NLS - } - } if (statement != null) { try { statement.close(); @@ -1144,7 +1194,7 @@ public final class DrawableDB { logger.log(Level.SEVERE, "Error closing statement ", ex); //NON-NLS } } - dbReadUnlock(); + dbWriteUnlock(); } return map; } @@ -1173,94 +1223,71 @@ public final class DrawableDB { } } - public DrawableTransaction beginTransaction() { + public DrawableTransaction beginTransaction() throws TskCoreException, SQLException { return new DrawableTransaction(); } /** - * + * * @param tr * @param notifyGM If true, notify GroupManager about the changes. 
*/ - public void commitTransaction(DrawableTransaction tr, Boolean notifyGM) { - if (tr.isClosed()) { - throw new IllegalArgumentException("can't close already closed transaction"); + public void commitTransaction(DrawableTransaction tr, Boolean notifyGM) throws SQLException { + if (tr.isCompleted()) { + throw new IllegalArgumentException("Attempt to commit completed transaction"); } tr.commit(notifyGM); } - public void rollbackTransaction(DrawableTransaction tr) { - if (tr.isClosed()) { - throw new IllegalArgumentException("can't rollback already closed transaction"); + public void rollbackTransaction(DrawableTransaction tr) throws SQLException { + if (tr.isCompleted()) { + throw new IllegalArgumentException("Attempt to roll back completed transaction"); } tr.rollback(); } - public Boolean isFileAnalyzed(DrawableFile f) { - return isFileAnalyzed(f.getId()); - } - - public Boolean isFileAnalyzed(long fileId) { - dbReadLock(); - try (Statement stmt = con.createStatement(); - ResultSet analyzedQuery = stmt.executeQuery("SELECT analyzed FROM drawable_files WHERE obj_id = " + fileId)) { //NON-NLS - while (analyzedQuery.next()) { - return analyzedQuery.getBoolean(ANALYZED); - } - } catch (SQLException ex) { - String msg = String.format("Failed to determine if file %s is finalized", String.valueOf(fileId)); //NON-NLS - logger.log(Level.WARNING, msg, ex); - } finally { - dbReadUnlock(); - } - - return false; - } - - public Boolean areFilesAnalyzed(Collection fileIds) { - - dbReadLock(); - try (Statement stmt = con.createStatement(); - //Can't make this a preprared statement because of the IN ( ... ) - ResultSet analyzedQuery = stmt.executeQuery("SELECT COUNT(analyzed) AS analyzed FROM drawable_files WHERE analyzed = 1 AND obj_id IN (" + StringUtils.join(fileIds, ", ") + ")")) { //NON-NLS - while (analyzedQuery.next()) { - return analyzedQuery.getInt(ANALYZED) == fileIds.size(); - } - } catch (SQLException ex) { - logger.log(Level.WARNING, "problem counting analyzed files: ", ex); //NON-NLS - } finally { - dbReadUnlock(); - } - - return false; - } - - public Boolean isGroupAnalyzed(GroupKey gk) { - dbReadLock(); + public Boolean areFilesAnalyzed(Collection fileIds) throws SQLException { + dbWriteLock(); try { - Set fileIDsInGroup = getFileIDsInGroup(gk); - try { + if (isClosed()) { + throw new SQLException("The drawables database is closed"); + } + try (Statement stmt = con.createStatement()) { + //Can't make this a preprared statement because of the IN ( ... 
) + ResultSet analyzedQuery = stmt.executeQuery("SELECT COUNT(analyzed) AS analyzed FROM drawable_files WHERE analyzed = 1 AND obj_id IN (" + StringUtils.join(fileIds, ", ") + ")"); //NON-NLS + while (analyzedQuery.next()) { + return analyzedQuery.getInt(ANALYZED) == fileIds.size(); + } + return false; + } + } finally { + dbWriteUnlock(); + } + } + + public Boolean isGroupAnalyzed(GroupKey gk) throws SQLException, TskCoreException { + dbWriteLock(); + try { + if (isClosed()) { + throw new SQLException("The drawables database is closed"); + } + try (Statement stmt = con.createStatement()) { // In testing, this method appears to be a lot faster than doing one large select statement + Set fileIDsInGroup = getFileIDsInGroup(gk); for (Long fileID : fileIDsInGroup) { - Statement stmt = con.createStatement(); ResultSet analyzedQuery = stmt.executeQuery("SELECT analyzed FROM drawable_files WHERE obj_id = " + fileID); //NON-NLS while (analyzedQuery.next()) { if (analyzedQuery.getInt(ANALYZED) == 0) { return false; } } - return true; + return true; // THIS APPEARS TO BE A BUG (see JIRA-1130), THE FOR LOOP EXECUTES AT MOST ONCE } - - } catch (SQLException ex) { - logger.log(Level.WARNING, "problem counting analyzed files: ", ex); //NON-NLS } - } catch (TskCoreException tskCoreException) { - logger.log(Level.WARNING, "problem counting analyzed files: ", tskCoreException); //NON-NLS + return false; } finally { - dbReadUnlock(); + dbWriteUnlock(); } - return false; } /** @@ -1276,21 +1303,24 @@ public final class DrawableDB { * @throws TskCoreException */ public Set findAllFileIdsWhere(String sqlWhereClause) throws TskCoreException { - - Set ret = new HashSet<>(); - dbReadLock(); - try (Statement statement = con.createStatement(); - ResultSet rs = statement.executeQuery("SELECT obj_id FROM drawable_files WHERE " + sqlWhereClause);) { - while (rs.next()) { - ret.add(rs.getLong(1)); + dbWriteLock(); + try { + if (isClosed()) { + throw new TskCoreException("The drawables database is closed"); + } + try (Statement statement = con.createStatement()) { + ResultSet rs = statement.executeQuery("SELECT obj_id FROM drawable_files WHERE " + sqlWhereClause); + Set ret = new HashSet<>(); + while (rs.next()) { + ret.add(rs.getLong(1)); + } + return ret; + } catch (SQLException ex) { + throw new TskCoreException(String.format("Failed to query file id for WHERE clause %s", sqlWhereClause), ex); } - } catch (SQLException e) { - throw new TskCoreException("SQLException thrown when calling 'DrawableDB.findAllFileIdsWhere(): " + sqlWhereClause, e); } finally { - - dbReadUnlock(); + dbWriteUnlock(); } - return ret; } /** @@ -1305,14 +1335,19 @@ public final class DrawableDB { * @throws TskCoreException */ public long countFilesWhere(String sqlWhereClause) throws TskCoreException { - dbReadLock(); - try (Statement statement = con.createStatement(); - ResultSet rs = statement.executeQuery("SELECT COUNT(*) AS COUNT FROM drawable_files WHERE " + sqlWhereClause);) { - return rs.getLong("COUNT"); - } catch (SQLException e) { - throw new TskCoreException("SQLException thrown when calling 'DrawableDB.countFilesWhere(): " + sqlWhereClause, e); + dbWriteLock(); + try { + if (isClosed()) { + throw new TskCoreException("The drawables database is closed"); + } + try (Statement statement = con.createStatement()) { + ResultSet rs = statement.executeQuery("SELECT COUNT(*) AS COUNT FROM drawable_files WHERE " + sqlWhereClause); + return rs.getLong("COUNT"); + } catch (SQLException e) { + throw new TskCoreException("SQLException thrown when 
calling 'DrawableDB.countFilesWhere(): " + sqlWhereClause, e); + } } finally { - dbReadUnlock(); + dbWriteUnlock(); } } @@ -1327,15 +1362,14 @@ public final class DrawableDB { * @param sortOrder Sort ascending or descending. * @param dataSource * - * @return Map of data source (or null of group by attribute ignores data sources) to list of unique group values + * @return Map of data source (or null of group by attribute ignores data + * sources) to list of unique group values * * @throws org.sleuthkit.datamodel.TskCoreException */ @SuppressWarnings("unchecked") public > Multimap findValuesForAttribute(DrawableAttribute groupBy, GroupSortBy sortBy, SortOrder sortOrder, DataSource dataSource) throws TskCoreException { - Multimap values = HashMultimap.create(); - switch (groupBy.attrName) { case ANALYZED: case CATEGORY: @@ -1344,76 +1378,69 @@ public final class DrawableDB { //they should have special handling at a higher level of the stack. throw new UnsupportedOperationException(); default: - dbReadLock(); - //TODO: convert this to prepared statement - - StringBuilder query = new StringBuilder("SELECT data_source_obj_id, " + groupBy.attrName.toString() + ", COUNT(*) FROM drawable_files "); //NON-NLS - - if (dataSource != null) { - query.append(" WHERE data_source_obj_id = ").append(dataSource.getId()); - } - - query.append(" GROUP BY data_source_obj_id, ").append(groupBy.attrName.toString()); - - String orderByClause = ""; - - if (sortBy == GROUP_BY_VALUE) { - orderByClause = " ORDER BY " + groupBy.attrName.toString(); - } else if (sortBy == GroupSortBy.FILE_COUNT) { - orderByClause = " ORDER BY COUNT(*)"; - } - - query.append(orderByClause); - - if (orderByClause.isEmpty() == false) { - String sortOrderClause = ""; - - switch (sortOrder) { - case DESCENDING: - sortOrderClause = " DESC"; //NON-NLS - break; - case ASCENDING: - sortOrderClause = " ASC"; //NON-NLS - break; - default: - orderByClause = ""; + dbWriteLock(); + try { + if (isClosed()) { + throw new TskCoreException("The drawables database is closed"); } - query.append(sortOrderClause); - } + //TODO: convert this to prepared statement + StringBuilder query = new StringBuilder("SELECT data_source_obj_id, " + groupBy.attrName.toString() + ", COUNT(*) FROM drawable_files "); //NON-NLS - try (Statement stmt = con.createStatement(); - ResultSet results = stmt.executeQuery(query.toString())) { - while (results.next()) { - /* - * I don't like that we have to do this cast to A here, - * but can't think of a better alternative at the - * momment unless something has gone seriously wrong, we - * know this should be of type A even if JAVA doesn't - */ - values.put(tskCase.getDataSource(results.getLong("data_source_obj_id")), - (A) results.getObject(groupBy.attrName.toString())); + if (dataSource != null) { + query.append(" WHERE data_source_obj_id = ").append(dataSource.getId()); } - } catch (SQLException ex) { - if (!(ex.getCause() instanceof java.lang.InterruptedException)) { - /* It seems like this originaly comes out of c3p0 when - * its thread is intereupted (cancelled because of - * regroup). It should be safe to just swallow this and - * move on. 
- * - * see - * https://sourceforge.net/p/c3p0/mailman/c3p0-users/thread/EBB32BB8-6487-43AF-B291-9464C9051869@mchange.com/ - */ + query.append(" GROUP BY data_source_obj_id, ").append(groupBy.attrName.toString()); + + String orderByClause = ""; + + if (sortBy == GROUP_BY_VALUE) { + orderByClause = " ORDER BY " + groupBy.attrName.toString(); + } else if (sortBy == GroupSortBy.FILE_COUNT) { + orderByClause = " ORDER BY COUNT(*)"; + } + + query.append(orderByClause); + + if (orderByClause.isEmpty() == false) { + String sortOrderClause = ""; + + switch (sortOrder) { + case DESCENDING: + sortOrderClause = " DESC"; //NON-NLS + break; + case ASCENDING: + sortOrderClause = " ASC"; //NON-NLS + break; + default: + orderByClause = ""; + } + query.append(sortOrderClause); + } + + try (Statement stmt = con.createStatement()) { + ResultSet results = stmt.executeQuery(query.toString()); + Multimap values = HashMultimap.create(); + while (results.next()) { + /* + * I don't like that we have to do this cast to A + * here, but can't think of a better alternative at + * the momment unless something has gone seriously + * wrong, we know this should be of type A even if + * JAVA doesn't + */ + values.put(tskCase.getDataSource(results.getLong("data_source_obj_id")), + (A) results.getObject(groupBy.attrName.toString())); + } + return values; + } catch (SQLException | TskDataException ex) { throw new TskCoreException("Unable to get values for attribute", ex); //NON-NLS } - } catch (TskDataException ex) { - throw new TskCoreException("Unable to get values for attribute", ex); //NON-NLS + } finally { - dbReadUnlock(); + dbWriteUnlock(); } } - - return values; } /** @@ -1422,8 +1449,10 @@ public final class DrawableDB { * @param value Value of the group (unique to the type) * @param groupBy Type of the grouping (CATEGORY, MAKE, etc.) * @param caseDbTransaction transaction to use for CaseDB insert/updates + * + * @throws TskCoreException */ - private void insertGroup(final String value, DrawableAttribute groupBy, CaseDbTransaction caseDbTransaction) { + private void insertGroup(final String value, DrawableAttribute groupBy, CaseDbTransaction caseDbTransaction) throws TskCoreException { insertGroup(0, value, groupBy, caseDbTransaction); } @@ -1435,27 +1464,23 @@ public final class DrawableDB { * @param groupBy Type of the grouping (CATEGORY, MAKE, etc.) * @param caseDbTransaction transaction to use for CaseDB insert/updates */ - private void insertGroup(long ds_obj_id, final String value, DrawableAttribute groupBy, CaseDbTransaction caseDbTransaction) { - // don't waste DB round trip if we recently added it + private void insertGroup(long ds_obj_id, final String value, DrawableAttribute groupBy, CaseDbTransaction caseDbTransaction) throws TskCoreException { + /* + * Check the groups cache to see if the group has already been added to + * the case database. 
+ */ String cacheKey = Long.toString(ds_obj_id) + "_" + value + "_" + groupBy.getDisplayName(); - if (groupCache.getIfPresent(cacheKey) != null) + if (groupCache.getIfPresent(cacheKey) != null) { return; - - try { - String insertSQL = String.format(" (data_source_obj_id, value, attribute) VALUES (%d, \'%s\', \'%s\')", - ds_obj_id, SleuthkitCase.escapeSingleQuotes(value), SleuthkitCase.escapeSingleQuotes(groupBy.attrName.toString())); - - if (DbType.POSTGRESQL == tskCase.getDatabaseType()) { - insertSQL += " ON CONFLICT DO NOTHING"; - } - tskCase.getCaseDbAccessManager().insert(GROUPS_TABLENAME, insertSQL, caseDbTransaction); - groupCache.put(cacheKey, Boolean.TRUE); - } catch (TskCoreException ex) { - // Don't need to report it if the case was closed - if (Case.isCaseOpen()) { - logger.log(Level.SEVERE, "Unable to insert group", ex); //NON-NLS - } } + + String insertSQL = String.format(" (data_source_obj_id, value, attribute) VALUES (%d, \'%s\', \'%s\')", + ds_obj_id, SleuthkitCase.escapeSingleQuotes(value), SleuthkitCase.escapeSingleQuotes(groupBy.attrName.toString())); + if (DbType.POSTGRESQL == tskCase.getDatabaseType()) { + insertSQL += " ON CONFLICT DO NOTHING"; + } + tskCase.getCaseDbAccessManager().insert(GROUPS_TABLENAME, insertSQL, caseDbTransaction); + groupCache.put(cacheKey, Boolean.TRUE); } /** @@ -1467,13 +1492,11 @@ public final class DrawableDB { * {@link SleuthkitCase} */ public DrawableFile getFileFromID(Long id) throws TskCoreException { + AbstractFile f = tskCase.getAbstractFileById(id); try { - AbstractFile f = tskCase.getAbstractFileById(id); - return DrawableFile.create(f, - areFilesAnalyzed(Collections.singleton(id)), isVideoFile(f)); - } catch (IllegalStateException ex) { - logger.log(Level.SEVERE, "there is no case open; failed to load file with id: {0}", id); //NON-NLS - throw new TskCoreException("there is no case open; failed to load file with id: " + id, ex); + return DrawableFile.create(f, areFilesAnalyzed(Collections.singleton(id)), isVideoFile(f)); + } catch (SQLException ex) { + throw new TskCoreException(String.format("Failed to get file (id=%d)", id), ex); } } @@ -1490,7 +1513,7 @@ public final class DrawableDB { } } Set files = new HashSet<>(); - dbReadLock(); + dbWriteLock(); try { PreparedStatement statement = getGroupStatment(groupKey); setQueryParams(statement, groupKey); @@ -1503,18 +1526,12 @@ public final class DrawableDB { } catch (SQLException ex) { logger.log(Level.WARNING, "failed to get file for group:" + groupKey.getAttribute() + " == " + groupKey.getValue(), ex); //NON-NLS } finally { - dbReadUnlock(); + dbWriteUnlock(); } return files; } - private void closeStatements() throws SQLException { - for (PreparedStatement pStmt : preparedStatements) { - pStmt.close(); - } - } - private PreparedStatement getGroupStatment(GroupKey groupKey) { DrawableAttribute groupBy = groupKey.getAttribute(); if ((groupBy == DrawableAttribute.PATH) && groupKey.getDataSource().isPresent()) { @@ -1541,16 +1558,12 @@ public final class DrawableDB { * delete the row with obj_id = id. * * @param id the obj_id of the row to be deleted - * - * @return true if a row was deleted, 0 if not. 
*/ - public boolean removeFile(long id, DrawableTransaction tr) { - if (tr.isClosed()) { - throw new IllegalArgumentException("can't update database with closed transaction"); + public void removeFile(long id, DrawableTransaction tr) { + if (tr.isCompleted()) { + throw new IllegalArgumentException("Attempt to use a completed transaction"); } - int valsResults = 0; dbWriteLock(); - try { // Update the list of file IDs in memory removeImageFileFromList(id); @@ -1558,7 +1571,7 @@ public final class DrawableDB { //"delete from hash_set_hits where (obj_id = " + id + ")" removeHashHitStmt.setLong(1, id); removeHashHitStmt.executeUpdate(); - + //"delete from drawable_files where (obj_id = " + id + ")" removeFileStmt.setLong(1, id); removeFileStmt.executeUpdate(); @@ -1568,11 +1581,8 @@ public final class DrawableDB { logger.log(Level.WARNING, "failed to delete row for obj_id = " + id, ex); //NON-NLS } finally { dbWriteUnlock(); + } - - //indicates succesfull removal of 1 file - return valsResults == 1; - } public class MultipleTransactionException extends IllegalStateException { @@ -1614,19 +1624,25 @@ public final class DrawableDB { } } - private void initializeImageList() { - synchronized (fileIDsInDB) { - dbReadLock(); - try (Statement stmt = con.createStatement(); - ResultSet analyzedQuery = stmt.executeQuery("select obj_id from drawable_files");) { + private boolean initializeImageList() { + dbWriteLock(); + try { + if (isClosed()) { + logger.log(Level.SEVERE, "The drawables database is closed"); //NON-NLS + return false; + } + try (Statement stmt = con.createStatement()) { + ResultSet analyzedQuery = stmt.executeQuery("select obj_id from drawable_files"); while (analyzedQuery.next()) { addImageFileToList(analyzedQuery.getLong(OBJ_ID)); } + return true; } catch (SQLException ex) { - logger.log(Level.WARNING, "problem loading file IDs: ", ex); //NON-NLS - } finally { - dbReadUnlock(); + logger.log(Level.SEVERE, "Failed to add image file object ids in drawables database to cache", ex); //NON-NLS + return false; } + } finally { + dbWriteUnlock(); } } @@ -1718,7 +1734,7 @@ public final class DrawableDB { //count the file ids that are in the given list and don't have a non-zero category assigned to them. String name = "SELECT COUNT(obj_id) as obj_count FROM tsk_files where obj_id IN " + fileIdsList //NON-NLS - + " AND obj_id NOT IN (SELECT obj_id FROM content_tags WHERE content_tags.tag_name_id IN " + catTagNameIDs + ")"; //NON-NLS + + " AND obj_id NOT IN (SELECT obj_id FROM content_tags WHERE content_tags.tag_name_id IN " + catTagNameIDs + ")"; //NON-NLS try (SleuthkitCase.CaseDbQuery executeQuery = tskCase.executeQuery(name); ResultSet resultSet = executeQuery.getResultSet();) { while (resultSet.next()) { @@ -1729,66 +1745,67 @@ public final class DrawableDB { } return -1; + } /** - * inner class that can reference access database connection + * Encapsulates a drawables database transaction that uses the enclosing + * DrawableDB object's single JDBC connection. The transaction is begun when + * the DrawableTransaction object is created; clients MUST call either + * commit or rollback. + * + * IMPORTANT: This transaction must be thread-confined. It acquires and + * release a lock specific to a single thread. 
*/ public class DrawableTransaction { - private final Set updatedFiles; + private final Set updatedFiles = new HashSet<>(); + private final Set removedFiles = new HashSet<>(); + private boolean completed; - private final Set removedFiles; - - private boolean closed = false; - - /** - * factory creation method - * - * @param con the {@link ava.sql.Connection} - * - * @return a LogicalFileTransaction for the given connection - * - * @throws SQLException - */ - private DrawableTransaction() { - this.updatedFiles = new HashSet<>(); - this.removedFiles = new HashSet<>(); - //get the write lock, released in close() - dbWriteLock(); + private DrawableTransaction() throws TskCoreException, SQLException { + dbWriteLock(); // Normally released when commit or rollback is called. + if (DrawableDB.this.isClosed()) { + dbWriteUnlock(); + throw new TskCoreException("The drawables database is closed"); + } try { con.setAutoCommit(false); - + completed = false; } catch (SQLException ex) { - logger.log(Level.SEVERE, "failed to set auto-commit to to false", ex); //NON-NLS + completed = true; + dbWriteUnlock(); + throw new SQLException("Failed to begin transaction", ex); } - } - synchronized public void rollback() { - if (!closed) { + synchronized public void rollback() throws SQLException { + if (!completed) { try { - con.rollback(); updatedFiles.clear(); - } catch (SQLException ex1) { - logger.log(Level.SEVERE, "Exception while attempting to rollback!!", ex1); //NON-NLS + con.rollback(); } finally { - close(); + complete(); } } } /** * Commit changes that happened during this transaction - * - * @param notifyGM If true, notify GroupManager about the changes. + * + * @param notifyGM If true, notify GroupManager about the changes. */ - synchronized private void commit(Boolean notifyGM) { - if (!closed) { + synchronized public void commit(Boolean notifyGM) throws SQLException { + if (!completed) { try { + con.commit(); - // make sure we close before we update, bc they'll need locks - close(); + + /* + * Need to close the transaction before notifying the Group + * Manager, so that the lock is released. 
+ */ + complete(); if (notifyGM) { if (groupManager != null) { @@ -1797,35 +1814,27 @@ public final class DrawableDB { } } } catch (SQLException ex) { - if (Case.isCaseOpen()) { - logger.log(Level.SEVERE, "Error commiting drawable.db.", ex); //NON-NLS - } else { - logger.log(Level.WARNING, "Error commiting drawable.db - case is closed."); //NON-NLS - } + logger.log(Level.SEVERE, "Failed to commit transaction, will attempt rollback", ex); //NON-NLS rollback(); } } } - synchronized private void close() { - if (!closed) { + synchronized private void complete() { + if (!completed) { try { con.setAutoCommit(true); } catch (SQLException ex) { - if (Case.isCaseOpen()) { - logger.log(Level.SEVERE, "Error setting auto-commit to true.", ex); //NON-NLS - } else { - logger.log(Level.SEVERE, "Error setting auto-commit to true - case is closed"); //NON-NLS - } + logger.log(Level.SEVERE, "Failed to set auto-commit to false", ex); //NON-NLS } finally { - closed = true; + completed = true; dbWriteUnlock(); } } } - synchronized public Boolean isClosed() { - return closed; + synchronized private Boolean isCompleted() { + return completed; } synchronized private void addUpdatedFile(Long f) { diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/HashSetManager.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/HashSetManager.java index ffcf307f21..c94492c39f 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/HashSetManager.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/HashSetManager.java @@ -33,7 +33,9 @@ import org.sleuthkit.datamodel.TskCoreException; */ public class HashSetManager { - /** The db that initial values are loaded from. */ + /** + * The db that initial values are loaded from. + */ private final DrawableDB drawableDB; public HashSetManager(DrawableDB drawableDB) { @@ -54,14 +56,9 @@ public class HashSetManager { */ private Set getHashSetsForFileHelper(long fileID) { try { - if (drawableDB.isClosed()) { - Logger.getLogger(HashSetManager.class.getName()).log(Level.WARNING, "Failed to get Hash Sets for file. The Db connection was already closed."); //NON-NLS - return Collections.emptySet(); - } else { - return drawableDB.getHashSetsForFile(fileID); - } - } catch (TskCoreException | SQLException ex) { - Logger.getLogger(HashSetManager.class.getName()).log(Level.SEVERE, "Failed to get Hash Sets for file."); //NON-NLS + return drawableDB.getHashSetsForFile(fileID); + } catch (TskCoreException ex) { + Logger.getLogger(HashSetManager.class.getName()).log(Level.SEVERE, String.format("Failed to get hash sets for file (id=%d)", fileID), ex); //NON-NLS return Collections.emptySet(); } } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java index bfd0fab496..0b52e0ff0e 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java @@ -99,18 +99,24 @@ public class GroupManager { private static final Logger logger = Logger.getLogger(GroupManager.class.getName()); - /** An executor to submit async UI related background tasks to. */ + /** + * An executor to submit async UI related background tasks to. 
+ */ private final ListeningExecutorService exec = MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor( new BasicThreadFactory.Builder().namingPattern("GroupManager BG Thread-%d").build())); //NON-NLS private final ImageGalleryController controller; - /** list of all analyzed groups */ + /** + * list of all analyzed groups + */ @GuardedBy("this") //NOPMD private final ObservableList analyzedGroups = FXCollections.observableArrayList(); private final ObservableList unmodifiableAnalyzedGroups = FXCollections.unmodifiableObservableList(analyzedGroups); - /** list of unseen groups */ + /** + * list of unseen groups + */ @GuardedBy("this") //NOPMD private final ObservableList unSeenGroups = FXCollections.observableArrayList(); private final ObservableList unmodifiableUnSeenGroups = FXCollections.unmodifiableObservableList(unSeenGroups); @@ -273,7 +279,7 @@ public class GroupManager { updateUnSeenGroups(group); } } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error marking group as seen", ex); //NON-NLS + logger.log(Level.SEVERE, String.format("Error setting seen status for group: %s", group.getGroupKey().getValue().toString()), ex); //NON-NLS } }); } @@ -487,8 +493,8 @@ public class GroupManager { setSortOrder(sortOrder); //only re-query the db if the data source or group by attribute changed or it is forced if (dataSource != getDataSource() - || groupBy != getGroupBy() - || force) { + || groupBy != getGroupBy() + || force) { setDataSource(dataSource); setGroupBy(groupBy); @@ -645,9 +651,9 @@ public class GroupManager { * analyzed because we don't know all the files that will be a part of * that group. just show them no matter what. */ - if (groupKey.getAttribute() != DrawableAttribute.PATH - || getDrawableDB().isGroupAnalyzed(groupKey)) { - try { + try { + if (groupKey.getAttribute() != DrawableAttribute.PATH + || getDrawableDB().isGroupAnalyzed(groupKey)) { Set fileIDs = getFileIDsInGroup(groupKey); if (Objects.nonNull(fileIDs)) { @@ -673,9 +679,9 @@ public class GroupManager { return group; } - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "failed to get files for group: " + groupKey.getAttribute().attrName.toString() + " = " + groupKey.getValue(), ex); //NON-NLS } + } catch (SQLException | TskCoreException ex) { + logger.log(Level.SEVERE, "Failed to get files for group: " + groupKey.getAttribute().attrName.toString() + " = " + groupKey.getValue(), ex); //NON-NLS } return null; @@ -735,7 +741,7 @@ public class GroupManager { */ @SuppressWarnings({"unchecked", "rawtypes"}) @NbBundle.Messages({"# {0} - groupBy attribute Name", - "ReGroupTask.displayTitle=regrouping by {0}: " }) + "ReGroupTask.displayTitle=regrouping by {0}: "}) class ReGroupTask> extends LoggedTask { private final DataSource dataSource; @@ -744,13 +750,13 @@ public class GroupManager { private final SortOrder sortOrder; ReGroupTask(DataSource dataSource, DrawableAttribute groupBy, GroupSortBy sortBy, SortOrder sortOrder) { - super(Bundle.ReGroupTask_displayTitle(groupBy.attrName.toString() ), true); + super(Bundle.ReGroupTask_displayTitle(groupBy.attrName.toString()), true); this.dataSource = dataSource; this.groupBy = groupBy; this.sortBy = sortBy; this.sortOrder = sortOrder; - updateTitle(Bundle.ReGroupTask_displayTitle(groupBy.attrName.toString() )); + updateTitle(Bundle.ReGroupTask_displayTitle(groupBy.attrName.toString())); } @Override @@ -791,8 +797,8 @@ public class GroupManager { = viewedKey.map(GroupKey::getAttribute).orElse(null); if (viewedGroup.isPresent() == false //if no 
group was being viewed, - || (dataSource != null && notEqual(dataSourceOfCurrentGroup, dataSource)) //or the datasource of the viewed group is wrong, - || groupBy != attributeOfCurrentGroup) { // or the groupBy attribute is wrong... + || (dataSource != null && notEqual(dataSourceOfCurrentGroup, dataSource)) //or the datasource of the viewed group is wrong, + || groupBy != attributeOfCurrentGroup) { // or the groupBy attribute is wrong... //the current group should not be visible so ... if (isNotEmpty(unSeenGroups)) { diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/DataSourceCell.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/DataSourceCell.java index 08543a42d2..7626c4fd77 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/DataSourceCell.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/DataSourceCell.java @@ -21,6 +21,8 @@ package org.sleuthkit.autopsy.imagegallery.gui; import java.util.Map; import java.util.Optional; import javafx.scene.control.ListCell; +import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableDB; +import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableDB.DrawableDbBuildStatusEnum; import org.sleuthkit.datamodel.DataSource; /** @@ -29,9 +31,23 @@ import org.sleuthkit.datamodel.DataSource; public class DataSourceCell extends ListCell> { private final Map dataSourcesTooManyFiles; + private final Map dataSourcesDrawableDBStatus; - public DataSourceCell(Map dataSourcesViewable) { - this.dataSourcesTooManyFiles = dataSourcesViewable; + /** + * + * @param dataSourcesTooManyFiles: a map of too many files indicator for + * each data source. + * Data sources with too many files may substantially slow down + * the system and hence are disabled for selection. + * @param dataSourcesDrawableDBStatus a map of drawable DB status for + * each data sources. + * Data sources in DEFAULT state are not fully analyzed yet and are + * disabled for selection. + */ + public DataSourceCell(Map dataSourcesTooManyFiles, Map dataSourcesDrawableDBStatus) { + this.dataSourcesTooManyFiles = dataSourcesTooManyFiles; + this.dataSourcesDrawableDBStatus = dataSourcesDrawableDBStatus; + } @Override @@ -43,14 +59,28 @@ public class DataSourceCell extends ListCell> { DataSource dataSource = item.orElse(null); String text = (dataSource == null) ? "All" : dataSource.getName() + " (Id: " + dataSource.getId() + ")"; Boolean tooManyFilesInDataSource = dataSourcesTooManyFiles.getOrDefault(dataSource, false); + + DrawableDbBuildStatusEnum dataSourceDBStatus = (dataSource != null) ? 
+ dataSourcesDrawableDBStatus.get(dataSource.getId()) : DrawableDbBuildStatusEnum.UNKNOWN; + + Boolean dataSourceNotAnalyzed = (dataSourceDBStatus == DrawableDbBuildStatusEnum.DEFAULT); if (tooManyFilesInDataSource) { text += " - Too many files"; + } + if (dataSourceNotAnalyzed) { + text += " - Not Analyzed"; + } + + // check if item should be disabled + if (tooManyFilesInDataSource || dataSourceNotAnalyzed) { + setDisable(true); setStyle("-fx-opacity : .5"); - } else { + } + else { setGraphic(null); setStyle("-fx-opacity : 1"); } - setDisable(tooManyFilesInDataSource); + setText(text); } } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/Toolbar.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/Toolbar.java index bc0f5e5d84..4bcecbbecb 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/Toolbar.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/Toolbar.java @@ -240,8 +240,8 @@ public class Toolbar extends ToolBar { } private void initDataSourceComboBox() { - dataSourceComboBox.setCellFactory(param -> new DataSourceCell(dataSourcesViewable)); - dataSourceComboBox.setButtonCell(new DataSourceCell(dataSourcesViewable)); + dataSourceComboBox.setCellFactory(param -> new DataSourceCell(dataSourcesViewable, controller.getAllDataSourcesDrawableDBStatus())); + dataSourceComboBox.setButtonCell(new DataSourceCell(dataSourcesViewable, controller.getAllDataSourcesDrawableDBStatus())); dataSourceComboBox.setConverter(new StringConverter>() { @Override public String toString(Optional object) { diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java index c8bbe289e4..f7fff3c134 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java @@ -1,41 +1,35 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2018-2018 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. +/* + * Autopsy Forensic Browser + * + * Copyright 2018-2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
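The DataSourceCell change above now disables a combo-box entry for two independent reasons: the data source holds too many files, or its drawable database is still in the DEFAULT (not yet analyzed) build state. The following is a minimal, self-contained JavaFX sketch of that same two-flag pattern; FlaggedSourceCell, the string-keyed maps, and the label suffixes are stand-ins invented for illustration rather than the Autopsy types.

```java
import java.util.Map;
import javafx.scene.control.ListCell;

/** Illustrative only: a cell that greys out and disables flagged entries. */
class FlaggedSourceCell extends ListCell<String> {

    private final Map<String, Boolean> tooManyFiles;  // entry name -> "too many files" flag
    private final Map<String, Boolean> notAnalyzed;   // entry name -> "drawable DB not built" flag

    FlaggedSourceCell(Map<String, Boolean> tooManyFiles, Map<String, Boolean> notAnalyzed) {
        this.tooManyFiles = tooManyFiles;
        this.notAnalyzed = notAnalyzed;
    }

    @Override
    protected void updateItem(String item, boolean empty) {
        super.updateItem(item, empty);
        if (empty || item == null) {
            setText(null);
            setDisable(false);
            return;
        }
        boolean tooMany = tooManyFiles.getOrDefault(item, false);
        boolean unAnalyzed = notAnalyzed.getOrDefault(item, false);
        String text = item;
        if (tooMany) {
            text += " - Too many files";
        }
        if (unAnalyzed) {
            text += " - Not Analyzed";
        }
        // Either condition makes the entry unselectable and dimmed.
        setDisable(tooMany || unAnalyzed);
        setStyle((tooMany || unAnalyzed) ? "-fx-opacity: .5" : "-fx-opacity: 1");
        setText(text);
    }
}
```

Keeping the disable decision and the label suffixes in one place mirrors the design choice in the patch, so the button cell and the drop-down cells stay consistent.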
*/ package org.sleuthkit.autopsy.keywordsearch; import com.google.common.io.CharSource; import java.io.IOException; import java.io.Reader; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.Collection; import java.util.Iterator; -import java.util.LinkedList; +import java.util.Objects; +import java.util.function.Consumer; import java.util.logging.Level; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.coreutils.SQLiteTableReaderException; import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.coreutils.SQLiteTableReader; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.TskCoreException; /** * Dedicated SqliteTextExtractor to solve the problems associated with Tika's @@ -49,7 +43,6 @@ class SqliteTextExtractor extends ContentTextExtractor { private static final String SQLITE_MIMETYPE = "application/x-sqlite3"; private static final Logger logger = Logger.getLogger(SqliteTextExtractor.class.getName()); - private static final CharSequence EMPTY_CHARACTER_SEQUENCE = ""; @Override boolean isContentTypeSpecific() { @@ -80,7 +73,7 @@ class SqliteTextExtractor extends ContentTextExtractor { } /** - * Returns an input stream that will read from a sqlite database. + * Returns a stream that will read from a sqlite database. * * @param source Content file * @@ -91,267 +84,250 @@ class SqliteTextExtractor extends ContentTextExtractor { */ @Override public Reader getReader(Content source) throws TextExtractorException { - try { - //Firewall for any content that is not an AbstractFile - if (!AbstractFile.class.isInstance(source)) { - return CharSource.wrap(EMPTY_CHARACTER_SEQUENCE).openStream(); + //Firewall for any content that is not an AbstractFile + if (!AbstractFile.class.isInstance(source)) { + try { + return CharSource.wrap("").openStream(); + } catch (IOException ex) { + throw new TextExtractorException("", ex); } - return new SQLiteTableReader((AbstractFile) source); - } catch (NoCurrentCaseException | IOException | TskCoreException - | ClassNotFoundException | SQLException ex) { - throw new TextExtractorException( - String.format("Encountered an issue while trying to initialize " //NON-NLS - + "a sqlite table steamer for abstract file with id: [%s], name: " //NON-NLS - + "[%s].", source.getId(), source.getName()), ex); //NON-NLS } + + return new SQLiteStreamReader((AbstractFile) source); } /** - * Lazily loads tables from the database during reading to conserve memory. + * Produces a continuous stream of characters from a database file. To + * achieve this, all table names are queues up and a SQLiteTableReader is + * used to do the actual queries and table iteration. */ - private class SQLiteTableReader extends Reader { + public class SQLiteStreamReader extends Reader { - private final Iterator tableIterator; - private final Connection connection; - private Reader currentTableReader; - private final AbstractFile source; + private final SQLiteTableReader reader; + private final AbstractFile file; + + private Iterator tableNames; + private String currentTableName; + + private char[] buf; + private ExcessBytes leftOvers; + private int totalColumns; + + private int bufIndex; /** - * Creates a reader that streams each table into memory and wraps a - * reader around it. Designed to save memory for large databases. 
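The new class comment above describes the streaming design: collect the table names, then let a SQLiteTableReader walk one table at a time while the Reader hands characters back to the indexer. The sketch below shows only that lazy, table-at-a-time shape; it renders each table eagerly through a hypothetical tableText function instead of the callback-driven SQLiteTableReader the patch actually uses.

```java
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.Iterator;
import java.util.function.Function;

/** Illustrative only: one Reader that advances lazily across many tables. */
class LazyTableReader extends Reader {

    private final Iterator<String> tableNames;
    private final Function<String, String> tableText; // hypothetical: table name -> rendered text
    private Reader current;

    LazyTableReader(Iterator<String> tableNames, Function<String, String> tableText) {
        this.tableNames = tableNames;
        this.tableText = tableText;
    }

    @Override
    public int read(char[] cbuf, int off, int len) throws IOException {
        while (true) {
            if (current == null) {
                if (!tableNames.hasNext()) {
                    return -1;                     // no tables left: end of stream
                }
                // Render the next table only when the previous one is exhausted.
                current = new StringReader(tableText.apply(tableNames.next()));
            }
            int n = current.read(cbuf, off, len);
            if (n != -1) {
                return n;                          // part of the current table was copied out
            }
            current = null;                        // current table done, advance on next pass
        }
    }

    @Override
    public void close() throws IOException {
        if (current != null) {
            current.close();
        }
    }
}
```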
+ * Creates a new reader for the sqlite file. This table reader class + * will iterate through a table row by row and pass the values to + * different functions based on data type. Here we define what to do on + * the column names and we define what to do for all data types. * - * @param file Sqlite database file - * - * @throws NoCurrentCaseException Current case has closed - * @throws IOException Exception copying abstract file over - * to local temp directory - * @throws TskCoreException Exception using file manager to find - * meta files - * @throws ClassNotFoundException Could not find sqlite JDBC class - * @throws SQLException Could not establish jdbc connection + * @param file Sqlite file */ - public SQLiteTableReader(AbstractFile file) throws NoCurrentCaseException, - IOException, TskCoreException, ClassNotFoundException, SQLException { - source = file; - - String localDiskPath = SqliteUtil.writeAbstractFileToLocalDisk(file); - SqliteUtil.findAndCopySQLiteMetaFile(file); - Class.forName("org.sqlite.JDBC"); //NON-NLS - connection = DriverManager.getConnection("jdbc:sqlite:" + localDiskPath); //NON-NLS - tableIterator = getTables().iterator(); + public SQLiteStreamReader(AbstractFile file) { + this.file = file; + reader = new SQLiteTableReader.Builder(file) + .onColumnNames(getColumnNameStrategy()) + .forAll(getForAllTableValuesStrategy()).build(); } /** - * Gets the table names from the SQLite database file. + * On every item in the database we want to do the following series of + * steps: 1) Get its string representation (ignore blobs with empty + * string). 2) Format it based on its positioning in the row. 3) Write + * it to the buffer. * - * @return Collection of table names from the database schema - */ - private Collection getTables() throws SQLException { - Collection tableNames = new LinkedList<>(); - try (Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery( - "SELECT name FROM sqlite_master " - + " WHERE type= 'table' ")) { - while (resultSet.next()) { - tableNames.add(resultSet.getString("name")); //NON-NLS - } - } - return tableNames; - } - - /** - * Reads from a database table and loads the contents into a table - * builder so that its properly formatted during indexing. + * columnIndex is purely for keeping track of where the object is in the + * table, hence the bounds checking with the mod function. * - * @param tableName Database table to be read + * @return Our consumer class defined to do the steps above. */ - private String getTableAsString(String tableName) { - TableBuilder table = new TableBuilder(); - table.addTableName(tableName); - String quotedTableName = "\"" + tableName + "\""; + private Consumer getForAllTableValuesStrategy() { + return new Consumer() { + private int columnIndex = 0; - try (Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery( - "SELECT * FROM " + quotedTableName)) { //NON-NLS - ResultSetMetaData metaData = resultSet.getMetaData(); - int columnCount = resultSet.getMetaData().getColumnCount(); - Collection row = new LinkedList<>(); + @Override + public void accept(Object value) { + columnIndex++; + //Ignore blobs + String objectStr = (value instanceof byte[]) ?
"" : Objects.toString(value, ""); - //Add column names once from metadata - for (int i = 1; i <= columnCount; i++) { - row.add(metaData.getColumnName(i)); - } - - table.addHeader(row); - while (resultSet.next()) { - row = new LinkedList<>(); - for (int i = 1; i <= columnCount; i++) { - Object result = resultSet.getObject(i); - String type = metaData.getColumnTypeName(i); - if (isValuableResult(result, type)) { - row.add(resultSet.getObject(i).toString()); - } + if (columnIndex > 1 && columnIndex < totalColumns) { + objectStr += " "; } - table.addRow(row); + if (columnIndex == 1) { + objectStr = "\t" + objectStr + " "; + } + if (columnIndex == totalColumns) { + objectStr += "\n"; + } + + fillBuffer(objectStr); + columnIndex = columnIndex % totalColumns; } - table.addCell("\n"); - } catch (SQLException ex) { - logger.log(Level.WARNING, String.format( - "Error attempting to read file table: [%s]" //NON-NLS - + " for file: [%s] (id=%d).", tableName, //NON-NLS - source.getName(), source.getId()), ex); - } - - return table.toString(); + }; } /** - * Determines if the result from the result set is worth adding to the - * row. Ignores nulls and blobs for the time being. + * On every column name in the header do the following series of steps: + * 1) Write the tableName before the header. 2) Format the column name + * based on row positioning 3) Reset the count if we are at the end, + * that way if we want to read multiple tables we can do so without + * having to build new consumers. * - * @param result Object result retrieved from resultSet - * @param type Type of objet retrieved from resultSet + * columnIndex is purely for keeping track of where the column name is + * in the table, hence the bounds checking with the mod function. * - * @return boolean where true means valuable, false implies it can be - * skipped. + * @return Our consumer class defined to do the steps above. */ - private boolean isValuableResult(Object result, String type) { - //Ignore nulls and blobs - return result != null && type.compareToIgnoreCase("blob") != 0; + private Consumer getColumnNameStrategy() { + return new Consumer() { + private int columnIndex = 0; + + @Override + public void accept(String columnName) { + if (columnIndex == 0) { + fillBuffer("\n" + currentTableName + "\n\n\t"); + } + columnIndex++; + + fillBuffer(columnName + ((columnIndex == totalColumns) ? "\n" : " ")); + + //Reset the columnCount to 0 for next table read + columnIndex = columnIndex % totalColumns; + } + }; } /** - * Loads a database file into the character buffer. The underlying - * implementation here only loads one table at a time to conserve - * memory. + * This functions writes the string representation of a database value + * into the read buffer. If the buffer becomes full, we save the extra + * characters and hold on to them until the next call to read(). * - * @param cbuf Buffer to copy database content characters into - * @param off offset to begin loading in buffer - * @param len length of the buffer + * @param val Formatted database value string + */ + private void fillBuffer(String val) { + for (int i = 0; i < val.length(); i++) { + if (bufIndex != buf.length) { + buf[bufIndex++] = val.charAt(i); + } else { + leftOvers = new ExcessBytes(val, i); + break; + } + } + } + + /** + * Reads database values into the buffer. This function is responsible for + * getting the next table in the queue, initiating calls to the SQLiteTableReader, + * and filling in any excess bytes that are lingering from the previous call. 
* - * @return The number of characters read from the reader - * - * @throws IOException If there is an error with the CharSource wrapping + * @throws IOException */ @Override public int read(char[] cbuf, int off, int len) throws IOException { - if (currentTableReader == null) { - String tableResults = getNextTable(); - if (tableResults == null) { + buf = cbuf; + + bufIndex = off; + + //Lazily wait to get table names until first call to read. + if (Objects.isNull(tableNames)) { + try { + tableNames = reader.getTableNames().iterator(); + } catch (SQLiteTableReaderException ex) { + //Can't get table names so can't read the file! return -1; } - currentTableReader = CharSource.wrap(tableResults).openStream(); } - int charactersRead = currentTableReader.read(cbuf, off, len); - while (charactersRead == -1) { - String tableResults = getNextTable(); - if (tableResults == null) { - return -1; + //If there are excess bytes from the last read, then copy those in. + if (Objects.nonNull(leftOvers) && !leftOvers.isFinished()) { + bufIndex += leftOvers.read(cbuf, off, len); + } + + //Keep grabbing table names from the queue and reading them until + //our buffer is full. + while (bufIndex != len) { + if (Objects.isNull(currentTableName) || reader.isFinished()) { + if (tableNames.hasNext()) { + currentTableName = tableNames.next(); + try { + totalColumns = reader.getColumnCount(currentTableName); + reader.read(currentTableName, () -> bufIndex == len); + } catch (SQLiteTableReaderException ex) { + logger.log(Level.WARNING, String.format( + "Error attempting to read file table: [%s]" //NON-NLS + + " for file: [%s] (id=%d).", currentTableName, //NON-NLS + file.getName(), file.getId()), ex.getMessage()); + } + } else { + if (bufIndex == off) { + return -1; + } + return bufIndex; + } + } else { + try { + reader.read(currentTableName, () -> bufIndex == len); + } catch (SQLiteTableReaderException ex) { + logger.log(Level.WARNING, String.format( + "Error attempting to read file table: [%s]" //NON-NLS + + " for file: [%s] (id=%d).", currentTableName, //NON-NLS + file.getName(), file.getId()), ex.getMessage()); + } } - currentTableReader = CharSource.wrap(tableResults).openStream(); - charactersRead = currentTableReader.read(cbuf, off, len); } - return charactersRead; + return bufIndex; } - /** - * Grab the next table name from the collection of all table names, once - * we no longer have a table to process, return null which will be - * understood to mean the end of parsing. - * - * @return Current table contents or null meaning there are not more - * tables to process - */ - private String getNextTable() { - if (tableIterator.hasNext()) { - return getTableAsString(tableIterator.next()); - } else { - return null; - } - } - - /** - * Close the underlying connection to the database.
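For context, the read() above follows the standard java.io.Reader contract: it returns the number of characters written starting at off, and -1 only when nothing more could be produced. A short usage sketch of how a caller (for example, an indexing loop) might drain such a reader; construction of the reader is elided and the names are invented:

```java
import java.io.IOException;
import java.io.Reader;

/** Illustrative only: the standard drain loop a consumer of the reader would run. */
class ReaderDrainExample {

    static long countChars(Reader reader) throws IOException {
        char[] chunk = new char[8192];
        long total = 0;
        int read;
        while ((read = reader.read(chunk, 0, chunk.length)) != -1) {
            total += read;   // valid characters occupy chunk[0 .. read-1]
        }
        return total;
    }
}
```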
- * - * @throws IOException Not applicable, we can just catch the - * SQLException - */ @Override public void close() throws IOException { try { - connection.close(); - } catch (SQLException ex) { - //Non-essential exception, user has no need for the connection - //object at this stage so closing details are not important - logger.log(Level.WARNING, "Could not close JDBC connection", ex); + reader.close(); + } catch (SQLiteTableReaderException ex) { + logger.log(Level.WARNING, "Could not close SQliteTableReader.", ex.getMessage()); } } - } - - /** - * Formats input so that it reads as a table in the console or in a text - * viewer - */ - private class TableBuilder { - - private final Integer DEFAULT_CAPACITY = 32000; - private final StringBuilder table = new StringBuilder(DEFAULT_CAPACITY); - - private static final String TAB = "\t"; - private static final String NEW_LINE = "\n"; - private static final String SPACE = " "; - /** - * Add the section to the top left corner of the table. This is where - * the name of the table should go - * - * @param tableName Table name + * Wrapper that holds the excess bytes that were left over from the previous + * call to read(). */ - public void addTableName(String tableName) { - table.append(tableName) - .append(NEW_LINE) - .append(NEW_LINE); - } + private class ExcessBytes { - /** - * Adds a formatted header row to the underlying StringBuilder - * - * @param vals - */ - public void addHeader(Collection vals) { - addRow(vals); - } + private final String entity; + private Integer pointer; - /** - * Adds a formatted row to the underlying StringBuilder - * - * @param vals - */ - public void addRow(Collection vals) { - table.append(TAB); - vals.forEach((val) -> { - table.append(val); - table.append(SPACE); - }); - table.append(NEW_LINE); - } + public ExcessBytes(String entity, Integer pointer) { + this.entity = entity; + this.pointer = pointer; + } - public void addCell(String cell) { - table.append(cell); - } + public boolean isFinished() { + return entity.length() == pointer; + } - /** - * Returns a string version of the table, with all of the escape - * sequences necessary to print nicely in the console output. - * - * @return Formated table contents - */ - @Override - public String toString() { - return table.toString(); + /** + * Copies the excess bytes this instance is holding onto into the + * buffer. + * + * @param buf buffer to write into + * @param off index in buffer to start the write + * @param len length of the write + * + * @return number of characters read into the buffer + */ + public int read(char[] buf, int off, int len) { + for (int i = off; i < len; i++) { + if (isFinished()) { + return i - off; + } + + buf[i] = entity.charAt(pointer++); + } + + return len - off; + } } } } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteUtil.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteUtil.java deleted file mode 100755 index 08eefe7232..0000000000 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteUtil.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2018-2018 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.keywordsearch; - -import java.io.File; -import java.io.IOException; -import java.util.List; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; -import org.sleuthkit.autopsy.casemodule.services.FileManager; -import org.sleuthkit.autopsy.casemodule.services.Services; -import org.sleuthkit.autopsy.datamodel.ContentUtils; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.SleuthkitCase; -import org.sleuthkit.datamodel.TskCoreException; - -/** - * Sqlite utility class. Find and copy metafiles, write sqlite abstract files to - * temp directory, and generate unique temp directory paths. - */ -final class SqliteUtil { - - private SqliteUtil() { - - } - - /** - * Overloaded implementation of - * {@link #findAndCopySQLiteMetaFile(AbstractFile, String) findAndCopySQLiteMetaFile} - * , automatically tries to copy -wal and -shm files without needing to know - * their existence. - * - * @param sqliteFile file which has -wal and -shm meta files - * - * @throws NoCurrentCaseException Case has been closed. - * @throws TskCoreException fileManager cannot find AbstractFile - * files. - * @throws IOException Issue during writing to file. - */ - public static void findAndCopySQLiteMetaFile(AbstractFile sqliteFile) - throws NoCurrentCaseException, TskCoreException, IOException { - - findAndCopySQLiteMetaFile(sqliteFile, sqliteFile.getName() + "-wal"); - findAndCopySQLiteMetaFile(sqliteFile, sqliteFile.getName() + "-shm"); - } - - /** - * Searches for a meta file associated with the give SQLite database. If - * found, it copies this file into the temp directory of the current case. - * - * @param sqliteFile file being processed - * @param metaFileName name of meta file to look for - * - * @throws NoCurrentCaseException Case has been closed. - * @throws TskCoreException fileManager cannot find AbstractFile - * files. - * @throws IOException Issue during writing to file. - */ - public static void findAndCopySQLiteMetaFile(AbstractFile sqliteFile, - String metaFileName) throws NoCurrentCaseException, TskCoreException, IOException { - - Case openCase = Case.getCurrentCaseThrows(); - SleuthkitCase sleuthkitCase = openCase.getSleuthkitCase(); - Services services = new Services(sleuthkitCase); - FileManager fileManager = services.getFileManager(); - - List metaFiles = fileManager.findFiles( - sqliteFile.getDataSource(), metaFileName, - sqliteFile.getParent().getName()); - - if (metaFiles != null) { - for (AbstractFile metaFile : metaFiles) { - writeAbstractFileToLocalDisk(metaFile); - } - } - } - - /** - * Copies the file contents into a unique path in the current case temp - * directory. 
- * - * @param file AbstractFile from the data source - * - * @return The path of the file on disk - * - * @throws IOException Exception writing file contents - * @throws NoCurrentCaseException Current case closed during file copying - */ - public static String writeAbstractFileToLocalDisk(AbstractFile file) - throws IOException, NoCurrentCaseException { - - String localDiskPath = getUniqueTempDirectoryPath(file); - File localDatabaseFile = new File(localDiskPath); - if (!localDatabaseFile.exists()) { - ContentUtils.writeToFile(file, localDatabaseFile); - } - return localDiskPath; - } - - /** - * Generates a unique local disk path that resides in the temp directory of - * the current case. - * - * @param file The database abstract file - * - * @return Unique local disk path living in the temp directory of the case - * - * @throws org.sleuthkit.autopsy.casemodule.NoCurrentCaseException - */ - public static String getUniqueTempDirectoryPath(AbstractFile file) throws NoCurrentCaseException { - return Case.getCurrentCaseThrows().getTempDirectory() - + File.separator + file.getId() + file.getName(); - } -} diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java index 3c27945488..432da5f072 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java @@ -810,6 +810,7 @@ class ExtractRegistry extends Extract { installtime = Long.valueOf(Tempdate) / 1000; } catch (ParseException e) { logger.log(Level.SEVERE, "RegRipper::Conversion on DateTime -> ", e); //NON-NLS + wifiBBartifacts.add(bbart); } break; default: @@ -817,6 +818,9 @@ class ExtractRegistry extends Extract { } } }//for + } + if (!wifiBBartifacts.isEmpty()){ + IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_WIFI_NETWORK, wifiBBartifacts)); try { List bbattributes = Lists.newArrayList( new BlackboardAttribute( diff --git a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties index 91571da9bf..202947f033 100644 --- a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties +++ b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties @@ -1,5 +1,5 @@ #Updated by build script -#Sat, 13 Oct 2018 21:02:18 -0400 +#Tue, 13 Nov 2018 17:30:09 -0500 LBL_splash_window_title=Starting Autopsy SPLASH_HEIGHT=314 SPLASH_WIDTH=538 @@ -8,4 +8,4 @@ SplashRunningTextBounds=0,289,538,18 SplashRunningTextColor=0x0 SplashRunningTextFontSize=19 -currentVersion=Autopsy 4.9.0 +currentVersion=Autopsy 4.9.1 diff --git a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties index 90fb6cf276..11be888847 100644 --- a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties +++ b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties @@ -1,4 +1,4 @@ #Updated by build script -#Sat, 13 Oct 2018 21:02:18 -0400 -CTL_MainWindow_Title=Autopsy 4.9.0 -CTL_MainWindow_Title_No_Project=Autopsy 4.9.0 +#Tue, 13 Nov 2018 17:30:09 -0500 +CTL_MainWindow_Title=Autopsy 4.9.1 +CTL_MainWindow_Title_No_Project=Autopsy 4.9.1 diff --git a/nbproject/project.properties 
b/nbproject/project.properties index 8f3f577177..96658e548c 100644 --- a/nbproject/project.properties +++ b/nbproject/project.properties @@ -6,8 +6,8 @@ app.name=${branding.token} ### if left unset, version will default to today's date app.version=4.9.1 ### build.type must be one of: DEVELOPMENT, RELEASE -build.type=RELEASE -#build.type=DEVELOPMENT +#build.type=RELEASE +build.type=DEVELOPMENT project.org.netbeans.progress=org-netbeans-api-progress project.org.sleuthkit.autopsy.experimental=Experimental diff --git a/test/script/tskdbdiff.py b/test/script/tskdbdiff.py index baefc55e6a..8380424bb5 100644 --- a/test/script/tskdbdiff.py +++ b/test/script/tskdbdiff.py @@ -333,7 +333,7 @@ class TskDbDiff(object): for line in postgreSQL_db: line = line.strip('\r\n ') # Deal with pg_dump result file - if line.startswith('--') or line.lower().startswith('alter') or not line: # It's comment or alter statement or empty line + if line.startswith('--') or line.lower().startswith('alter') or "pg_catalog" in line or not line: # It's comment or alter statement or catalog entry or empty line continue elif not line.endswith(';'): # Statement not finished dump_line += line