From e352817801ade27d55784c3bd388f2b89874a7eb Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Mon, 27 Aug 2018 13:33:17 -0400 Subject: [PATCH 01/33] 4163 add caches to case, type, and datasource for eamdb queries --- .../datamodel/AbstractSqlEamDb.java | 188 +++++++++++------- 1 file changed, 114 insertions(+), 74 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index 288363fe95..0ce4941c09 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -18,6 +18,8 @@ */ package org.sleuthkit.autopsy.centralrepository.datamodel; +import com.google.common.cache.Cache; +import com.google.common.cache.CacheBuilder; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.List; @@ -34,6 +36,8 @@ import java.time.LocalDate; import java.util.HashMap; import java.util.Map; import java.util.Set; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; import java.util.logging.Level; import org.sleuthkit.autopsy.casemodule.Case; import static org.sleuthkit.autopsy.centralrepository.datamodel.EamDbUtil.updateSchemaVersion; @@ -57,6 +61,15 @@ abstract class AbstractSqlEamDb implements EamDb { private int bulkArtifactsCount; protected int bulkArtifactsThreshold; private final Map> bulkArtifacts; + private static final int CASE_CACHE_TIMEOUT = 5; + private static final int DATA_SOURCE_CACHE_TIMEOUT = 5; + private static final Cache typeCache = CacheBuilder.newBuilder().build(); + private static final Cache caseCache = CacheBuilder.newBuilder() + .expireAfterWrite(CASE_CACHE_TIMEOUT, TimeUnit.MINUTES). + build(); + private static final Cache dataSourceCache = CacheBuilder.newBuilder() + .expireAfterWrite(DATA_SOURCE_CACHE_TIMEOUT, TimeUnit.MINUTES). + build(); // Maximum length for the value column in the instance tables static final int MAX_VALUE_LENGTH = 256; @@ -88,7 +101,7 @@ abstract class AbstractSqlEamDb implements EamDb { /** * Add a new name/value pair in the db_info table. * - * @param name Key to set + * @param name Key to set * @param value Value to set * * @throws EamDbException @@ -149,10 +162,19 @@ abstract class AbstractSqlEamDb implements EamDb { return value; } + /** + * Reset the contents of the caches associated with EamDb results. + */ + protected final void clearCaches() { + typeCache.invalidateAll(); + caseCache.invalidateAll(); + dataSourceCache.invalidateAll(); + } + /** * Update the value for a name in the name/value db_info table. * - * @param name Name to find + * @param name Name to find * @param value Value to assign to name. 
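An aside on the Cache.get(key, Callable) idiom these changes lean on: the Callable runs only on a cache miss, and any checked exception it throws comes back wrapped in an ExecutionException that the caller unwraps into the domain exception. A minimal self-contained sketch of the pattern, assuming only Guava on the classpath (CachedLookupSketch and lookupFromDb are illustrative stand-ins, not code from this patch):

    import com.google.common.cache.Cache;
    import com.google.common.cache.CacheBuilder;
    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.TimeUnit;

    public class CachedLookupSketch {

        // Entries expire a fixed time after write, as with CASE_CACHE_TIMEOUT above.
        private static final Cache<String, String> cache = CacheBuilder.newBuilder()
                .expireAfterWrite(5, TimeUnit.MINUTES)
                .build();

        // Hypothetical stand-in for a database query; may throw a checked exception.
        private static String lookupFromDb(String key) throws Exception {
            return "value-for-" + key;
        }

        static String get(String key) throws Exception {
            try {
                // The Callable runs only when the key is absent (or has expired).
                return cache.get(key, () -> lookupFromDb(key));
            } catch (ExecutionException ex) {
                // Checked exceptions from the loader arrive here wrapped; rewrap for
                // callers, much as the patch rewraps into EamDbException.
                throw new Exception("Error loading value for " + key, ex);
            }
        }

        public static void main(String[] args) throws Exception {
            System.out.println(get("case-uuid-1")); // miss: loader executes
            System.out.println(get("case-uuid-1")); // hit: served from the cache
        }
    }
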
* * @throws EamDbException @@ -278,7 +300,11 @@ abstract class AbstractSqlEamDb implements EamDb { @Override public CorrelationCase getCase(Case autopsyCase) throws EamDbException { - return getCaseByUUID(autopsyCase.getName()); + try { + return caseCache.get(autopsyCase.getName(), () -> getCaseByUUID(autopsyCase.getName())); + } catch (ExecutionException ex) { + throw new EamDbException("Error getting autopsy case from Central repo", ex); + } } /** @@ -505,51 +531,57 @@ abstract class AbstractSqlEamDb implements EamDb { /** * Retrieves Data Source details based on data source device ID * - * @param correlationCase the current CorrelationCase used for ensuring - * uniqueness of DataSource + * @param correlationCase the current CorrelationCase used for ensuring + * uniqueness of DataSource * @param dataSourceDeviceId the data source device ID number * * @return The data source */ @Override public CorrelationDataSource getDataSource(CorrelationCase correlationCase, String dataSourceDeviceId) throws EamDbException { + if (correlationCase == null) { throw new EamDbException("Correlation case is null"); } - - Connection conn = connect(); - - CorrelationDataSource eamDataSourceResult = null; - PreparedStatement preparedStatement = null; - ResultSet resultSet = null; - - String sql = "SELECT * FROM data_sources WHERE device_id=? AND case_id=?"; // NON-NLS - try { - preparedStatement = conn.prepareStatement(sql); - preparedStatement.setString(1, dataSourceDeviceId); - preparedStatement.setInt(2, correlationCase.getID()); - resultSet = preparedStatement.executeQuery(); - if (resultSet.next()) { - eamDataSourceResult = getEamDataSourceFromResultSet(resultSet); - } - } catch (SQLException ex) { - throw new EamDbException("Error getting data source.", ex); // NON-NLS - } finally { - EamDbUtil.closeStatement(preparedStatement); - EamDbUtil.closeResultSet(resultSet); - EamDbUtil.closeConnection(conn); - } + return dataSourceCache.get(correlationCase.getCaseUUID() + dataSourceDeviceId, () -> { + Connection conn = connect(); - return eamDataSourceResult; + CorrelationDataSource eamDataSourceResult = null; + PreparedStatement preparedStatement = null; + ResultSet resultSet = null; + + String sql = "SELECT * FROM data_sources WHERE device_id=? 
AND case_id=?"; // NON-NLS + + try { + preparedStatement = conn.prepareStatement(sql); + preparedStatement.setString(1, dataSourceDeviceId); + preparedStatement.setInt(2, correlationCase.getID()); + resultSet = preparedStatement.executeQuery(); + if (resultSet.next()) { + eamDataSourceResult = getEamDataSourceFromResultSet(resultSet); + } + } catch (SQLException ex) { + throw new EamDbException("Error getting data source.", ex); // NON-NLS + } finally { + EamDbUtil.closeStatement(preparedStatement); + EamDbUtil.closeResultSet(resultSet); + EamDbUtil.closeConnection(conn); + } + + return eamDataSourceResult; + }); + } catch (ExecutionException ex) { + throw new EamDbException("Error getting data source from central repository", ex); + } } /** * Retrieves Data Source details based on data source ID * * @param correlationCase the current CorrelationCase used for ensuring - * uniqueness of DataSource - * @param dataSourceId the data source ID number + * uniqueness of DataSource + * @param dataSourceId the data source ID number * * @return The data source */ @@ -764,7 +796,7 @@ abstract class AbstractSqlEamDb implements EamDb { * Retrieves eamArtifact instances from the database that are associated * with the aType and filePath * - * @param aType EamArtifact.Type to search for + * @param aType EamArtifact.Type to search for * @param filePath File path to search for * * @return List of 0 or more EamArtifactInstances @@ -831,7 +863,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @param value The correlation value * * @return Number of artifact instances having ArtifactType and - * ArtifactValue. + * ArtifactValue. */ @Override public Long getCountArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws EamDbException { @@ -960,11 +992,11 @@ abstract class AbstractSqlEamDb implements EamDb { * associated with the caseDisplayName and dataSource of the given * eamArtifact instance. * - * @param caseUUID Case ID to search for + * @param caseUUID Case ID to search for * @param dataSourceID Data source ID to search for * * @return Number of artifact instances having caseDisplayName and - * dataSource + * dataSource */ @Override public Long getCountArtifactInstancesByCaseDataSource(String caseUUID, String dataSourceID) throws EamDbException { @@ -1227,7 +1259,7 @@ abstract class AbstractSqlEamDb implements EamDb { * associated CorrelationAttribute object. * * @param eamArtifact The correlation attribute whose database instance will - * be updated. + * be updated. * * @throws EamDbException */ @@ -1277,11 +1309,11 @@ abstract class AbstractSqlEamDb implements EamDb { * Find a correlation attribute in the Central Repository database given the * instance type, case, data source, value, and file path. * - * @param type The type of instance. - * @param correlationCase The case tied to the instance. + * @param type The type of instance. + * @param correlationCase The case tied to the instance. * @param correlationDataSource The data source tied to the instance. - * @param value The value tied to the instance. - * @param filePath The file path tied to the instance. + * @param value The value tied to the instance. + * @param filePath The file path tied to the instance. * * @return The correlation attribute if it exists; otherwise null. * @@ -1356,7 +1388,7 @@ abstract class AbstractSqlEamDb implements EamDb { * * @param eamArtifact Artifact containing exactly one (1) ArtifactInstance. * @param knownStatus The status to change the artifact to. 
Should never be - * KNOWN + * KNOWN */ @Override public void setAttributeInstanceKnownStatus(CorrelationAttributeInstance eamArtifact, TskData.FileKnown knownStatus) throws EamDbException { @@ -1618,7 +1650,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @param value Value to search for * * @return List of cases containing this artifact with instances marked as - * bad + * bad * * @throws EamDbException */ @@ -1858,7 +1890,7 @@ abstract class AbstractSqlEamDb implements EamDb { /** * Process the Artifact instance in the EamDb * - * @param type EamArtifact.Type to search for + * @param type EamArtifact.Type to search for * @param instanceTableCallback callback to process the instance * * @throws EamDbException @@ -1897,9 +1929,10 @@ abstract class AbstractSqlEamDb implements EamDb { /** * Process the Artifact instance in the EamDb give a where clause * - * @param type EamArtifact.Type to search for + * @param type EamArtifact.Type to search for * @param instanceTableCallback callback to process the instance - * @param whereClause query string to execute + * @param whereClause query string to execute + * * @throws EamDbException */ @Override @@ -2081,7 +2114,7 @@ abstract class AbstractSqlEamDb implements EamDb { * Update an existing organization. * * @param updatedOrganization the values the Organization with the same ID - * will be updated to in the database. + * will be updated to in the database. * * @throws EamDbException */ @@ -2284,7 +2317,8 @@ abstract class AbstractSqlEamDb implements EamDb { * Add a new reference instance * * @param eamGlobalFileInstance The reference instance to add - * @param correlationType Correlation Type that this Reference Instance is + * @param correlationType Correlation Type that this Reference + * Instance is * * @throws EamDbException */ @@ -2412,7 +2446,7 @@ abstract class AbstractSqlEamDb implements EamDb { /** * Get all reference entries having a given correlation type and value * - * @param aType Type to use for matching + * @param aType Type to use for matching * @param aValue Value to use for matching * * @return List of all global file instances with a type and value @@ -2613,7 +2647,7 @@ abstract class AbstractSqlEamDb implements EamDb { * artifacts. * * @return List of enabled EamArtifact.Type's. If none are defined in the - * database, the default list will be returned. + * database, the default list will be returned. * * @throws EamDbException */ @@ -2648,7 +2682,7 @@ abstract class AbstractSqlEamDb implements EamDb { * correlate artifacts. * * @return List of supported EamArtifact.Type's. If none are defined in the - * database, the default list will be returned. + * database, the default list will be returned. 
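A related detail worth keeping in mind when reading the cache loaders above: Guava caches cannot hold null values, so a loader that finds no matching row cannot simply hand null back into the cache. One conventional guard is to cache an Optional, so that "not found" becomes a cacheable value in its own right. A sketch of that general technique under the same Guava assumption (findRow is a hypothetical lookup; this illustrates the guard, it is not what the patch does):

    import com.google.common.cache.Cache;
    import com.google.common.cache.CacheBuilder;
    import java.util.Optional;
    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.TimeUnit;

    public class NullSafeCacheSketch {

        private static final Cache<String, Optional<String>> cache = CacheBuilder.newBuilder()
                .expireAfterWrite(5, TimeUnit.MINUTES)
                .build();

        // Hypothetical lookup that may legitimately find nothing.
        private static String findRow(String key) {
            return key.startsWith("known") ? "row-for-" + key : null;
        }

        static Optional<String> get(String key) throws ExecutionException {
            // Optional.ofNullable turns "no such row" into a cacheable, non-null value.
            return cache.get(key, () -> Optional.ofNullable(findRow(key)));
        }

        public static void main(String[] args) throws ExecutionException {
            System.out.println(get("known-device"));   // Optional[row-for-known-device]
            System.out.println(get("missing-device")); // Optional.empty
        }
    }
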
* * @throws EamDbException */ @@ -2721,30 +2755,36 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public CorrelationAttributeInstance.Type getCorrelationTypeById(int typeId) throws EamDbException { - Connection conn = connect(); - - CorrelationAttributeInstance.Type aType; - PreparedStatement preparedStatement = null; - ResultSet resultSet = null; - String sql = "SELECT * FROM correlation_types WHERE id=?"; - try { - preparedStatement = conn.prepareStatement(sql); - preparedStatement.setInt(1, typeId); - resultSet = preparedStatement.executeQuery(); - if (resultSet.next()) { - aType = getCorrelationTypeFromResultSet(resultSet); - return aType; - } else { - throw new EamDbException("Failed to find entry for correlation type ID = " + typeId); - } + return typeCache.get(CorrelationAttributeInstance.FILES_TYPE_ID, () -> { + Connection conn = connect(); - } catch (SQLException ex) { - throw new EamDbException("Error getting correlation type by id.", ex); // NON-NLS - } finally { - EamDbUtil.closeStatement(preparedStatement); - EamDbUtil.closeResultSet(resultSet); - EamDbUtil.closeConnection(conn); + CorrelationAttributeInstance.Type aType; + PreparedStatement preparedStatement = null; + ResultSet resultSet = null; + String sql = "SELECT * FROM correlation_types WHERE id=?"; + + try { + preparedStatement = conn.prepareStatement(sql); + preparedStatement.setInt(1, typeId); + resultSet = preparedStatement.executeQuery(); + if (resultSet.next()) { + aType = getCorrelationTypeFromResultSet(resultSet); + return aType; + } else { + throw new EamDbException("Failed to find entry for correlation type ID = " + typeId); + } + + } catch (SQLException ex) { + throw new EamDbException("Error getting correlation type by id.", ex); // NON-NLS + } finally { + EamDbUtil.closeStatement(preparedStatement); + EamDbUtil.closeResultSet(resultSet); + EamDbUtil.closeConnection(conn); + } + }); + } catch (ExecutionException ex) { + throw new EamDbException("Error getting correlation type", ex); } } @@ -2752,7 +2792,7 @@ abstract class AbstractSqlEamDb implements EamDb { * Convert a ResultSet to a EamCase object * * @param resultSet A resultSet with a set of values to create a EamCase - * object. + * object. * * @return fully populated EamCase object, or null * @@ -2822,7 +2862,7 @@ abstract class AbstractSqlEamDb implements EamDb { * Convert a ResultSet to a EamArtifactInstance object * * @param resultSet A resultSet with a set of values to create a - * EamArtifactInstance object. + * EamArtifactInstance object. 
* * @return fully populated EamArtifactInstance, or null * From 2e195cf16efb6db69d056759bf1b50061f1aeac1 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Mon, 27 Aug 2018 13:34:23 -0400 Subject: [PATCH 02/33] 4163 reset caches when CR connections are shutdown --- .../autopsy/centralrepository/datamodel/PostgresEamDb.java | 1 + .../autopsy/centralrepository/datamodel/SqliteEamDb.java | 1 + 2 files changed, 2 insertions(+) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java index 5db232b51e..9e701fad3a 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java @@ -78,6 +78,7 @@ final class PostgresEamDb extends AbstractSqlEamDb { connectionPool.close(); connectionPool = null; // force it to be re-created on next connect() } + clearCaches(); } } catch (SQLException ex) { throw new EamDbException("Failed to close existing database connections.", ex); // NON-NLS diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java index d300964b5f..34157dea68 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java @@ -85,6 +85,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { connectionPool.close(); connectionPool = null; // force it to be re-created on next connect() } + clearCaches(); } } catch (SQLException ex) { throw new EamDbException("Failed to close existing database connections.", ex); // NON-NLS From 0b3a9fab0eaf3520ff30f9effc6c086757b1a2a0 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Mon, 27 Aug 2018 13:47:33 -0400 Subject: [PATCH 03/33] 4163 move queries to helper functions when caching for readability --- .../datamodel/AbstractSqlEamDb.java | 135 +++++++++++------- 1 file changed, 81 insertions(+), 54 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index 0ce4941c09..825c61f837 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -536,6 +536,8 @@ abstract class AbstractSqlEamDb implements EamDb { * @param dataSourceDeviceId the data source device ID number * * @return The data source + * + * @throws EamDbException */ @Override public CorrelationDataSource getDataSource(CorrelationCase correlationCase, String dataSourceDeviceId) throws EamDbException { @@ -544,38 +546,52 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation case is null"); } try { - return dataSourceCache.get(correlationCase.getCaseUUID() + dataSourceDeviceId, () -> { - Connection conn = connect(); - - CorrelationDataSource eamDataSourceResult = null; - PreparedStatement preparedStatement = null; - ResultSet resultSet = null; - - String sql = "SELECT * FROM data_sources WHERE device_id=? 
AND case_id=?"; // NON-NLS - - try { - preparedStatement = conn.prepareStatement(sql); - preparedStatement.setString(1, dataSourceDeviceId); - preparedStatement.setInt(2, correlationCase.getID()); - resultSet = preparedStatement.executeQuery(); - if (resultSet.next()) { - eamDataSourceResult = getEamDataSourceFromResultSet(resultSet); - } - } catch (SQLException ex) { - throw new EamDbException("Error getting data source.", ex); // NON-NLS - } finally { - EamDbUtil.closeStatement(preparedStatement); - EamDbUtil.closeResultSet(resultSet); - EamDbUtil.closeConnection(conn); - } - - return eamDataSourceResult; - }); + return dataSourceCache.get(correlationCase.getCaseUUID() + dataSourceDeviceId, () -> getDataSourceFromCr(correlationCase, dataSourceDeviceId)); } catch (ExecutionException ex) { throw new EamDbException("Error getting data source from central repository", ex); } } + /** + * Gets the Data Source details based on data source device ID from the + * central repository. + * + * @param correlationCase the current CorrelationCase used for ensuring + * uniqueness of DataSource + * @param dataSourceDeviceId the data source device ID number + * + * @return The data source + * + * @throws EamDbException + */ + private CorrelationDataSource getDataSourceFromCr(CorrelationCase correlationCase, String dataSourceDeviceId) throws EamDbException { + Connection conn = connect(); + + CorrelationDataSource eamDataSourceResult = null; + PreparedStatement preparedStatement = null; + ResultSet resultSet = null; + + String sql = "SELECT * FROM data_sources WHERE device_id=? AND case_id=?"; // NON-NLS + + try { + preparedStatement = conn.prepareStatement(sql); + preparedStatement.setString(1, dataSourceDeviceId); + preparedStatement.setInt(2, correlationCase.getID()); + resultSet = preparedStatement.executeQuery(); + if (resultSet.next()) { + eamDataSourceResult = getEamDataSourceFromResultSet(resultSet); + } + } catch (SQLException ex) { + throw new EamDbException("Error getting data source.", ex); // NON-NLS + } finally { + EamDbUtil.closeStatement(preparedStatement); + EamDbUtil.closeResultSet(resultSet); + EamDbUtil.closeConnection(conn); + } + + return eamDataSourceResult; + } + /** * Retrieves Data Source details based on data source ID * @@ -2756,37 +2772,48 @@ abstract class AbstractSqlEamDb implements EamDb { @Override public CorrelationAttributeInstance.Type getCorrelationTypeById(int typeId) throws EamDbException { try { - return typeCache.get(CorrelationAttributeInstance.FILES_TYPE_ID, () -> { - Connection conn = connect(); - - CorrelationAttributeInstance.Type aType; - PreparedStatement preparedStatement = null; - ResultSet resultSet = null; - String sql = "SELECT * FROM correlation_types WHERE id=?"; - - try { - preparedStatement = conn.prepareStatement(sql); - preparedStatement.setInt(1, typeId); - resultSet = preparedStatement.executeQuery(); - if (resultSet.next()) { - aType = getCorrelationTypeFromResultSet(resultSet); - return aType; - } else { - throw new EamDbException("Failed to find entry for correlation type ID = " + typeId); - } - - } catch (SQLException ex) { - throw new EamDbException("Error getting correlation type by id.", ex); // NON-NLS - } finally { - EamDbUtil.closeStatement(preparedStatement); - EamDbUtil.closeResultSet(resultSet); - EamDbUtil.closeConnection(conn); - } - }); + return typeCache.get(CorrelationAttributeInstance.FILES_TYPE_ID, () ->getCorrelationTypeByIdFromCr(typeId)); } catch (ExecutionException ex) { throw new EamDbException("Error getting correlation 
type", ex); } } + + /** + * Get the EamArtifact.Type that has the given Type.Id from the central repo + * + * @param typeId Type.Id of Correlation Type to get + * + * @return EamArtifact.Type or null if it doesn't exist. + * + * @throws EamDbException + */ + private CorrelationAttributeInstance.Type getCorrelationTypeByIdFromCr(int typeId) throws EamDbException { + Connection conn = connect(); + + CorrelationAttributeInstance.Type aType; + PreparedStatement preparedStatement = null; + ResultSet resultSet = null; + String sql = "SELECT * FROM correlation_types WHERE id=?"; + + try { + preparedStatement = conn.prepareStatement(sql); + preparedStatement.setInt(1, typeId); + resultSet = preparedStatement.executeQuery(); + if (resultSet.next()) { + aType = getCorrelationTypeFromResultSet(resultSet); + return aType; + } else { + throw new EamDbException("Failed to find entry for correlation type ID = " + typeId); + } + + } catch (SQLException ex) { + throw new EamDbException("Error getting correlation type by id.", ex); // NON-NLS + } finally { + EamDbUtil.closeStatement(preparedStatement); + EamDbUtil.closeResultSet(resultSet); + EamDbUtil.closeConnection(conn); + } + } /** * Convert a ResultSet to a EamCase object From 288391f7d0dc4a211323eafed6e70af9eaa234d9 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Mon, 27 Aug 2018 17:13:45 -0400 Subject: [PATCH 04/33] 4163 cache based on both criteria we use to get case and datasource --- .../datamodel/AbstractSqlEamDb.java | 135 +++++++++++++++--- 1 file changed, 112 insertions(+), 23 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index 825c61f837..d088cd76d6 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -45,6 +45,7 @@ import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.healthmonitor.HealthMonitor; import org.sleuthkit.autopsy.healthmonitor.TimingMetric; import org.sleuthkit.datamodel.CaseDbSchemaVersionNumber; +import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; /** @@ -64,13 +65,18 @@ abstract class AbstractSqlEamDb implements EamDb { private static final int CASE_CACHE_TIMEOUT = 5; private static final int DATA_SOURCE_CACHE_TIMEOUT = 5; private static final Cache typeCache = CacheBuilder.newBuilder().build(); - private static final Cache caseCache = CacheBuilder.newBuilder() + private static final Cache caseCacheByUUID = CacheBuilder.newBuilder() .expireAfterWrite(CASE_CACHE_TIMEOUT, TimeUnit.MINUTES). build(); - private static final Cache dataSourceCache = CacheBuilder.newBuilder() + private static final Cache caseCacheById = CacheBuilder.newBuilder() + .expireAfterWrite(CASE_CACHE_TIMEOUT, TimeUnit.MINUTES). + build(); + private static final Cache dataSourceCacheByDeviceId = CacheBuilder.newBuilder() + .expireAfterWrite(DATA_SOURCE_CACHE_TIMEOUT, TimeUnit.MINUTES). + build(); + private static final Cache dataSourceCacheById = CacheBuilder.newBuilder() .expireAfterWrite(DATA_SOURCE_CACHE_TIMEOUT, TimeUnit.MINUTES). 
build(); - // Maximum length for the value column in the instance tables static final int MAX_VALUE_LENGTH = 256; @@ -167,8 +173,10 @@ abstract class AbstractSqlEamDb implements EamDb { */ protected final void clearCaches() { typeCache.invalidateAll(); - caseCache.invalidateAll(); - dataSourceCache.invalidateAll(); + caseCacheByUUID.invalidateAll(); + caseCacheById.invalidateAll(); + dataSourceCacheByDeviceId.invalidateAll(); + dataSourceCacheById.invalidateAll(); } /** @@ -225,7 +233,7 @@ abstract class AbstractSqlEamDb implements EamDb { + getConflictClause(); try { - preparedStatement = conn.prepareStatement(sql); + preparedStatement = conn.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS); preparedStatement.setString(1, eamCase.getCaseUUID()); if (null == eamCase.getOrg()) { @@ -262,6 +270,17 @@ abstract class AbstractSqlEamDb implements EamDb { } preparedStatement.executeUpdate(); + //update the case in the caches + ResultSet resultSet = preparedStatement.getGeneratedKeys(); + if (!resultSet.next()) { + throw new EamDbException(String.format("Failed to INSERT case %s in central repo", eamCase.getCaseUUID())); + } + int caseID = resultSet.getInt(1); //last_insert_rowid() + CorrelationCase correlationCase = new CorrelationCase(caseID, eamCase.getCaseUUID(), eamCase.getOrg(), + eamCase.getDisplayName(), eamCase.getCreationDate(), eamCase.getCaseNumber(), eamCase.getExaminerName(), + eamCase.getExaminerEmail(), eamCase.getExaminerPhone(), eamCase.getNotes()); + caseCacheByUUID.put(eamCase.getCaseUUID(), correlationCase); + caseCacheById.put(caseID, correlationCase); } catch (SQLException ex) { throw new EamDbException("Error inserting new case.", ex); // NON-NLS } finally { @@ -300,11 +319,7 @@ abstract class AbstractSqlEamDb implements EamDb { @Override public CorrelationCase getCase(Case autopsyCase) throws EamDbException { - try { - return caseCache.get(autopsyCase.getName(), () -> getCaseByUUID(autopsyCase.getName())); - } catch (ExecutionException ex) { - throw new EamDbException("Error getting autopsy case from Central repo", ex); - } + return getCaseByUUID(autopsyCase.getName()); } /** @@ -365,6 +380,9 @@ abstract class AbstractSqlEamDb implements EamDb { preparedStatement.setString(9, eamCase.getCaseUUID()); preparedStatement.executeUpdate(); + //update the case in the cache + caseCacheById.put(eamCase.getID(), eamCase); + caseCacheByUUID.put(eamCase.getCaseUUID(), eamCase); } catch (SQLException ex) { throw new EamDbException("Error updating case.", ex); // NON-NLS } finally { @@ -373,6 +391,22 @@ abstract class AbstractSqlEamDb implements EamDb { } } + /** + * Retrieves Case details based on Case UUID from the central repo + * + * @param caseUUID unique identifier for a case + * + * @return The retrieved case + */ + @Override + public CorrelationCase getCaseByUUID(String caseUUID) throws EamDbException { + try { + return caseCacheByUUID.get(caseUUID, () -> getCaseByUUIDFromCr(caseUUID)); + } catch (ExecutionException ex) { + throw new EamDbException("Error getting autopsy case from Central repo", ex); + } + } + /** * Retrieves Case details based on Case UUID * @@ -380,10 +414,7 @@ abstract class AbstractSqlEamDb implements EamDb { * * @return The retrieved case */ - @Override - public CorrelationCase getCaseByUUID(String caseUUID) throws EamDbException { - // @@@ We should have a cache here... 
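The insert paths above pair Statement.RETURN_GENERATED_KEYS with getGeneratedKeys() so the new row's id can be read back and the freshly constructed object put straight into the caches without a second SELECT. The JDBC step in isolation, assuming the Xerial sqlite-jdbc driver is available (the table and column names are invented for the demo):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;

    public class GeneratedKeysSketch {

        public static void main(String[] args) throws SQLException {
            try (Connection conn = DriverManager.getConnection("jdbc:sqlite::memory:")) {
                try (Statement ddl = conn.createStatement()) {
                    ddl.execute("CREATE TABLE cases (id INTEGER PRIMARY KEY, case_uid TEXT)");
                }
                try (PreparedStatement ps = conn.prepareStatement(
                        "INSERT INTO cases (case_uid) VALUES (?)",
                        Statement.RETURN_GENERATED_KEYS)) {
                    ps.setString(1, "uuid-1234");
                    ps.executeUpdate();
                    try (ResultSet keys = ps.getGeneratedKeys()) {
                        if (!keys.next()) {
                            throw new SQLException("INSERT produced no generated key");
                        }
                        // Under SQLite this is last_insert_rowid(), as the patch comments note.
                        int newId = keys.getInt(1);
                        System.out.println("new row id = " + newId);
                        // ...construct the domain object with newId and put() it in the caches.
                    }
                }
            }
        }
    }
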
- + private CorrelationCase getCaseByUUIDFromCr(String caseUUID) throws EamDbException { Connection conn = connect(); CorrelationCase eamCaseResult = null; @@ -403,6 +434,10 @@ abstract class AbstractSqlEamDb implements EamDb { if (resultSet.next()) { eamCaseResult = getEamCaseFromResultSet(resultSet); } + if (eamCaseResult != null) { + //Update the version in the other cache + caseCacheById.put(eamCaseResult.getID(), eamCaseResult); + } } catch (SQLException ex) { throw new EamDbException("Error getting case details.", ex); // NON-NLS } finally { @@ -423,8 +458,21 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public CorrelationCase getCaseById(int caseId) throws EamDbException { - // @@@ We should have a cache here... + try { + return caseCacheById.get(caseId, () -> getCaseByIdFromCr(caseId)); + } catch (ExecutionException ex) { + throw new EamDbException("Error getting autopsy case from Central repo", ex); + } + } + /** + * Retrieves Case details based on Case ID + * + * @param caseID unique identifier for a case + * + * @return The retrieved case + */ + private CorrelationCase getCaseByIdFromCr(int caseId) throws EamDbException { Connection conn = connect(); CorrelationCase eamCaseResult = null; @@ -436,7 +484,6 @@ abstract class AbstractSqlEamDb implements EamDb { + "FROM cases " + "LEFT JOIN organizations ON cases.org_id=organizations.id " + "WHERE cases.id=?"; - try { preparedStatement = conn.prepareStatement(sql); preparedStatement.setInt(1, caseId); @@ -444,6 +491,10 @@ abstract class AbstractSqlEamDb implements EamDb { if (resultSet.next()) { eamCaseResult = getEamCaseFromResultSet(resultSet); } + if (eamCaseResult != null) { + //Update the version in the other cache + caseCacheByUUID.put(eamCaseResult.getCaseUUID(), eamCaseResult); + } } catch (SQLException ex) { throw new EamDbException("Error getting case details.", ex); // NON-NLS } finally { @@ -492,6 +543,14 @@ abstract class AbstractSqlEamDb implements EamDb { return cases; } + private static String getDataSourceCacheKey(int caseId, String dataSourceDeviceId) { + return "Case" + caseId + "DeviceId" + dataSourceDeviceId; + } + + private static String getDataSourceCacheKey(int caseId, int dataSourceId) { + return "Case" + caseId + "Id" + dataSourceId; + } + /** * Creates new Data Source in the database * @@ -513,13 +572,21 @@ abstract class AbstractSqlEamDb implements EamDb { + getConflictClause(); try { - preparedStatement = conn.prepareStatement(sql); + preparedStatement = conn.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS); preparedStatement.setString(1, eamDataSource.getDeviceID()); preparedStatement.setInt(2, eamDataSource.getCaseID()); preparedStatement.setString(3, eamDataSource.getName()); preparedStatement.executeUpdate(); + ResultSet resultSet = preparedStatement.getGeneratedKeys(); + if (!resultSet.next()) { + throw new EamDbException(String.format("Failed to INSERT data source %s in central repo", eamDataSource.getName())); + } + int dataSourceId = resultSet.getInt(1); //last_insert_rowid() + CorrelationDataSource dataSource = new CorrelationDataSource(eamDataSource.getCaseID(), dataSourceId, eamDataSource.getDeviceID(), eamDataSource.getName()); + dataSourceCacheByDeviceId.put(getDataSourceCacheKey(dataSource.getCaseID(), dataSource.getDeviceID()), dataSource); + dataSourceCacheById.put(getDataSourceCacheKey(dataSource.getCaseID(), dataSource.getID()), dataSource); } catch (SQLException ex) { throw new EamDbException("Error inserting new data source.", ex); // NON-NLS } finally { @@ 
-546,7 +613,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation case is null"); } try { - return dataSourceCache.get(correlationCase.getCaseUUID() + dataSourceDeviceId, () -> getDataSourceFromCr(correlationCase, dataSourceDeviceId)); + return dataSourceCacheByDeviceId.get(getDataSourceCacheKey(correlationCase.getID(), dataSourceDeviceId), () -> getDataSourceFromCr(correlationCase, dataSourceDeviceId)); } catch (ExecutionException ex) { throw new EamDbException("Error getting data source from central repository", ex); } @@ -581,6 +648,9 @@ abstract class AbstractSqlEamDb implements EamDb { if (resultSet.next()) { eamDataSourceResult = getEamDataSourceFromResultSet(resultSet); } + if (eamDataSourceResult != null) { + dataSourceCacheById.put(getDataSourceCacheKey(correlationCase.getID(), eamDataSourceResult.getID()), eamDataSourceResult); + } } catch (SQLException ex) { throw new EamDbException("Error getting data source.", ex); // NON-NLS } finally { @@ -606,7 +676,23 @@ abstract class AbstractSqlEamDb implements EamDb { if (correlationCase == null) { throw new EamDbException("Correlation case is null"); } + try { + return dataSourceCacheByDeviceId.get(getDataSourceCacheKey(correlationCase.getID(), dataSourceId), () -> getDataSourceByIdFromCr(correlationCase, dataSourceId)); + } catch (ExecutionException ex) { + throw new EamDbException("Error getting data source from central repository", ex); + } + } + /** + * Retrieves Data Source details based on data source ID + * + * @param correlationCase the current CorrelationCase used for ensuring + * uniqueness of DataSource + * @param dataSourceId the data source ID number + * + * @return The data source + */ + private CorrelationDataSource getDataSourceByIdFromCr(CorrelationCase correlationCase, int dataSourceId) throws EamDbException { Connection conn = connect(); CorrelationDataSource eamDataSourceResult = null; @@ -623,6 +709,9 @@ abstract class AbstractSqlEamDb implements EamDb { if (resultSet.next()) { eamDataSourceResult = getEamDataSourceFromResultSet(resultSet); } + if (eamDataSourceResult != null) { + dataSourceCacheByDeviceId.put(getDataSourceCacheKey(correlationCase.getID(), eamDataSourceResult.getDeviceID()), eamDataSourceResult); + } } catch (SQLException ex) { throw new EamDbException("Error getting data source.", ex); // NON-NLS } finally { @@ -2750,7 +2839,7 @@ abstract class AbstractSqlEamDb implements EamDb { preparedStatement.setInt(4, aType.isEnabled() ? 
1 : 0); preparedStatement.setInt(5, aType.getId()); preparedStatement.executeUpdate(); - + typeCache.put(aType.getId(), aType); } catch (SQLException ex) { throw new EamDbException("Error updating correlation type.", ex); // NON-NLS } finally { @@ -2772,13 +2861,13 @@ abstract class AbstractSqlEamDb implements EamDb { @Override public CorrelationAttributeInstance.Type getCorrelationTypeById(int typeId) throws EamDbException { try { - return typeCache.get(CorrelationAttributeInstance.FILES_TYPE_ID, () ->getCorrelationTypeByIdFromCr(typeId)); + return typeCache.get(CorrelationAttributeInstance.FILES_TYPE_ID, () -> getCorrelationTypeByIdFromCr(typeId)); } catch (ExecutionException ex) { throw new EamDbException("Error getting correlation type", ex); } } - - /** + + /** * Get the EamArtifact.Type that has the given Type.Id from the central repo * * @param typeId Type.Id of Correlation Type to get From fc4e549dd160cfed31b84ca9293bb0da30774b79 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Mon, 27 Aug 2018 17:15:54 -0400 Subject: [PATCH 05/33] 4163 remove unused import --- .../autopsy/centralrepository/datamodel/AbstractSqlEamDb.java | 1 - 1 file changed, 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index d088cd76d6..be1518cb42 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -45,7 +45,6 @@ import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.healthmonitor.HealthMonitor; import org.sleuthkit.autopsy.healthmonitor.TimingMetric; import org.sleuthkit.datamodel.CaseDbSchemaVersionNumber; -import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; /** From ebcac1eb244ef04676b8c639eef6bbfb2a69714a Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Tue, 28 Aug 2018 12:01:39 -0400 Subject: [PATCH 06/33] 4163 comments and refactoring for clarity with new caches --- .../datamodel/AbstractSqlEamDb.java | 35 +++++++++++++------ 1 file changed, 25 insertions(+), 10 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index be1518cb42..cfa39a8f32 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -542,12 +542,27 @@ abstract class AbstractSqlEamDb implements EamDb { return cases; } - private static String getDataSourceCacheKey(int caseId, String dataSourceDeviceId) { - return "Case" + caseId + "DeviceId" + dataSourceDeviceId; + /** + * Create a key to the DataSourceCacheByDeviceId + * + * @param caseId - the id of the CorrelationCase in the Central Repository + * @param dataSourceDeviceId - the device Id of the data source + * + * @return a String to be used as a key for the dataSourceCacheByDeviceId + */ + private static String getDataSourceByDeviceIdCacheKey(int caseId, String dataSourceDeviceId) { + return "Case" + caseId + "DeviceId" + dataSourceDeviceId; //NON-NLS } - private static String getDataSourceCacheKey(int caseId, int dataSourceId) { - return "Case" + caseId + "Id" + dataSourceId; + /** + * Create a key to the DataSourceCacheById + * + * @param caseId - the id of the CorrelationCase in the 
Central Repository + * @param dataSourceId - the id of the datasource in the central repository + * @return a String to be used as a key for the dataSourceCacheById + */ + private static String getDataSourceByIdCacheKey(int caseId, int dataSourceId) { + return "Case" + caseId + "Id" + dataSourceId; //NON-NLS } /** @@ -584,8 +599,8 @@ abstract class AbstractSqlEamDb implements EamDb { } int dataSourceId = resultSet.getInt(1); //last_insert_rowid() CorrelationDataSource dataSource = new CorrelationDataSource(eamDataSource.getCaseID(), dataSourceId, eamDataSource.getDeviceID(), eamDataSource.getName()); - dataSourceCacheByDeviceId.put(getDataSourceCacheKey(dataSource.getCaseID(), dataSource.getDeviceID()), dataSource); - dataSourceCacheById.put(getDataSourceCacheKey(dataSource.getCaseID(), dataSource.getID()), dataSource); + dataSourceCacheByDeviceId.put(getDataSourceByDeviceIdCacheKey(dataSource.getCaseID(), dataSource.getDeviceID()), dataSource); + dataSourceCacheById.put(getDataSourceByIdCacheKey(dataSource.getCaseID(), dataSource.getID()), dataSource); } catch (SQLException ex) { throw new EamDbException("Error inserting new data source.", ex); // NON-NLS } finally { @@ -612,7 +627,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation case is null"); } try { - return dataSourceCacheByDeviceId.get(getDataSourceCacheKey(correlationCase.getID(), dataSourceDeviceId), () -> getDataSourceFromCr(correlationCase, dataSourceDeviceId)); + return dataSourceCacheByDeviceId.get(getDataSourceByDeviceIdCacheKey(correlationCase.getID(), dataSourceDeviceId), () -> getDataSourceFromCr(correlationCase, dataSourceDeviceId)); } catch (ExecutionException ex) { throw new EamDbException("Error getting data source from central repository", ex); } @@ -648,7 +663,7 @@ abstract class AbstractSqlEamDb implements EamDb { eamDataSourceResult = getEamDataSourceFromResultSet(resultSet); } if (eamDataSourceResult != null) { - dataSourceCacheById.put(getDataSourceCacheKey(correlationCase.getID(), eamDataSourceResult.getID()), eamDataSourceResult); + dataSourceCacheById.put(getDataSourceByIdCacheKey(correlationCase.getID(), eamDataSourceResult.getID()), eamDataSourceResult); } } catch (SQLException ex) { throw new EamDbException("Error getting data source.", ex); // NON-NLS @@ -676,7 +691,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation case is null"); } try { - return dataSourceCacheByDeviceId.get(getDataSourceCacheKey(correlationCase.getID(), dataSourceId), () -> getDataSourceByIdFromCr(correlationCase, dataSourceId)); + return dataSourceCacheByDeviceId.get(getDataSourceByIdCacheKey(correlationCase.getID(), dataSourceId), () -> getDataSourceByIdFromCr(correlationCase, dataSourceId)); } catch (ExecutionException ex) { throw new EamDbException("Error getting data source from central repository", ex); } @@ -709,7 +724,7 @@ abstract class AbstractSqlEamDb implements EamDb { eamDataSourceResult = getEamDataSourceFromResultSet(resultSet); } if (eamDataSourceResult != null) { - dataSourceCacheByDeviceId.put(getDataSourceCacheKey(correlationCase.getID(), eamDataSourceResult.getDeviceID()), eamDataSourceResult); + dataSourceCacheByDeviceId.put(getDataSourceByDeviceIdCacheKey(correlationCase.getID(), eamDataSourceResult.getDeviceID()), eamDataSourceResult); } } catch (SQLException ex) { throw new EamDbException("Error getting data source.", ex); // NON-NLS From 056f4e953430fb275355e623d8881b8f1078a1bf Mon Sep 17 00:00:00 2001 From: William Schaefer 
Date: Tue, 28 Aug 2018 12:04:15 -0400 Subject: [PATCH 07/33] 4163 update copyright dates --- .../autopsy/centralrepository/datamodel/PostgresEamDb.java | 2 +- .../autopsy/centralrepository/datamodel/SqliteEamDb.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java index 9e701fad3a..97abd1dec9 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java @@ -1,7 +1,7 @@ /* * Central Repository * - * Copyright 2015-2017 Basis Technology Corp. + * Copyright 2015-2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java index 34157dea68..e4d7b7bd9d 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java @@ -1,7 +1,7 @@ /* * Central Repository * - * Copyright 2015-2017 Basis Technology Corp. + * Copyright 2015-2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); From 2f57b436768c0bdedd17336c99504f9f5b34bd3f Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Tue, 28 Aug 2018 12:52:42 -0400 Subject: [PATCH 08/33] 4163 fix bug using wrong cache --- .../autopsy/centralrepository/datamodel/AbstractSqlEamDb.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index cfa39a8f32..68f1b4587a 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -691,7 +691,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation case is null"); } try { - return dataSourceCacheByDeviceId.get(getDataSourceByIdCacheKey(correlationCase.getID(), dataSourceId), () -> getDataSourceByIdFromCr(correlationCase, dataSourceId)); + return dataSourceCacheById.get(getDataSourceByIdCacheKey(correlationCase.getID(), dataSourceId), () -> getDataSourceByIdFromCr(correlationCase, dataSourceId)); } catch (ExecutionException ex) { throw new EamDbException("Error getting data source from central repository", ex); } From ced6607b4c5a5de746b954240ee8f1b46fb205ba Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dsmyda" Date: Fri, 31 Aug 2018 18:04:21 -0400 Subject: [PATCH 09/33] Functional text extractor, looks functionally good. Need to test to make sure its good and also refactor and comment and clean up. 
this is first commit --- .../autopsy/contentviewers/SQLiteViewer.java | 9 ++---- .../tabulardatareader/AbstractReader.java | 30 +++++++++++++++++-- .../tabulardatareader/ExcelReader.java | 7 +++-- .../tabulardatareader/FileReaderFactory.java | 11 ++++--- .../tabulardatareader/SQLiteReader.java | 8 ++--- .../KeywordSearchIngestModule.java | 2 ++ .../keywordsearch/TikaTextExtractor.java | 3 ++ 7 files changed, 48 insertions(+), 22 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.java index 47a33dc578..74b2a90c86 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.java @@ -361,10 +361,8 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer { private void processSQLiteFile() { tablesDropdownList.removeAllItems(); try { - String localDiskPath = Case.getCurrentCaseThrows().getTempDirectory() + - File.separator + sqliteDbFile.getName(); - sqliteReader = FileReaderFactory.createReader(SUPPORTED_MIMETYPES[0], sqliteDbFile, localDiskPath); + sqliteReader = FileReaderFactory.createReader(SUPPORTED_MIMETYPES[0], sqliteDbFile); Map dbTablesMap = sqliteReader.getTableSchemas(); @@ -376,9 +374,6 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer { tablesDropdownList.addItem(tableName); }); } - } catch (NoCurrentCaseException ex) { - logger.log(Level.SEVERE, "Current case has been closed", ex); //NON-NLS - MessageNotifyUtil.Message.error(Bundle.SQLiteViewer_errorMessage_noCurrentCase()); } catch (FileReaderException ex) { logger.log(Level.SEVERE, String.format( "Failed to get tables from DB file '%s' (objId=%d)", //NON-NLS @@ -387,7 +382,7 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer { Bundle.SQLiteViewer_errorMessage_failedToQueryDatabase()); } catch (FileReaderInitException ex) { logger.log(Level.SEVERE, String.format( - "Failed to create a SQLiteReader '%s' (objId=%d)", //NON-NLS + "Failed to create a SQLiteReader for file: '%s' (objId=%d)", //NON-NLS sqliteDbFile.getName(), sqliteDbFile.getId()), ex); } } diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java index cb74819142..b5a3edb02e 100755 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java +++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java @@ -22,6 +22,9 @@ import java.io.File; import java.io.IOException; import java.util.List; import java.util.Map; +import org.openide.util.Exceptions; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.TskCoreException; @@ -32,10 +35,33 @@ import org.sleuthkit.datamodel.TskCoreException; */ public abstract class AbstractReader implements AutoCloseable { - public AbstractReader(AbstractFile file, String localDiskPath) + public AbstractReader(AbstractFile file) throws FileReaderInitException { - writeDataSourceToLocalDisk(file, localDiskPath); + try { + writeDataSourceToLocalDisk(file, getLocalDiskPath(file)); + } catch (FileReaderException ex) { + throw new FileReaderInitException(ex); + } + + } + + /** + * Generates a local disk path for abstract file contents to be copied. 
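Because AbstractReader implements AutoCloseable, the intended call-site shape for these readers is try-with-resources, which releases the underlying connection or workbook even when a read throws midway. A small sketch of that usage with an invented TableReader stand-in (illustrative only; the real contract lives in AbstractReader):

    import java.util.Arrays;
    import java.util.List;

    public class ReaderUsageSketch {

        // Invented stand-in for the AbstractReader contract.
        interface TableReader extends AutoCloseable {
            List<String> tableNames() throws Exception;

            @Override
            void close(); // this sketch narrows close() to throw nothing checked
        }

        static TableReader openReader() {
            return new TableReader() {
                @Override
                public List<String> tableNames() {
                    return Arrays.asList("messages", "contacts");
                }

                @Override
                public void close() {
                    System.out.println("connection closed, temp copy releasable");
                }
            };
        }

        public static void main(String[] args) throws Exception {
            // close() is guaranteed to run, even if tableNames() were to throw mid-read.
            try (TableReader reader = openReader()) {
                for (String table : reader.tableNames()) {
                    System.out.println(table);
                }
            }
        }
    }
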
+ * All file sources must be copied to local disk to be opened by + * abstract reader. + * + * @param file The database abstract file + * @return Valid local path for copying + * @throws NoCurrentCaseException if the current case has been closed. + */ + final String getLocalDiskPath(AbstractFile file) throws FileReaderException { + try { + return Case.getCurrentCaseThrows().getTempDirectory() + + File.separator + file.getName(); + } catch (NoCurrentCaseException ex) { + throw new FileReaderException(ex); + } } /** diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java index cf3ba49388..de76e39040 100755 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java +++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java @@ -58,13 +58,14 @@ public final class ExcelReader extends AbstractReader { private final static String EMPTY_CELL_STRING = ""; private Map headerCache; - public ExcelReader(AbstractFile file, String localDiskPath, String mimeType) + public ExcelReader(AbstractFile file, String mimeType) throws FileReaderInitException { - super(file, localDiskPath); + super(file); try { + final String localDiskPath = super.getLocalDiskPath(file); this.workbook = createWorkbook(localDiskPath, mimeType); headerCache = new HashMap<>(); - } catch (IOException ex) { + } catch (IOException | FileReaderException ex) { throw new FileReaderInitException(ex); } } diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java index e6af1673b2..2887f1cd95 100755 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java +++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java @@ -34,20 +34,19 @@ public final class FileReaderFactory { * is not supported. * * @param mimeType mimeType passed in from the ingest module -g * @param file current file under inspection - * @param localDiskPath path for abstract file contents to be written + * @param file current file under inspection * @return The correct reader class needed to read the file contents * @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException */ - public static AbstractReader createReader(String mimeType, AbstractFile file, - String localDiskPath) throws FileReaderInitException { + public static AbstractReader createReader(String mimeType, AbstractFile file) + throws FileReaderInitException { switch (mimeType) { case "application/x-sqlite3": - return new SQLiteReader(file, localDiskPath); + return new SQLiteReader(file); case "application/vnd.ms-excel": case "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet": try { - return new ExcelReader(file, localDiskPath, mimeType); + return new ExcelReader(file, mimeType); //Catches runtime exceptions being emitted from Apache //POI (such as EncryptedDocumentException) and wraps them //into FileReaderInitException to be caught and logged diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java index c08f571280..42c89c77c1 100755 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java +++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java @@ -58,19 +58,19 @@ public final class SQLiteReader extends AbstractReader { * connection. 
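A note on why findAndCopySQLiteMetaFile goes looking for -wal and -shm siblings: a SQLite database in WAL mode keeps recently committed rows in the write-ahead log until a checkpoint, so copying only the main file can silently drop data. A stand-alone java.nio sketch of the copy-with-sidecars step (the Autopsy code routes this through the case FileManager and ContentUtils instead):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.StandardCopyOption;

    public class SqliteSidecarSketch {

        /**
         * Copy a SQLite database plus any -wal/-shm sidecars into destDir.
         * Rows still sitting in the write-ahead log would be invisible to a
         * copy that brought over only the main database file.
         */
        static Path copyWithSidecars(Path db, Path destDir) throws IOException {
            Path target = destDir.resolve(db.getFileName());
            Files.copy(db, target, StandardCopyOption.REPLACE_EXISTING);
            for (String suffix : new String[]{"-wal", "-shm"}) {
                Path sidecar = db.resolveSibling(db.getFileName() + suffix);
                if (Files.exists(sidecar)) {
                    Files.copy(sidecar, destDir.resolve(sidecar.getFileName()),
                            StandardCopyOption.REPLACE_EXISTING);
                }
            }
            return target;
        }

        public static void main(String[] args) throws IOException {
            Path src = Files.createTempDirectory("src");
            Path dest = Files.createTempDirectory("dest");
            Path db = Files.write(src.resolve("contacts.db"), new byte[]{1, 2, 3});
            Files.write(src.resolve("contacts.db-wal"), new byte[]{4, 5});
            System.out.println("copied: " + copyWithSidecars(db, dest));
        }
    }
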
* * @param sqliteDbFile Data source abstract file - * @param localDiskPath Location for database contents to be copied to * @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException */ - public SQLiteReader(AbstractFile sqliteDbFile, String localDiskPath) throws FileReaderInitException { - super(sqliteDbFile, localDiskPath); + public SQLiteReader(AbstractFile sqliteDbFile) throws FileReaderInitException { + super(sqliteDbFile); try { + final String localDiskPath = super.getLocalDiskPath(sqliteDbFile); // Look for any meta files associated with this DB - WAL, SHM, etc. findAndCopySQLiteMetaFile(sqliteDbFile, sqliteDbFile.getName() + "-wal"); findAndCopySQLiteMetaFile(sqliteDbFile, sqliteDbFile.getName() + "-shm"); connection = getDatabaseConnection(localDiskPath); } catch (ClassNotFoundException | SQLException |IOException | - NoCurrentCaseException | TskCoreException ex) { + NoCurrentCaseException | TskCoreException | FileReaderException ex) { throw new FileReaderInitException(ex); } } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java index 01d95efe53..7eccd061ac 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java @@ -249,6 +249,7 @@ public final class KeywordSearchIngestModule implements FileIngestModule { textExtractors = new ArrayList<>(); //order matters, more specific extractors first textExtractors.add(new HtmlTextExtractor()); + textExtractors.add(new SqliteTextExtractor()); textExtractors.add(new TikaTextExtractor()); indexer = new Indexer(); @@ -438,6 +439,7 @@ public final class KeywordSearchIngestModule implements FileIngestModule { //go over available text extractors in order, and pick the first one (most specific one) for (ContentTextExtractor fe : textExtractors) { if (fe.isSupported(aFile, detectedFormat)) { + System.out.println(fe); extractor = fe; break; } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java index 0da7cb4b10..ea306ff9a9 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java @@ -193,13 +193,16 @@ class TikaTextExtractor extends ContentTextExtractor { @Override public boolean isSupported(Content content, String detectedFormat) { + final String SQLITE_MIMETYPE = "application/x-sqlite3"; if (detectedFormat == null || ContentTextExtractor.BLOB_MIME_TYPES.contains(detectedFormat) //any binary unstructured blobs (string extraction will be used) || ContentTextExtractor.ARCHIVE_MIME_TYPES.contains(detectedFormat) || (detectedFormat.startsWith("video/") && !detectedFormat.equals("video/x-flv")) //skip video other than flv (tika supports flv only) //NON-NLS + || detectedFormat.equals(SQLITE_MIMETYPE) //Skip sqlite files, Tika cannot handle virtual tables and will fail with an exception. 
See SqliteTextExtractor class ) { return false; } + System.out.println(detectedFormat); return TIKA_SUPPORTED_TYPES.contains(detectedFormat); } From 532bace6c25fde69861bfe7605689c4f91dd80aa Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dsmyda" Date: Fri, 31 Aug 2018 18:04:53 -0400 Subject: [PATCH 10/33] Almost forgot this file too --- .../keywordsearch/SqliteTextExtractor.java | 121 ++++++++++++++++++ 1 file changed, 121 insertions(+) create mode 100755 KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java new file mode 100755 index 0000000000..09be770bd5 --- /dev/null +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java @@ -0,0 +1,121 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package org.sleuthkit.autopsy.keywordsearch; + +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.util.List; +import java.util.Map; +import java.util.logging.Level; +import org.sleuthkit.autopsy.tabulardatareader.AbstractReader; +import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException; +import org.sleuthkit.autopsy.tabulardatareader.SQLiteReader; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * + * @author dsmyda + */ +public class SqliteTextExtractor extends ContentTextExtractor { + + private final String SQLITE_MIMETYPE = "application/x-sqlite3"; + + @Override + boolean isContentTypeSpecific() { + return true; + } + + @Override + boolean isSupported(Content file, String detectedFormat) { + return SQLITE_MIMETYPE.equals(detectedFormat); + } + + @Override + public Reader getReader(Content source) throws TextExtractorException { + return new InputStreamReader(new SqliteTextReader(source)); + } + + @Override + public boolean isDisabled() { + return false; + } + + @Override + public void logWarning(String msg, Exception ex) { + //TODO - come back. 
+ } + + private final class SqliteTextReader extends InputStream { + + private final Content source; + private final SQLiteReader reader; + private StringBuffer fileData; + private int currIndex; + private final int NO_CONTENT_LEFT = -1; + + public SqliteTextReader(Content source) throws TextExtractorException { + this.source = source; + try { + this.reader = new SQLiteReader((AbstractFile) source.getDataSource()); + } catch (TskCoreException ex) { + throw new TextExtractorException( + String.format("Encountered a TskCoreException when getting " + + "root data source for Content with id:[%s], name:[%s].", + source.getId(), source.getName())); + } catch (FileReaderInitException ex) { + throw new TextExtractorException( + String.format("Encountered a FileReaderInitException when trying " + + "to initialize a SQLiteReader for Content with id:[%s], " + + "name:[%s].", source.getId(), source.getName())); + } + this.fileData = new StringBuffer(); + //Fill the entire buffer on instantiation + copySqliteFileIntoStringBuffer(source); + } + + private void copySqliteFileIntoStringBuffer(Content source){ + Map tables; + try { + //Table name to table schema mapping + tables = reader.getTableSchemas(); + for(String tableName : tables.keySet()) { + try { + List> rowsInTable = reader.getRowsFromTable(tableName); + for(Map row : rowsInTable) { + //Only interested in row values, not the column name + row.values().forEach(cell -> { + fileData.append(cell.toString()); + }); + } + } catch(AbstractReader.FileReaderException ex) { + // logger.log(Level.WARNING, + // String.format("Error attempting to read file table: [%s]" //NON-NLS + // + " for file: [%s] (id=%d).", tableName, //NON-NLS + // source.getName(), source.getId()), + // ex); + } + } + } catch (AbstractReader.FileReaderException ex) { + //logger.log(Level.WARNING, String.format("Error attempting to get tables from " //NON-NLS + // + "file: [%s] (id=%d).", //NON-NLS + // source.getName(), source.getId()), ex); + } + } + + @Override + public int read() throws IOException { + if (currIndex == fileData.length() - 1) { + return NO_CONTENT_LEFT; + } + return fileData.charAt(currIndex++); + } + } + +} From 2ab4c04dcd8d6773e93e575fbc65df1770d01715 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dsmyda" Date: Wed, 5 Sep 2018 16:05:41 -0400 Subject: [PATCH 11/33] Generalized the tabulardatareader package so that it can work with a Content object and not just AbstractFile object, added a dedicated SqliteTextExtractor to handle sqlite databases during keyword search --- .../tabulardatareader/AbstractReader.java | 7 +- .../tabulardatareader/ExcelReader.java | 3 +- .../tabulardatareader/FileReaderFactory.java | 3 +- .../tabulardatareader/SQLiteReader.java | 11 +- .../KeywordSearchIngestModule.java | 3 +- .../keywordsearch/SqliteTextExtractor.java | 392 +++++++++++++++--- .../keywordsearch/TikaTextExtractor.java | 2 +- 7 files changed, 347 insertions(+), 74 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java index b5a3edb02e..917b862749 100755 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java +++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java @@ -27,6 +27,7 @@ import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.Content; import 
org.sleuthkit.datamodel.TskCoreException; /** @@ -35,7 +36,7 @@ import org.sleuthkit.datamodel.TskCoreException; */ public abstract class AbstractReader implements AutoCloseable { - public AbstractReader(AbstractFile file) + public AbstractReader(Content file) throws FileReaderInitException { try { @@ -55,7 +56,7 @@ public abstract class AbstractReader implements AutoCloseable { * @return Valid local path for copying * @throws NoCurrentCaseException if the current case has been closed. */ - final String getLocalDiskPath(AbstractFile file) throws FileReaderException { + final String getLocalDiskPath(Content file) throws FileReaderException { try { return Case.getCurrentCaseThrows().getTempDirectory() + File.separator + file.getName(); @@ -74,7 +75,7 @@ public abstract class AbstractReader implements AutoCloseable { * @throws NoCurrentCaseException Current case closed during file copying * @throws TskCoreException Exception finding files from abstract file */ - private void writeDataSourceToLocalDisk(AbstractFile file, String localDiskPath) + private void writeDataSourceToLocalDisk(Content file, String localDiskPath) throws FileReaderInitException { try { diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java index de76e39040..711d4c0c60 100755 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java +++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java @@ -39,6 +39,7 @@ import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.datamodel.AbstractFile; import com.monitorjbl.xlsx.StreamingReader; import org.apache.poi.hssf.OldExcelFormatException; +import org.sleuthkit.datamodel.Content; /** * Reads excel files and implements the abstract reader api for interfacing with @@ -58,7 +59,7 @@ public final class ExcelReader extends AbstractReader { private final static String EMPTY_CELL_STRING = ""; private Map headerCache; - public ExcelReader(AbstractFile file, String mimeType) + public ExcelReader(Content file, String mimeType) throws FileReaderInitException { super(file); try { diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java index 2887f1cd95..56707617ff 100755 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java +++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java @@ -20,6 +20,7 @@ package org.sleuthkit.autopsy.tabulardatareader; import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException; import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.Content; /** * Factory for creating the correct reader given the mime type of a file. 
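 * (Usage sketch: createReader("application/x-sqlite3", content) hands back
 * a SQLiteReader, per the switch statement in createReader below.)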
@@ -38,7 +39,7 @@ public final class FileReaderFactory {
      * @return The correct reader class needed to read the file contents
      * @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException
      */
-    public static AbstractReader createReader(String mimeType, AbstractFile file)
+    public static AbstractReader createReader(String mimeType, Content file)
             throws FileReaderInitException {
         switch (mimeType) {
             case "application/x-sqlite3":
diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java
index 42c89c77c1..1b4743faea 100755
--- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java
+++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java
@@ -41,6 +41,7 @@ import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.datamodel.ContentUtils;
 import org.sleuthkit.autopsy.ingest.IngestServices;
 import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.datamodel.SleuthkitCase;
 import org.sleuthkit.datamodel.TskCoreException;
 
@@ -60,7 +61,7 @@ public final class SQLiteReader extends AbstractReader {
      * @param sqliteDbFile Data source abstract file
      * @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException
      */
-    public SQLiteReader(AbstractFile sqliteDbFile) throws FileReaderInitException {
+    public SQLiteReader(Content sqliteDbFile) throws FileReaderInitException {
         super(sqliteDbFile);
         try {
             final String localDiskPath = super.getLocalDiskPath(sqliteDbFile);
@@ -85,17 +86,17 @@ public final class SQLiteReader extends AbstractReader {
      * @throws TskCoreException fileManager cannot find AbstractFile files.
      * @throws IOException Issue during writing to file.
      */
-    private void findAndCopySQLiteMetaFile(AbstractFile sqliteFile,
+    private void findAndCopySQLiteMetaFile(Content sqliteFile,
             String metaFileName) throws NoCurrentCaseException, TskCoreException, IOException {
 
         Case openCase = Case.getCurrentCaseThrows();
         SleuthkitCase sleuthkitCase = openCase.getSleuthkitCase();
         Services services = new Services(sleuthkitCase);
         FileManager fileManager = services.getFileManager();
-        
+
         List<AbstractFile> metaFiles = fileManager.findFiles(
-                sqliteFile.getDataSource(), metaFileName, 
-                sqliteFile.getParent().getName());
+                sqliteFile.getDataSource(), metaFileName,
+                sqliteFile.getParent().getName());
 
         if (metaFiles != null) {
             for (AbstractFile metaFile : metaFiles) {
diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java
index 7eccd061ac..3c6b641eab 100644
--- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java
+++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java
@@ -249,6 +249,8 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
         textExtractors = new ArrayList<>();
         //order matters, more specific extractors first
         textExtractors.add(new HtmlTextExtractor());
+        //Add sqlite text extractor to be default for sqlite files, since Tika struggles
+        //with them.
See SqliteTextExtractor class for specifics textExtractors.add(new SqliteTextExtractor()); textExtractors.add(new TikaTextExtractor()); @@ -439,7 +441,6 @@ public final class KeywordSearchIngestModule implements FileIngestModule { //go over available text extractors in order, and pick the first one (most specific one) for (ContentTextExtractor fe : textExtractors) { if (fe.isSupported(aFile, detectedFormat)) { - System.out.println(fe); extractor = fe; break; } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java index 09be770bd5..50cd4143ab 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java @@ -1,7 +1,20 @@ /* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. + * Autopsy Forensic Browser + * + * Copyright 2018-2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package org.sleuthkit.autopsy.keywordsearch; @@ -9,34 +22,63 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.logging.Level; +import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.tabulardatareader.AbstractReader; import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException; -import org.sleuthkit.autopsy.tabulardatareader.SQLiteReader; -import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; -import org.sleuthkit.datamodel.TskCoreException; +import org.apache.commons.lang3.StringUtils; +import org.sleuthkit.autopsy.tabulardatareader.FileReaderFactory; /** + * Dedicated SqliteTextExtractor to solve the problems associated with Tika's + * Sqlite parser. 
+ * + * Tika problems: + * 1) Tika fails to open virtual tables + * 2) Tika fails to open tables with spaces in table name + * 3) Tika fails to include the table names in output (except for the first table it parses) + * 4) BasisTech > Apache * - * @author dsmyda */ public class SqliteTextExtractor extends ContentTextExtractor { - + private final String SQLITE_MIMETYPE = "application/x-sqlite3"; + private static final Logger logger = Logger.getLogger(SqliteTextExtractor.class.getName()); @Override boolean isContentTypeSpecific() { return true; } + /** + * Supports only the sqlite mimetypes + * + * @param file Content file + * @param detectedFormat Mimetype of content file + * + * @return true if x-sqlite3 + */ @Override boolean isSupported(Content file, String detectedFormat) { return SQLITE_MIMETYPE.equals(detectedFormat); } + /** + * Returns an input stream that will read from a sqlite database. + * + * @param source Content file + * + * @return An InputStream that reads from a Sqlite database. + * + * @throws + * org.sleuthkit.autopsy.keywordsearch.TextExtractor.TextExtractorException + */ @Override public Reader getReader(Content source) throws TextExtractorException { return new InputStreamReader(new SqliteTextReader(source)); @@ -49,73 +91,299 @@ public class SqliteTextExtractor extends ContentTextExtractor { @Override public void logWarning(String msg, Exception ex) { - //TODO - come back. + logger.log(Level.WARNING, msg, ex); //NON-NLS } - + + /** + * InputStream that is returned from the getReader method. This stream opens + * a sqlite file and loads its contents into a buffer that can be read from + * the read function. + */ private final class SqliteTextReader extends InputStream { - - private final Content source; - private final SQLiteReader reader; - private StringBuffer fileData; - private int currIndex; + + private StringBuilder databaseBuffer; + private int currReadIndex; private final int NO_CONTENT_LEFT = -1; - + + /** + * The buffer is filled during initialization, meaning the whole sqlite + * file is read during construction. 
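+     * (The entire database text is therefore held in memory at once; the
+     * commit notes later in this series flag streaming the database files
+     * as possible future work.)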
+ * + * @param source Content file that is the sqlite database + * + * @throws + * org.sleuthkit.autopsy.keywordsearch.TextExtractor.TextExtractorException + */ public SqliteTextReader(Content source) throws TextExtractorException { - this.source = source; - try { - this.reader = new SQLiteReader((AbstractFile) source.getDataSource()); - } catch (TskCoreException ex) { - throw new TextExtractorException( - String.format("Encountered a TskCoreException when getting " - + "root data source for Content with id:[%s], name:[%s].", - source.getId(), source.getName())); + try (AbstractReader reader = FileReaderFactory.createReader( + SQLITE_MIMETYPE, source)) { + this.databaseBuffer = new StringBuilder(); + //Fill the entire buffer upon instantiation + copyDatabaseIntoBuffer(source, reader); } catch (FileReaderInitException ex) { throw new TextExtractorException( - String.format("Encountered a FileReaderInitException when trying " - + "to initialize a SQLiteReader for Content with id:[%s], " - + "name:[%s].", source.getId(), source.getName())); - } - this.fileData = new StringBuffer(); - //Fill the entire buffer on instantiation - copySqliteFileIntoStringBuffer(source); - } - - private void copySqliteFileIntoStringBuffer(Content source){ - Map tables; - try { - //Table name to table schema mapping - tables = reader.getTableSchemas(); - for(String tableName : tables.keySet()) { - try { - List> rowsInTable = reader.getRowsFromTable(tableName); - for(Map row : rowsInTable) { - //Only interested in row values, not the column name - row.values().forEach(cell -> { - fileData.append(cell.toString()); - }); - } - } catch(AbstractReader.FileReaderException ex) { - // logger.log(Level.WARNING, - // String.format("Error attempting to read file table: [%s]" //NON-NLS - // + " for file: [%s] (id=%d).", tableName, //NON-NLS - // source.getName(), source.getId()), - // ex); - } - } - } catch (AbstractReader.FileReaderException ex) { - //logger.log(Level.WARNING, String.format("Error attempting to get tables from " //NON-NLS - // + "file: [%s] (id=%d).", //NON-NLS - // source.getName(), source.getId()), ex); + String.format("Encountered a FileReaderInitException" //NON-NLS + + " when trying to initialize a SQLiteReader" //NON-NLS + + " for Content with id:[%s], name:[%s].", //NON-NLS + source.getId(), source.getName())); } } + /** + * Queries the sqlite database and adds all tables and rows to a + * TableBuilder, which formats the strings into a table view for clean + * results while searching for keywords in the application. + * + * @param reader + */ + private void copyDatabaseIntoBuffer(Content source, AbstractReader reader) { + try { + Map tables = reader.getTableSchemas(); + iterateTablesAndPopulateBuffer(tables, reader, source); + } catch (AbstractReader.FileReaderException ex) { + logger.log(Level.WARNING, String.format( + "Error attempting to get tables from file: " //NON-NLS + + "[%s] (id=%d).", source.getName(), //NON-NLS + source.getId()), ex); + } + } + + /** + * Iterates all of the tables and passes the rows to a helper function + * for reading. 
+ * + * @param tables A map of table names to table schemas + * @param reader SqliteReader for interfacing with the database + * @param source Source database file for logging + */ + private void iterateTablesAndPopulateBuffer(Map tables, + AbstractReader reader, Content source) { + + for (String tableName : tables.keySet()) { + TableBuilder tableBuilder = new TableBuilder(); + tableBuilder.addSection(tableName); + try { + List> rowsInTable + = reader.getRowsFromTable(tableName); + addRowsToTableBuilder(tableBuilder, rowsInTable); + } catch (AbstractReader.FileReaderException ex) { + logger.log(Level.WARNING, String.format( + "Error attempting to read file table: [%s]" //NON-NLS + + " for file: [%s] (id=%d).", tableName, //NON-NLS + source.getName(), source.getId()), ex); + } + } + } + + /** + * Iterates all rows in the table and adds the rows to the TableBuilder + * class which formats the input into a table view. + * + * @param tableBuilder + * @param rowsInTable list of rows from the sqlite table + */ + private void addRowsToTableBuilder(TableBuilder tableBuilder, + List> rowsInTable) { + if (!rowsInTable.isEmpty()) { + //Create a collection from the header set, so that the TableBuilder + //can easily format it + tableBuilder.addHeader(new ArrayList<>( + rowsInTable.get(0).keySet())); + for (Map row : rowsInTable) { + tableBuilder.addRow(row.values()); + } + } + //If rowsInTable was empty, just append the table as is + databaseBuffer.append(tableBuilder); + } + + /** + * Returns one byte of the buffer at a time. This buffer was completely + * loaded during construction. Consider a lazy approach or a + * multi-threaded one if too slow. + * + * @return @throws IOException + */ @Override public int read() throws IOException { - if (currIndex == fileData.length() - 1) { + //End of the buffer if true + if (currReadIndex == databaseBuffer.length() - 1) { return NO_CONTENT_LEFT; } - return fileData.charAt(currIndex++); + + return databaseBuffer.charAt(currReadIndex++); + } + } + + /* + * Formats input so that it reads as a table in the console or in a text + * viewer + */ + private class TableBuilder { + + private List rows = new LinkedList<>(); + + //Formatters + private final String HORIZONTAL_DELIMITER = "-"; + private final String VERTICAL_DELIMITER = "|"; + private final String HEADER_CORNER = "+"; + + private final String TAB = "\t"; + private final String NEW_LINE = "\n"; + private final String SPACE = " "; + + private String section = ""; + + /** + * Add the section to the top left corner of the table. This is where + * the name of the table should go. + * + * @param section Table name + */ + public void addSection(String section) { + this.section = section + NEW_LINE + NEW_LINE; + } + + /** + * Creates a horizontal bar given the length param. These are used to + * box the header up and at the bottom of the table. + * + * @return Ex: \t+----------------------+\n + */ + private String buildHorizontalBar(int length) { + if (length == 0) { + return ""; + } + //Output: \t+----------------------+\n + return TAB + HEADER_CORNER + StringUtils.repeat( + HORIZONTAL_DELIMITER, length) + HEADER_CORNER + NEW_LINE; + } + + /** + * Add header row to underlying list collection, which will be formatted + * when toString is called. + * + * @param vals + */ + public void addHeader(Collection vals) { + addRow(vals); + } + + /** + * Add a row to the underlying list collection, which will be formatted + * when toString is called. 
+ * + * @param vals + */ + public void addRow(Collection vals) { + List rowValues = new ArrayList<>(); + vals.forEach((val) -> { + rowValues.add(String.valueOf(val)); + }); + rows.add(rowValues.toArray( + new String[rowValues.size()])); + } + + /** + * Gets the max width of a cell in each column and the max number of + * columns in any given row. This ensures that there is enough space for + * even the longest entry and enough columns. + * + * @return + */ + private int[] getMaxWidthPerColumn() { + int maxNumberOfColumns = 0; + for (String[] row : rows) { + maxNumberOfColumns = Math.max( + maxNumberOfColumns, row.length); + } + + int[] widths = new int[maxNumberOfColumns]; + for (String[] row : rows) { + for (int colNum = 0; colNum < row.length; colNum++) { + widths[colNum] = Math.max( + widths[colNum], + StringUtils.length(row[colNum]) + ); + } + } + + return widths; + } + + /** + * Returns a string version of the table, when printed to console it + * will be fully formatted. + * + * @return + */ + @Override + public String toString() { + StringBuilder outputTable = new StringBuilder(); + + int barLength = 0; + int[] colMaxWidths = getMaxWidthPerColumn(); + boolean header = true; + for (String[] row : rows) { + addFormattedRowToBuffer(row, colMaxWidths, outputTable); + if (header) { + //Get the length of the horizontal bar from the length of the + //formatted header, minus the one tab added at the beginning + //of the row (we want to count the vertical delimiters since + //we want it all to line up. + barLength = outputTable.length() - 2; + } + addFormattedHeaderToBuffer(outputTable, barLength, header); + header = false; + } + outputTable.append(buildHorizontalBar(barLength)); + outputTable.append(NEW_LINE); + + return outputTable.toString(); + } + + /** + * Outputs a fully formatted row in the table + * + * Example: \t| John | 12345678 | john@email.com |\n + * + * @param row + * @param colMaxWidths + * @param buf + */ + private void addFormattedRowToBuffer(String[] row, + int[] colMaxWidths, StringBuilder outputTable) { + outputTable.append(TAB); + for (int colNum = 0; colNum < row.length; colNum++) { + outputTable.append(VERTICAL_DELIMITER); + outputTable.append(SPACE); + outputTable.append(StringUtils.rightPad( + StringUtils.defaultString(row[colNum]), + colMaxWidths[colNum])); + outputTable.append(SPACE); + } + outputTable.append(VERTICAL_DELIMITER); + outputTable.append(NEW_LINE); + } + + /** + * Outputs a fully formatted header. 
+ * + * Example: \t+----------------------+\n + * \t| Email | Phone | Name |\n + * \t+----------------------+\n + * + * @param buf + * @param barLength + * @param header + */ + private void addFormattedHeaderToBuffer(StringBuilder outputTable, + int barLength, boolean header) { + if (header) { + outputTable.insert(0, buildHorizontalBar(barLength)); + outputTable.insert(0, section); + outputTable.append(buildHorizontalBar(barLength)); + } } } - } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java index ea306ff9a9..512f28a599 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java @@ -198,7 +198,7 @@ class TikaTextExtractor extends ContentTextExtractor { || ContentTextExtractor.BLOB_MIME_TYPES.contains(detectedFormat) //any binary unstructured blobs (string extraction will be used) || ContentTextExtractor.ARCHIVE_MIME_TYPES.contains(detectedFormat) || (detectedFormat.startsWith("video/") && !detectedFormat.equals("video/x-flv")) //skip video other than flv (tika supports flv only) //NON-NLS - || detectedFormat.equals(SQLITE_MIMETYPE) //Skip sqlite files, Tika cannot handle virtual tables and will fail with an exception. See SqliteTextExtractor class + || detectedFormat.equals(SQLITE_MIMETYPE) //Skip sqlite files, Tika cannot handle virtual tables and will fail with an exception. //NON-NLS ) { return false; } From 333910f16b270c89b88800882c6f66b74be983ca Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dsmyda" Date: Wed, 5 Sep 2018 16:06:47 -0400 Subject: [PATCH 12/33] Fixed the off by 2 error --- .../sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java index 50cd4143ab..f88be01bee 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java @@ -331,7 +331,7 @@ public class SqliteTextExtractor extends ContentTextExtractor { //formatted header, minus the one tab added at the beginning //of the row (we want to count the vertical delimiters since //we want it all to line up. - barLength = outputTable.length() - 2; + barLength = outputTable.length() - 4; } addFormattedHeaderToBuffer(outputTable, barLength, header); header = false; From 341ae826fe38b4f6ed188df1aaf4f446a2fdd68d Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dsmyda" Date: Fri, 7 Sep 2018 08:40:35 -0400 Subject: [PATCH 13/33] Fixed the collision during writing in temp storage. Need to still fix arabic issues and investigate streaming of database files to speed up keyword search. 
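For illustration, a minimal standalone sketch of the naming scheme this commit adopts; the temp directory, ids, and file names below are hypothetical, and the real code derives them from the current case and a Content object:

    import java.io.File;

    public class TempPathDemo {
        //Mirrors getLocalDiskPath() after this fix: the object id is baked into
        //the temp file name, so two same-named files cannot collide on disk.
        static String localDiskPath(String tempDir, long objectId, String fileName) {
            return tempDir + File.separator + objectId + "-" + fileName;
        }

        public static void main(String[] args) {
            //Two files that share a name now map to distinct paths
            System.out.println(localDiskPath("/case/temp", 101, "contacts.db"));
            System.out.println(localDiskPath("/case/temp", 202, "contacts.db"));
            //On a Unix-style platform this prints:
            // /case/temp/101-contacts.db
            // /case/temp/202-contacts.db
        }
    }

Before this change, two evidence files sharing a name such as contacts.db would have been written to the same temp path and overwritten each other mid-ingest.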
---
 .../tabulardatareader/AbstractReader.java                        | 2 +-
 .../tabulardatareader/SQLiteReader.java                          | 2 +-
 .../keywordsearch/SqliteTextExtractor.java                       | 1 +
 3 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java
index 917b862749..248a6f0e25 100755
--- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java
+++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java
@@ -59,7 +59,7 @@ public abstract class AbstractReader implements AutoCloseable {
     final String getLocalDiskPath(Content file) throws FileReaderException {
         try {
             return Case.getCurrentCaseThrows().getTempDirectory() +
-                    File.separator + file.getName();
+                    File.separator + file.getId() + "-" + file.getName();
         } catch (NoCurrentCaseException ex) {
             throw new FileReaderException(ex);
         }
diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java
index 1b4743faea..7ce8260c6e 100755
--- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java
+++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java
@@ -101,7 +101,7 @@ public final class SQLiteReader extends AbstractReader {
         if (metaFiles != null) {
             for (AbstractFile metaFile : metaFiles) {
                 String tmpMetafilePathName = openCase.getTempDirectory() +
-                        File.separator + metaFile.getName();
+                        File.separator + metaFile.getId() + "-" + metaFile.getName();
                 File tmpMetafile = new File(tmpMetafilePathName);
                 ContentUtils.writeToFile(metaFile, tmpMetafile);
             }
diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java
index f88be01bee..7305aac511 100755
--- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java
+++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.Reader;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.LinkedList;

From a942b87adbe450eb2d78fdb625e021965e50f263 Mon Sep 17 00:00:00 2001
From: "U-BASIS\\dsmyda"
Date: Fri, 7 Sep 2018 13:27:14 -0400
Subject: [PATCH 14/33] Fixed Arabic text not rendering, I was using the wrong
 input stream....
CharSource was the way to go --- .../tabulardatareader/AbstractReader.java | 2 +- .../tabulardatareader/SQLiteReader.java | 2 +- .../keywordsearch/SqliteTextExtractor.java | 190 ++++++++---------- .../keywordsearch/TikaTextExtractor.java | 1 - 4 files changed, 84 insertions(+), 111 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java index 248a6f0e25..f39f4c85ba 100755 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java +++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java @@ -59,7 +59,7 @@ public abstract class AbstractReader implements AutoCloseable { final String getLocalDiskPath(Content file) throws FileReaderException { try { return Case.getCurrentCaseThrows().getTempDirectory() + - File.separator + file.getId() + "-" + file.getName(); + File.separator + file.getId() + file.getName(); } catch (NoCurrentCaseException ex) { throw new FileReaderException(ex); } diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java index 7ce8260c6e..10e49d6f0b 100755 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java +++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java @@ -101,7 +101,7 @@ public final class SQLiteReader extends AbstractReader { if (metaFiles != null) { for (AbstractFile metaFile : metaFiles) { String tmpMetafilePathName = openCase.getTempDirectory() + - File.separator + metaFile.getId() + "-" + metaFile.getName(); + File.separator + metaFile.getId() + metaFile.getName(); File tmpMetafile = new File(tmpMetafilePathName); ContentUtils.writeToFile(metaFile, tmpMetafile); } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java index 7305aac511..8d4f16b50f 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java @@ -18,11 +18,9 @@ */ package org.sleuthkit.autopsy.keywordsearch; +import com.google.common.io.CharSource; import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; import java.io.Reader; -import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; @@ -82,30 +80,8 @@ public class SqliteTextExtractor extends ContentTextExtractor { */ @Override public Reader getReader(Content source) throws TextExtractorException { - return new InputStreamReader(new SqliteTextReader(source)); - } - - @Override - public boolean isDisabled() { - return false; - } - - @Override - public void logWarning(String msg, Exception ex) { - logger.log(Level.WARNING, msg, ex); //NON-NLS - } - - /** - * InputStream that is returned from the getReader method. This stream opens - * a sqlite file and loads its contents into a buffer that can be read from - * the read function. - */ - private final class SqliteTextReader extends InputStream { - - private StringBuilder databaseBuffer; - private int currReadIndex; - private final int NO_CONTENT_LEFT = -1; - + StringBuilder databaseBuffer = new StringBuilder(); + /** * The buffer is filled during initialization, meaning the whole sqlite * file is read during construction. 
@@ -115,12 +91,11 @@ public class SqliteTextExtractor extends ContentTextExtractor { * @throws * org.sleuthkit.autopsy.keywordsearch.TextExtractor.TextExtractorException */ - public SqliteTextReader(Content source) throws TextExtractorException { - try (AbstractReader reader = FileReaderFactory.createReader( + try (AbstractReader reader = FileReaderFactory.createReader( SQLITE_MIMETYPE, source)) { - this.databaseBuffer = new StringBuilder(); + databaseBuffer = new StringBuilder(); //Fill the entire buffer upon instantiation - copyDatabaseIntoBuffer(source, reader); + copyDatabaseIntoBuffer(source, reader, databaseBuffer); } catch (FileReaderInitException ex) { throw new TextExtractorException( String.format("Encountered a FileReaderInitException" //NON-NLS @@ -128,92 +103,91 @@ public class SqliteTextExtractor extends ContentTextExtractor { + " for Content with id:[%s], name:[%s].", //NON-NLS source.getId(), source.getName())); } + + try { + return CharSource.wrap(databaseBuffer.toString()).openStream(); + } catch (IOException ex) { + throw new TextExtractorException(String.format("Unable to open CharSource stream on the databaseBuffer" + + "for content source name: [%s] with id: [%d]", source.getName(), source.getId())); } + } + + /** + * Queries the sqlite database and adds all tables and rows to a + * TableBuilder, which formats the strings into a table view for clean + * results while searching for keywords in the application. + * + * @param reader + */ + private void copyDatabaseIntoBuffer(Content source, AbstractReader reader, StringBuilder databaseBuffer) { + try { + Map tables = reader.getTableSchemas(); + iterateTablesAndPopulateBuffer(tables, reader, source, databaseBuffer); + } catch (AbstractReader.FileReaderException ex) { + logger.log(Level.WARNING, String.format( + "Error attempting to get tables from file: " //NON-NLS + + "[%s] (id=%d).", source.getName(), //NON-NLS + source.getId()), ex); + } + } - /** - * Queries the sqlite database and adds all tables and rows to a - * TableBuilder, which formats the strings into a table view for clean - * results while searching for keywords in the application. - * - * @param reader - */ - private void copyDatabaseIntoBuffer(Content source, AbstractReader reader) { + /** + * Iterates all of the tables and passes the rows to a helper function + * for reading. + * + * @param tables A map of table names to table schemas + * @param reader SqliteReader for interfacing with the database + * @param source Source database file for logging + */ + private void iterateTablesAndPopulateBuffer(Map tables, + AbstractReader reader, Content source, StringBuilder databaseBuffer) { + + for (String tableName : tables.keySet()) { + TableBuilder tableBuilder = new TableBuilder(); + tableBuilder.addSection(tableName); try { - Map tables = reader.getTableSchemas(); - iterateTablesAndPopulateBuffer(tables, reader, source); + List> rowsInTable + = reader.getRowsFromTable(tableName); + addRowsToTableBuilder(tableBuilder, rowsInTable, databaseBuffer); } catch (AbstractReader.FileReaderException ex) { logger.log(Level.WARNING, String.format( - "Error attempting to get tables from file: " //NON-NLS - + "[%s] (id=%d).", source.getName(), //NON-NLS - source.getId()), ex); + "Error attempting to read file table: [%s]" //NON-NLS + + " for file: [%s] (id=%d).", tableName, //NON-NLS + source.getName(), source.getId()), ex); } } - - /** - * Iterates all of the tables and passes the rows to a helper function - * for reading. 
- * - * @param tables A map of table names to table schemas - * @param reader SqliteReader for interfacing with the database - * @param source Source database file for logging - */ - private void iterateTablesAndPopulateBuffer(Map tables, - AbstractReader reader, Content source) { - - for (String tableName : tables.keySet()) { - TableBuilder tableBuilder = new TableBuilder(); - tableBuilder.addSection(tableName); - try { - List> rowsInTable - = reader.getRowsFromTable(tableName); - addRowsToTableBuilder(tableBuilder, rowsInTable); - } catch (AbstractReader.FileReaderException ex) { - logger.log(Level.WARNING, String.format( - "Error attempting to read file table: [%s]" //NON-NLS - + " for file: [%s] (id=%d).", tableName, //NON-NLS - source.getName(), source.getId()), ex); - } + } + + /** + * Iterates all rows in the table and adds the rows to the TableBuilder + * class which formats the input into a table view. + * + * @param tableBuilder + * @param rowsInTable list of rows from the sqlite table + */ + private void addRowsToTableBuilder(TableBuilder tableBuilder, + List> rowsInTable, StringBuilder databaseBuffer) { + if (!rowsInTable.isEmpty()) { + //Create a collection from the header set, so that the TableBuilder + //can easily format it + tableBuilder.addHeader(new ArrayList<>( + rowsInTable.get(0).keySet())); + for (Map row : rowsInTable) { + tableBuilder.addRow(row.values()); } } + //If rowsInTable was empty, just append the table as is + databaseBuffer.append(tableBuilder); + } + + @Override + public boolean isDisabled() { + return false; + } - /** - * Iterates all rows in the table and adds the rows to the TableBuilder - * class which formats the input into a table view. - * - * @param tableBuilder - * @param rowsInTable list of rows from the sqlite table - */ - private void addRowsToTableBuilder(TableBuilder tableBuilder, - List> rowsInTable) { - if (!rowsInTable.isEmpty()) { - //Create a collection from the header set, so that the TableBuilder - //can easily format it - tableBuilder.addHeader(new ArrayList<>( - rowsInTable.get(0).keySet())); - for (Map row : rowsInTable) { - tableBuilder.addRow(row.values()); - } - } - //If rowsInTable was empty, just append the table as is - databaseBuffer.append(tableBuilder); - } - - /** - * Returns one byte of the buffer at a time. This buffer was completely - * loaded during construction. Consider a lazy approach or a - * multi-threaded one if too slow. 
- * - * @return @throws IOException - */ - @Override - public int read() throws IOException { - //End of the buffer if true - if (currReadIndex == databaseBuffer.length() - 1) { - return NO_CONTENT_LEFT; - } - - return databaseBuffer.charAt(currReadIndex++); - } + @Override + public void logWarning(String msg, Exception ex) { + logger.log(Level.WARNING, msg, ex); //NON-NLS } /* @@ -279,7 +253,7 @@ public class SqliteTextExtractor extends ContentTextExtractor { public void addRow(Collection vals) { List rowValues = new ArrayList<>(); vals.forEach((val) -> { - rowValues.add(String.valueOf(val)); + rowValues.add(val.toString()); }); rows.add(rowValues.toArray( new String[rowValues.size()])); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java index 512f28a599..679bd5b9cc 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java @@ -202,7 +202,6 @@ class TikaTextExtractor extends ContentTextExtractor { ) { return false; } - System.out.println(detectedFormat); return TIKA_SUPPORTED_TYPES.contains(detectedFormat); } From 8e0cd3b70056bb07648cfc4e9e4f16f5b14c3a87 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dsmyda" Date: Fri, 7 Sep 2018 14:45:01 -0400 Subject: [PATCH 15/33] Fixed the unicode errors, cleaned up some of the reader code. Unicode will still cause formatting issues, but the keyword search module works --- .../tabulardatareader/AbstractReader.java | 2 - .../tabulardatareader/ExcelReader.java | 1 - .../tabulardatareader/FileReaderFactory.java | 1 - .../tabulardatareader/SQLiteReader.java | 4 +- .../keywordsearch/SqliteTextExtractor.java | 159 ++++++++---------- 5 files changed, 75 insertions(+), 92 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java index f39f4c85ba..b488bd6aa8 100755 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java +++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java @@ -22,11 +22,9 @@ import java.io.File; import java.io.IOException; import java.util.List; import java.util.Map; -import org.openide.util.Exceptions; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.datamodel.ContentUtils; -import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.TskCoreException; diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java index 711d4c0c60..b1a5b40cfd 100755 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java +++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java @@ -36,7 +36,6 @@ import org.apache.poi.ss.usermodel.Sheet; import org.apache.poi.ss.usermodel.Workbook; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.IngestServices; -import org.sleuthkit.datamodel.AbstractFile; import com.monitorjbl.xlsx.StreamingReader; import org.apache.poi.hssf.OldExcelFormatException; import org.sleuthkit.datamodel.Content; diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java index 
56707617ff..ffd152c80d 100755 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java +++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java @@ -19,7 +19,6 @@ package org.sleuthkit.autopsy.tabulardatareader; import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException; -import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; /** diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java index 10e49d6f0b..408193b2b3 100755 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java +++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java @@ -51,8 +51,8 @@ import org.sleuthkit.datamodel.TskCoreException; public final class SQLiteReader extends AbstractReader { private final Connection connection; - private final static IngestServices services = IngestServices.getInstance(); - private final static Logger logger = services.getLogger(SQLiteReader.class.getName()); + private final static IngestServices ingestServices = IngestServices.getInstance(); + private final static Logger logger = ingestServices.getLogger(SQLiteReader.class.getName()); /** * Writes data source file contents to local disk and opens a sqlite JDBC diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java index 8d4f16b50f..4569438175 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java @@ -55,6 +55,16 @@ public class SqliteTextExtractor extends ContentTextExtractor { return true; } + @Override + public boolean isDisabled() { + return false; + } + + @Override + public void logWarning(String msg, Exception ex) { + logger.log(Level.WARNING, msg, ex); //NON-NLS + } + /** * Supports only the sqlite mimetypes * @@ -81,48 +91,39 @@ public class SqliteTextExtractor extends ContentTextExtractor { @Override public Reader getReader(Content source) throws TextExtractorException { StringBuilder databaseBuffer = new StringBuilder(); - - /** - * The buffer is filled during initialization, meaning the whole sqlite - * file is read during construction. - * - * @param source Content file that is the sqlite database - * - * @throws - * org.sleuthkit.autopsy.keywordsearch.TextExtractor.TextExtractorException - */ + try (AbstractReader reader = FileReaderFactory.createReader( - SQLITE_MIMETYPE, source)) { - databaseBuffer = new StringBuilder(); - //Fill the entire buffer upon instantiation - copyDatabaseIntoBuffer(source, reader, databaseBuffer); - } catch (FileReaderInitException ex) { - throw new TextExtractorException( - String.format("Encountered a FileReaderInitException" //NON-NLS - + " when trying to initialize a SQLiteReader" //NON-NLS - + " for Content with id:[%s], name:[%s].", //NON-NLS - source.getId(), source.getName())); - } - - try { + SQLITE_MIMETYPE, source)) { + databaseBuffer = new StringBuilder(); + //Fill the buffer with table names and table data + copyDatabaseIntoBuffer(source, reader, databaseBuffer); + //Once the buffer is full, wrap it into a CharSource and open the reader + //This is necessary to maintain integrity of unicode string. Returning + //character by character will not work. 
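+            //(For contrast: the InputStream-based reader this replaces handed
+            //text back byte by byte, which mangled multi-byte characters such
+            //as Arabic; see the previous commit message.)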
return CharSource.wrap(databaseBuffer.toString()).openStream(); - } catch (IOException ex) { - throw new TextExtractorException(String.format("Unable to open CharSource stream on the databaseBuffer" - + "for content source name: [%s] with id: [%d]", source.getName(), source.getId())); + } catch (FileReaderInitException | IOException ex) { + throw new TextExtractorException( + String.format("Encountered a FileReaderInitException" //NON-NLS + + " when trying to initialize a SQLiteReader" //NON-NLS + + " for Content with id: [%s], name: [%s].", //NON-NLS + source.getId(), source.getName())); } } - + /** - * Queries the sqlite database and adds all tables and rows to a - * TableBuilder, which formats the strings into a table view for clean - * results while searching for keywords in the application. - * - * @param reader - */ - private void copyDatabaseIntoBuffer(Content source, AbstractReader reader, StringBuilder databaseBuffer) { + * Queries the sqlite database and adds all tables and rows to a + * TableBuilder, which formats the strings into a table view for clean + * results while searching for keywords in the application. + * + * @param reader Sqlite reader for the content source + * @param source Sqlite file source + * @param databaseBuffer Buffer containing all of the database content + */ + private void copyDatabaseIntoBuffer(Content source, AbstractReader reader, + StringBuilder databaseBuffer) { try { Map tables = reader.getTableSchemas(); - iterateTablesAndPopulateBuffer(tables, reader, source, databaseBuffer); + copyDatabaseIntoBuffer(tables, reader, source, databaseBuffer); } catch (AbstractReader.FileReaderException ex) { logger.log(Level.WARNING, String.format( "Error attempting to get tables from file: " //NON-NLS @@ -132,14 +133,16 @@ public class SqliteTextExtractor extends ContentTextExtractor { } /** - * Iterates all of the tables and passes the rows to a helper function - * for reading. - * - * @param tables A map of table names to table schemas - * @param reader SqliteReader for interfacing with the database - * @param source Source database file for logging - */ - private void iterateTablesAndPopulateBuffer(Map tables, + * Iterates all of the tables and populate the TableBuilder with all of the + * rows from the table. This TableBuilder object string will be added to the + * databaseBuffer. 
+ * + * @param tables A map of table names to table schemas + * @param reader SqliteReader for interfacing with the database + * @param source Source database file for logging + * @param databaseBuffer Buffer containing all of the database content + */ + private void copyDatabaseIntoBuffer(Map tables, AbstractReader reader, Content source, StringBuilder databaseBuffer) { for (String tableName : tables.keySet()) { @@ -148,7 +151,17 @@ public class SqliteTextExtractor extends ContentTextExtractor { try { List> rowsInTable = reader.getRowsFromTable(tableName); - addRowsToTableBuilder(tableBuilder, rowsInTable, databaseBuffer); + if (!rowsInTable.isEmpty()) { + //Create a collection from the header set, so that the TableBuilder + //can easily format it + tableBuilder.addHeader(new ArrayList<>( + rowsInTable.get(0).keySet())); + for (Map row : rowsInTable) { + tableBuilder.addRow(row.values()); + } + } + //If rowsInTable was empty, just append the table as is + databaseBuffer.append(tableBuilder); } catch (AbstractReader.FileReaderException ex) { logger.log(Level.WARNING, String.format( "Error attempting to read file table: [%s]" //NON-NLS @@ -157,38 +170,6 @@ public class SqliteTextExtractor extends ContentTextExtractor { } } } - - /** - * Iterates all rows in the table and adds the rows to the TableBuilder - * class which formats the input into a table view. - * - * @param tableBuilder - * @param rowsInTable list of rows from the sqlite table - */ - private void addRowsToTableBuilder(TableBuilder tableBuilder, - List> rowsInTable, StringBuilder databaseBuffer) { - if (!rowsInTable.isEmpty()) { - //Create a collection from the header set, so that the TableBuilder - //can easily format it - tableBuilder.addHeader(new ArrayList<>( - rowsInTable.get(0).keySet())); - for (Map row : rowsInTable) { - tableBuilder.addRow(row.values()); - } - } - //If rowsInTable was empty, just append the table as is - databaseBuffer.append(tableBuilder); - } - - @Override - public boolean isDisabled() { - return false; - } - - @Override - public void logWarning(String msg, Exception ex) { - logger.log(Level.WARNING, msg, ex); //NON-NLS - } /* * Formats input so that it reads as a table in the console or in a text @@ -262,7 +243,9 @@ public class SqliteTextExtractor extends ContentTextExtractor { /** * Gets the max width of a cell in each column and the max number of * columns in any given row. This ensures that there is enough space for - * even the longest entry and enough columns. + * even the longest entry and enough columns. The length of the string + * seems to be different from the length of the print statement in some + * languages. For instance, arabic will cause the table to look off. * * @return */ @@ -278,7 +261,7 @@ public class SqliteTextExtractor extends ContentTextExtractor { for (int colNum = 0; colNum < row.length; colNum++) { widths[colNum] = Math.max( widths[colNum], - StringUtils.length(row[colNum]) + row[colNum].length() ); } } @@ -322,9 +305,10 @@ public class SqliteTextExtractor extends ContentTextExtractor { * * Example: \t| John | 12345678 | john@email.com |\n * - * @param row - * @param colMaxWidths - * @param buf + * @param row Array containing unformatted row content + * @param colMaxWidths An array of column maximum widths, so that + * everything is pretty printed. 
+ * @param outputTable Buffer that formatted contents are written to */ private void addFormattedRowToBuffer(String[] row, int[] colMaxWidths, StringBuilder outputTable) { @@ -348,13 +332,16 @@ public class SqliteTextExtractor extends ContentTextExtractor { * \t| Email | Phone | Name |\n * \t+----------------------+\n * - * @param buf - * @param barLength - * @param header + * @param outputTable Buffer that formatted contents are written to + * @param barLength Length of the bar (i.e. +---------+) that will + * surround the header, based off of the length of + * the formatted header row + * @param needsHeader Boolean denoting if the header has been added to + * the buffer */ private void addFormattedHeaderToBuffer(StringBuilder outputTable, - int barLength, boolean header) { - if (header) { + int barLength, boolean needsHeader) { + if (needsHeader) { outputTable.insert(0, buildHorizontalBar(barLength)); outputTable.insert(0, section); outputTable.append(buildHorizontalBar(barLength)); From b4628e745f99e1e1079789879c336ced26f2cde0 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dsmyda" Date: Mon, 10 Sep 2018 11:37:41 -0400 Subject: [PATCH 16/33] Major speed improvements, still need to comment and refactor and clean up --- .../keywordsearch/SqliteTextExtractor.java | 95 ++++++++++++------- 1 file changed, 60 insertions(+), 35 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java index 4569438175..efdb9f72cf 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java @@ -27,6 +27,7 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.logging.Level; +import javax.swing.text.Segment; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.tabulardatareader.AbstractReader; import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException; @@ -38,17 +39,21 @@ import org.sleuthkit.autopsy.tabulardatareader.FileReaderFactory; * Dedicated SqliteTextExtractor to solve the problems associated with Tika's * Sqlite parser. 
* - * Tika problems: - * 1) Tika fails to open virtual tables - * 2) Tika fails to open tables with spaces in table name - * 3) Tika fails to include the table names in output (except for the first table it parses) - * 4) BasisTech > Apache + * Tika problems: + * 1) Tika fails to open virtual tables + * 2) Tika fails to open tables with spaces in table name + * 3) Tika fails to include the table names in output (except for the + * first table it parses) * */ public class SqliteTextExtractor extends ContentTextExtractor { private final String SQLITE_MIMETYPE = "application/x-sqlite3"; private static final Logger logger = Logger.getLogger(SqliteTextExtractor.class.getName()); + private final CharSequence EMPTY_CHARACTER_SEQUENCE = ""; + + LinkedList databaseContents; + Integer characterCount = 0; @Override boolean isContentTypeSpecific() { @@ -90,17 +95,11 @@ public class SqliteTextExtractor extends ContentTextExtractor { */ @Override public Reader getReader(Content source) throws TextExtractorException { - StringBuilder databaseBuffer = new StringBuilder(); - try (AbstractReader reader = FileReaderFactory.createReader( SQLITE_MIMETYPE, source)) { - databaseBuffer = new StringBuilder(); - //Fill the buffer with table names and table data - copyDatabaseIntoBuffer(source, reader, databaseBuffer); - //Once the buffer is full, wrap it into a CharSource and open the reader - //This is necessary to maintain integrity of unicode string. Returning - //character by character will not work. - return CharSource.wrap(databaseBuffer.toString()).openStream(); + final CharSequence databaseContents = getDatabaseContents(source, reader); + //CharSource will maintain unicode strings correctly + return CharSource.wrap(databaseContents).openStream(); } catch (FileReaderInitException | IOException ex) { throw new TextExtractorException( String.format("Encountered a FileReaderInitException" //NON-NLS @@ -117,19 +116,43 @@ public class SqliteTextExtractor extends ContentTextExtractor { * * @param reader Sqlite reader for the content source * @param source Sqlite file source - * @param databaseBuffer Buffer containing all of the database content */ - private void copyDatabaseIntoBuffer(Content source, AbstractReader reader, - StringBuilder databaseBuffer) { + private CharSequence getDatabaseContents(Content source, AbstractReader reader) { try { Map tables = reader.getTableSchemas(); - copyDatabaseIntoBuffer(tables, reader, source, databaseBuffer); + databaseContents = new LinkedList<>(); + copyDatabaseIntoBuffer(tables, reader, source, databaseContents); + return databaseContentsToCharSequence(); } catch (AbstractReader.FileReaderException ex) { logger.log(Level.WARNING, String.format( "Error attempting to get tables from file: " //NON-NLS + "[%s] (id=%d).", source.getName(), //NON-NLS source.getId()), ex); } + + //Failed to get tables from file + return EMPTY_CHARACTER_SEQUENCE; + } + + /** + * Copy linkedList elements into a character array to be wrapped into a + * CharSequence. + * + * @return A character seqeunces of the database contents + */ + private CharSequence databaseContentsToCharSequence() { + final char[] databaseCharacters = new char[characterCount]; + + int currSequenceIndex = 0; + for (String table : databaseContents) { + System.arraycopy(table.toCharArray(), 0, databaseCharacters, currSequenceIndex, table.length()); + currSequenceIndex += table.length(); + } + + //Segment class does not make an internal copy of the character array + //being passed in (more efficient). 
It also implements a CharSequences + //necessary for the CharSource class to create a compatible reader. + return new Segment(databaseCharacters, 0, characterCount); } /** @@ -140,10 +163,10 @@ public class SqliteTextExtractor extends ContentTextExtractor { * @param tables A map of table names to table schemas * @param reader SqliteReader for interfacing with the database * @param source Source database file for logging - * @param databaseBuffer Buffer containing all of the database content + * @param databaseContents List containing all of the database content */ private void copyDatabaseIntoBuffer(Map tables, - AbstractReader reader, Content source, StringBuilder databaseBuffer) { + AbstractReader reader, Content source, LinkedList databaseContents) { for (String tableName : tables.keySet()) { TableBuilder tableBuilder = new TableBuilder(); @@ -152,16 +175,16 @@ public class SqliteTextExtractor extends ContentTextExtractor { List> rowsInTable = reader.getRowsFromTable(tableName); if (!rowsInTable.isEmpty()) { - //Create a collection from the header set, so that the TableBuilder - //can easily format it tableBuilder.addHeader(new ArrayList<>( rowsInTable.get(0).keySet())); for (Map row : rowsInTable) { tableBuilder.addRow(row.values()); } } - //If rowsInTable was empty, just append the table as is - databaseBuffer.append(tableBuilder); + + String formattedTable = tableBuilder.toString(); + characterCount += formattedTable.length(); + databaseContents.add(formattedTable); } catch (AbstractReader.FileReaderException ex) { logger.log(Level.WARNING, String.format( "Error attempting to read file table: [%s]" //NON-NLS @@ -177,7 +200,8 @@ public class SqliteTextExtractor extends ContentTextExtractor { */ private class TableBuilder { - private List rows = new LinkedList<>(); + private final List rows = new LinkedList<>(); + private Integer characterCount = 0; //Formatters private final String HORIZONTAL_DELIMITER = "-"; @@ -188,6 +212,9 @@ public class SqliteTextExtractor extends ContentTextExtractor { private final String NEW_LINE = "\n"; private final String SPACE = " "; + //Number of escape sequences in the header row + private final int ESCAPE_SEQUENCES = 4; + private String section = ""; /** @@ -235,6 +262,7 @@ public class SqliteTextExtractor extends ContentTextExtractor { List rowValues = new ArrayList<>(); vals.forEach((val) -> { rowValues.add(val.toString()); + characterCount += val.toString().length(); }); rows.add(rowValues.toArray( new String[rowValues.size()])); @@ -242,10 +270,8 @@ public class SqliteTextExtractor extends ContentTextExtractor { /** * Gets the max width of a cell in each column and the max number of - * columns in any given row. This ensures that there is enough space for - * even the longest entry and enough columns. The length of the string - * seems to be different from the length of the print statement in some - * languages. For instance, arabic will cause the table to look off. + * columns in any given row. This ensures that there are enough columns + * and enough space for even the longest entry. 
* * @return */ @@ -277,7 +303,7 @@ public class SqliteTextExtractor extends ContentTextExtractor { */ @Override public String toString() { - StringBuilder outputTable = new StringBuilder(); + StringBuilder outputTable = new StringBuilder(characterCount); int barLength = 0; int[] colMaxWidths = getMaxWidthPerColumn(); @@ -289,7 +315,7 @@ public class SqliteTextExtractor extends ContentTextExtractor { //formatted header, minus the one tab added at the beginning //of the row (we want to count the vertical delimiters since //we want it all to line up. - barLength = outputTable.length() - 4; + barLength = outputTable.length() - ESCAPE_SEQUENCES; } addFormattedHeaderToBuffer(outputTable, barLength, header); header = false; @@ -328,9 +354,8 @@ public class SqliteTextExtractor extends ContentTextExtractor { /** * Outputs a fully formatted header. * - * Example: \t+----------------------+\n - * \t| Email | Phone | Name |\n - * \t+----------------------+\n + * Example: \t+----------------------+\n \t| Email | Phone | Name |\n + * \t+----------------------+\n * * @param outputTable Buffer that formatted contents are written to * @param barLength Length of the bar (i.e. +---------+) that will @@ -348,4 +373,4 @@ public class SqliteTextExtractor extends ContentTextExtractor { } } } -} +} \ No newline at end of file From 169fb9928f9753bff297733dc4c83f8eea2f3c08 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dsmyda" Date: Mon, 10 Sep 2018 15:45:40 -0400 Subject: [PATCH 17/33] Cleaned up code, refactored, improved efficiency --- .../keywordsearch/ContentTextExtractor.java | 2 +- .../keywordsearch/SqliteTextExtractor.java | 214 +++++++++--------- .../keywordsearch/TikaTextExtractor.java | 4 +- 3 files changed, 112 insertions(+), 108 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ContentTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ContentTextExtractor.java index b855ae317f..bba2df2ced 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ContentTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ContentTextExtractor.java @@ -30,7 +30,7 @@ import org.sleuthkit.datamodel.Content; abstract class ContentTextExtractor implements TextExtractor { - static final List BLOB_MIME_TYPES + static final List BINARY_MIME_TYPES = Arrays.asList( //ignore binary blob data, for which string extraction will be used "application/octet-stream", //NON-NLS diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java index efdb9f72cf..c45dc3920b 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java @@ -23,6 +23,7 @@ import java.io.IOException; import java.io.Reader; import java.util.ArrayList; import java.util.Collection; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -39,12 +40,10 @@ import org.sleuthkit.autopsy.tabulardatareader.FileReaderFactory; * Dedicated SqliteTextExtractor to solve the problems associated with Tika's * Sqlite parser. 
* - * Tika problems: - * 1) Tika fails to open virtual tables - * 2) Tika fails to open tables with spaces in table name - * 3) Tika fails to include the table names in output (except for the - * first table it parses) - * + * Tika problems: + * 1) Tika fails to open virtual tables + * 2) Tika fails to open tables with spaces in table name + * 3) Tika fails to include the table names in output (except for the first table it parses) */ public class SqliteTextExtractor extends ContentTextExtractor { @@ -52,9 +51,6 @@ public class SqliteTextExtractor extends ContentTextExtractor { private static final Logger logger = Logger.getLogger(SqliteTextExtractor.class.getName()); private final CharSequence EMPTY_CHARACTER_SEQUENCE = ""; - LinkedList databaseContents; - Integer characterCount = 0; - @Override boolean isContentTypeSpecific() { return true; @@ -97,9 +93,9 @@ public class SqliteTextExtractor extends ContentTextExtractor { public Reader getReader(Content source) throws TextExtractorException { try (AbstractReader reader = FileReaderFactory.createReader( SQLITE_MIMETYPE, source)) { - final CharSequence databaseContents = getDatabaseContents(source, reader); + final CharSequence databaseContent = getDatabaseContents(source, reader); //CharSource will maintain unicode strings correctly - return CharSource.wrap(databaseContents).openStream(); + return CharSource.wrap(databaseContent).openStream(); } catch (FileReaderInitException | IOException ex) { throw new TextExtractorException( String.format("Encountered a FileReaderInitException" //NON-NLS @@ -114,15 +110,18 @@ public class SqliteTextExtractor extends ContentTextExtractor { * TableBuilder, which formats the strings into a table view for clean * results while searching for keywords in the application. * - * @param reader Sqlite reader for the content source - * @param source Sqlite file source + * @param reader Sqlite reader for the content source + * @param source Sqlite file source */ private CharSequence getDatabaseContents(Content source, AbstractReader reader) { try { Map tables = reader.getTableSchemas(); - databaseContents = new LinkedList<>(); - copyDatabaseIntoBuffer(tables, reader, source, databaseContents); - return databaseContentsToCharSequence(); + LinkedList databaseStorage = new LinkedList<>(); + + Integer charactersCopied = loadDatabaseIntoList(databaseStorage, + tables, reader, source); + + return toCharSequence(databaseStorage, charactersCopied); } catch (AbstractReader.FileReaderException ex) { logger.log(Level.WARNING, String.format( "Error attempting to get tables from file: " //NON-NLS @@ -134,64 +133,67 @@ public class SqliteTextExtractor extends ContentTextExtractor { return EMPTY_CHARACTER_SEQUENCE; } - /** - * Copy linkedList elements into a character array to be wrapped into a - * CharSequence. - * - * @return A character seqeunces of the database contents - */ - private CharSequence databaseContentsToCharSequence() { - final char[] databaseCharacters = new char[characterCount]; - - int currSequenceIndex = 0; - for (String table : databaseContents) { - System.arraycopy(table.toCharArray(), 0, databaseCharacters, currSequenceIndex, table.length()); - currSequenceIndex += table.length(); - } - - //Segment class does not make an internal copy of the character array - //being passed in (more efficient). It also implements a CharSequences - //necessary for the CharSource class to create a compatible reader. 
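The comment above is the heart of the new plumbing and is worth unpacking: javax.swing.text.Segment implements CharSequence directly over an existing char[] with no copying, and Guava's CharSource turns any CharSequence into a java.io.Reader. A standalone sketch of the whole round trip, including the exact-size copy the surrounding methods perform (demo code, not the patch's):

    import com.google.common.io.CharSource;
    import java.io.IOException;
    import java.io.Reader;
    import java.util.Arrays;
    import java.util.List;
    import javax.swing.text.Segment;

    public class SegmentReaderDemo {
        public static void main(String[] args) throws IOException {
            List<String> tables = Arrays.asList("table one\n", "table two\n");
            //Characters are counted up front so a single allocation suffices
            int characterCount = tables.stream().mapToInt(String::length).sum();
            char[] buffer = new char[characterCount];
            int offset = 0;
            for (String table : tables) {
                System.arraycopy(table.toCharArray(), 0, buffer, offset, table.length());
                offset += table.length();
            }
            //Segment wraps the array in place; no second copy is made
            CharSequence contents = new Segment(buffer, 0, characterCount);
            try (Reader reader = CharSource.wrap(contents).openStream()) {
                char[] chunk = new char[16];
                int read;
                while ((read = reader.read(chunk)) != -1) {
                    System.out.print(new String(chunk, 0, read));
                }
            }
        }
    }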
- return new Segment(databaseCharacters, 0, characterCount); - } - /** * Iterates all of the tables and populate the TableBuilder with all of the - * rows from the table. This TableBuilder object string will be added to the - * databaseBuffer. + * rows from the table. The table string will be added to the list of + * contents. * - * @param tables A map of table names to table schemas - * @param reader SqliteReader for interfacing with the database - * @param source Source database file for logging - * @param databaseContents List containing all of the database content + * @param databaseStorage List containing all of the database content + * @param tables A map of table names to table schemas + * @param reader SqliteReader for interfacing with the database + * @param source Source database file for logging */ - private void copyDatabaseIntoBuffer(Map tables, - AbstractReader reader, Content source, LinkedList databaseContents) { + private int loadDatabaseIntoList(LinkedList databaseStorage, + Map tables, AbstractReader reader, Content source) { + int charactersCopied = 0; for (String tableName : tables.keySet()) { TableBuilder tableBuilder = new TableBuilder(); - tableBuilder.addSection(tableName); + tableBuilder.setTableName(tableName); + try { - List> rowsInTable - = reader.getRowsFromTable(tableName); + List> rowsInTable = reader.getRowsFromTable(tableName); if (!rowsInTable.isEmpty()) { - tableBuilder.addHeader(new ArrayList<>( - rowsInTable.get(0).keySet())); + tableBuilder.addHeader(new ArrayList<>(rowsInTable.get(0).keySet())); for (Map row : rowsInTable) { tableBuilder.addRow(row.values()); } } - - String formattedTable = tableBuilder.toString(); - characterCount += formattedTable.length(); - databaseContents.add(formattedTable); } catch (AbstractReader.FileReaderException ex) { logger.log(Level.WARNING, String.format( "Error attempting to read file table: [%s]" //NON-NLS + " for file: [%s] (id=%d).", tableName, //NON-NLS source.getName(), source.getId()), ex); } + + String formattedTable = tableBuilder.toString(); + charactersCopied += formattedTable.length(); + databaseStorage.add(formattedTable); } + return charactersCopied; + } + + /** + * Copy linkedList elements into a CharSequence + * + * @return A character seqeunces of the database contents + */ + private CharSequence toCharSequence(LinkedList databaseStorage, + int characterCount) { + + final char[] databaseCharArray = new char[characterCount]; + + int currIndex = 0; + for (String table : databaseStorage) { + System.arraycopy(table.toCharArray(), 0, databaseCharArray, + currIndex, table.length()); + currIndex += table.length(); + } + + //Segment class does not make an internal copy of the character array + //being passed in (more efficient). It also implements a CharSequences + //necessary for the CharSource class to create a compatible reader. + return new Segment(databaseCharArray, 0, characterCount); } /* @@ -201,7 +203,7 @@ public class SqliteTextExtractor extends ContentTextExtractor { private class TableBuilder { private final List rows = new LinkedList<>(); - private Integer characterCount = 0; + private Integer charactersAdded = 0; //Formatters private final String HORIZONTAL_DELIMITER = "-"; @@ -215,29 +217,24 @@ public class SqliteTextExtractor extends ContentTextExtractor { //Number of escape sequences in the header row private final int ESCAPE_SEQUENCES = 4; - private String section = ""; + private String tableName = ""; /** * Add the section to the top left corner of the table. 
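Taken end to end, the renamed builder is driven roughly like this (hypothetical driver; the collection-typed arguments are approximated from this patch's call sites):

    TableBuilder tableBuilder = new TableBuilder();
    tableBuilder.setTableName("messages");
    //Header comes from the column names of the first row
    tableBuilder.addHeader(Arrays.asList("Email", "Phone", "Name"));
    tableBuilder.addRow(Arrays.asList("jdoe@mail.com", "555-0100", "J. Doe"));
    //toString() renders the table name, bordered header and padded rows
    String formattedTable = tableBuilder.toString();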
This is where * the name of the table should go. * - * @param section Table name + * @param tableName Table name */ - public void addSection(String section) { - this.section = section + NEW_LINE + NEW_LINE; + public void setTableName(String tableName) { + this.tableName = tableName + NEW_LINE + NEW_LINE; } /** - * Creates a horizontal bar given the length param. These are used to - * box the header up and at the bottom of the table. + * Creates a border given the length param. * * @return Ex: \t+----------------------+\n */ - private String buildHorizontalBar(int length) { - if (length == 0) { - return ""; - } - //Output: \t+----------------------+\n + private String createBorder(int length) { return TAB + HEADER_CORNER + StringUtils.repeat( HORIZONTAL_DELIMITER, length) + HEADER_CORNER + NEW_LINE; } @@ -262,7 +259,7 @@ public class SqliteTextExtractor extends ContentTextExtractor { List rowValues = new ArrayList<>(); vals.forEach((val) -> { rowValues.add(val.toString()); - characterCount += val.toString().length(); + charactersAdded += val.toString().length(); }); rows.add(rowValues.toArray( new String[rowValues.size()])); @@ -270,10 +267,10 @@ public class SqliteTextExtractor extends ContentTextExtractor { /** * Gets the max width of a cell in each column and the max number of - * columns in any given row. This ensures that there are enough columns + * columns in any given row. This ensures that there are enough columns * and enough space for even the longest entry. * - * @return + * @return array of column widths */ private int[] getMaxWidthPerColumn() { int maxNumberOfColumns = 0; @@ -296,32 +293,31 @@ public class SqliteTextExtractor extends ContentTextExtractor { } /** - * Returns a string version of the table, when printed to console it - * will be fully formatted. + * Returns a string version of the table, with all of the formatters and + * escape sequences necessary to print nicely in the console output. * * @return */ @Override public String toString() { - StringBuilder outputTable = new StringBuilder(characterCount); - - int barLength = 0; + StringBuilder outputTable = new StringBuilder(charactersAdded); int[] colMaxWidths = getMaxWidthPerColumn(); - boolean header = true; - for (String[] row : rows) { - addFormattedRowToBuffer(row, colMaxWidths, outputTable); - if (header) { - //Get the length of the horizontal bar from the length of the - //formatted header, minus the one tab added at the beginning - //of the row (we want to count the vertical delimiters since - //we want it all to line up. - barLength = outputTable.length() - ESCAPE_SEQUENCES; + int headerLength = 0; + + Iterator rowIterator = rows.iterator(); + if (rowIterator.hasNext()) { + //Length of the header defines the table boundaries + headerLength = appendFormattedHeader(rowIterator.next(), + colMaxWidths, outputTable); + + while (rowIterator.hasNext()) { + appendFormattedRow(rowIterator.next(), colMaxWidths, outputTable); } - addFormattedHeaderToBuffer(outputTable, barLength, header); - header = false; + + outputTable.insert(0, tableName); + outputTable.append(createBorder(headerLength)); + outputTable.append(NEW_LINE); } - outputTable.append(buildHorizontalBar(barLength)); - outputTable.append(NEW_LINE); return outputTable.toString(); } @@ -336,7 +332,7 @@ public class SqliteTextExtractor extends ContentTextExtractor { * everything is pretty printed. 
* @param outputTable Buffer that formatted contents are written to */ - private void addFormattedRowToBuffer(String[] row, + private void appendFormattedRow(String[] row, int[] colMaxWidths, StringBuilder outputTable) { outputTable.append(TAB); for (int colNum = 0; colNum < row.length; colNum++) { @@ -352,25 +348,33 @@ public class SqliteTextExtractor extends ContentTextExtractor { } /** - * Outputs a fully formatted header. + * Adds a fully formatted header to the table builder and returns the + * length of this header. The length of the header is needed to set the + * table boundaries * - * Example: \t+----------------------+\n \t| Email | Phone | Name |\n - * \t+----------------------+\n + * Example: \t+----------------------+\n + * \t| Email | Phone | Name |\n + * \t+----------------------+\n * - * @param outputTable Buffer that formatted contents are written to - * @param barLength Length of the bar (i.e. +---------+) that will - * surround the header, based off of the length of - * the formatted header row - * @param needsHeader Boolean denoting if the header has been added to - * the buffer + * @param row Array of contents in each column + * @param colMaxWidths Widths for each column in the table + * @param outputTable Output stringbuilder + * + * @return length of the formatted header, this length will be needed to + * correctly print the bottom table border. */ - private void addFormattedHeaderToBuffer(StringBuilder outputTable, - int barLength, boolean needsHeader) { - if (needsHeader) { - outputTable.insert(0, buildHorizontalBar(barLength)); - outputTable.insert(0, section); - outputTable.append(buildHorizontalBar(barLength)); - } + private int appendFormattedHeader(String[] row, int[] colMaxWidths, StringBuilder outputTable) { + appendFormattedRow(row, colMaxWidths, outputTable); + //Printable table dimensions are equal to the length of the header minus + //the number of escape sequences used to for formatting. + int barLength = outputTable.length() - ESCAPE_SEQUENCES; + String border = createBorder(barLength); + + //Surround the header with borders above and below. 
+ outputTable.insert(0, border); + outputTable.append(border); + + return barLength; } } -} \ No newline at end of file +} diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java index 679bd5b9cc..ca4b09a5c1 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java @@ -57,6 +57,7 @@ class TikaTextExtractor extends ContentTextExtractor { static final private Logger logger = Logger.getLogger(TikaTextExtractor.class.getName()); private final ExecutorService tikaParseExecutor = Executors.newSingleThreadExecutor(); + private final String SQLITE_MIMETYPE = "application/x-sqlite3"; private final AutoDetectParser parser = new AutoDetectParser(); @@ -193,9 +194,8 @@ class TikaTextExtractor extends ContentTextExtractor { @Override public boolean isSupported(Content content, String detectedFormat) { - final String SQLITE_MIMETYPE = "application/x-sqlite3"; if (detectedFormat == null - || ContentTextExtractor.BLOB_MIME_TYPES.contains(detectedFormat) //any binary unstructured blobs (string extraction will be used) + || ContentTextExtractor.BINARY_MIME_TYPES.contains(detectedFormat) //any binary unstructured blobs (string extraction will be used) || ContentTextExtractor.ARCHIVE_MIME_TYPES.contains(detectedFormat) || (detectedFormat.startsWith("video/") && !detectedFormat.equals("video/x-flv")) //skip video other than flv (tika supports flv only) //NON-NLS || detectedFormat.equals(SQLITE_MIMETYPE) //Skip sqlite files, Tika cannot handle virtual tables and will fail with an exception. //NON-NLS From ddd923add54cd73bec9a2b0c99a6c6e1c8f80dfa Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dsmyda" Date: Mon, 10 Sep 2018 15:51:34 -0400 Subject: [PATCH 18/33] Changed 1 variable name --- .../autopsy/keywordsearch/SqliteTextExtractor.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java index c45dc3920b..9ced18fea6 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java @@ -302,12 +302,12 @@ public class SqliteTextExtractor extends ContentTextExtractor { public String toString() { StringBuilder outputTable = new StringBuilder(charactersAdded); int[] colMaxWidths = getMaxWidthPerColumn(); - int headerLength = 0; + int borderLength = 0; Iterator rowIterator = rows.iterator(); if (rowIterator.hasNext()) { //Length of the header defines the table boundaries - headerLength = appendFormattedHeader(rowIterator.next(), + borderLength = appendFormattedHeader(rowIterator.next(), colMaxWidths, outputTable); while (rowIterator.hasNext()) { @@ -315,7 +315,7 @@ public class SqliteTextExtractor extends ContentTextExtractor { } outputTable.insert(0, tableName); - outputTable.append(createBorder(headerLength)); + outputTable.append(createBorder(borderLength)); outputTable.append(NEW_LINE); } @@ -367,14 +367,14 @@ public class SqliteTextExtractor extends ContentTextExtractor { appendFormattedRow(row, colMaxWidths, outputTable); //Printable table dimensions are equal to the length of the header minus //the number of escape sequences used to for formatting. 
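One way to read the ESCAPE_SEQUENCES arithmetic above, worked through with the header from the Javadoc example (my reading of the constant, not spelled out in the patch):

    //header = "\t| Email | Phone | Name |\n"   ->  length() == 26
    //The tab, the newline, and the two '+' corner positions account for
    //4 of those characters, so borderLength = 26 - 4 = 22 dashes, and
    //createBorder(22) yields "\t+----------------------+\n", a 24-column
    //bar that lines up exactly under the 24 visible header characters.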
- int barLength = outputTable.length() - ESCAPE_SEQUENCES; - String border = createBorder(barLength); + int borderLength = outputTable.length() - ESCAPE_SEQUENCES; + String border = createBorder(borderLength); //Surround the header with borders above and below. outputTable.insert(0, border); outputTable.append(border); - return barLength; + return borderLength; } } } From 4dff05f58a668e9c562cdc82a90846751097e5a0 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dsmyda" Date: Mon, 17 Sep 2018 09:29:02 -0400 Subject: [PATCH 19/33] Small little name change --- .../autopsy/keywordsearch/SqliteTextExtractor.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java index 9ced18fea6..f33b52bb9d 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java @@ -116,9 +116,9 @@ public class SqliteTextExtractor extends ContentTextExtractor { private CharSequence getDatabaseContents(Content source, AbstractReader reader) { try { Map tables = reader.getTableSchemas(); - LinkedList databaseStorage = new LinkedList<>(); + Collection databaseStorage = new LinkedList<>(); - Integer charactersCopied = loadDatabaseIntoList(databaseStorage, + Integer charactersCopied = loadDatabaseIntoCollection(databaseStorage, tables, reader, source); return toCharSequence(databaseStorage, charactersCopied); @@ -138,12 +138,12 @@ public class SqliteTextExtractor extends ContentTextExtractor { * rows from the table. The table string will be added to the list of * contents. * - * @param databaseStorage List containing all of the database content + * @param databaseStorage Collection containing all of the database content * @param tables A map of table names to table schemas * @param reader SqliteReader for interfacing with the database * @param source Source database file for logging */ - private int loadDatabaseIntoList(LinkedList databaseStorage, + private int loadDatabaseIntoCollection(Collection databaseStorage, Map tables, AbstractReader reader, Content source) { int charactersCopied = 0; @@ -178,7 +178,7 @@ public class SqliteTextExtractor extends ContentTextExtractor { * * @return A character seqeunces of the database contents */ - private CharSequence toCharSequence(LinkedList databaseStorage, + private CharSequence toCharSequence(Collection databaseStorage, int characterCount) { final char[] databaseCharArray = new char[characterCount]; From 72fcc406bc5c3076b1b76f6ed184cd54a32175c2 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dsmyda" Date: Tue, 18 Sep 2018 12:32:26 -0400 Subject: [PATCH 20/33] Fixed codacy suggestions --- .../keywordsearch/SqliteTextExtractor.java | 24 +++++++++---------- .../keywordsearch/TikaTextExtractor.java | 2 +- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java index f33b52bb9d..1c5dfacc1e 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java @@ -47,9 +47,9 @@ import org.sleuthkit.autopsy.tabulardatareader.FileReaderFactory; */ public class SqliteTextExtractor extends ContentTextExtractor { - private final String 
SQLITE_MIMETYPE = "application/x-sqlite3"; + private static final String SQLITE_MIMETYPE = "application/x-sqlite3"; private static final Logger logger = Logger.getLogger(SqliteTextExtractor.class.getName()); - private final CharSequence EMPTY_CHARACTER_SEQUENCE = ""; + private static final CharSequence EMPTY_CHARACTER_SEQUENCE = ""; @Override boolean isContentTypeSpecific() { @@ -62,8 +62,8 @@ public class SqliteTextExtractor extends ContentTextExtractor { } @Override - public void logWarning(String msg, Exception ex) { - logger.log(Level.WARNING, msg, ex); //NON-NLS + public void logWarning(String msg, Exception exception) { + logger.log(Level.WARNING, msg, exception); //NON-NLS } /** @@ -196,7 +196,7 @@ public class SqliteTextExtractor extends ContentTextExtractor { return new Segment(databaseCharArray, 0, characterCount); } - /* + /** * Formats input so that it reads as a table in the console or in a text * viewer */ @@ -206,16 +206,16 @@ public class SqliteTextExtractor extends ContentTextExtractor { private Integer charactersAdded = 0; //Formatters - private final String HORIZONTAL_DELIMITER = "-"; - private final String VERTICAL_DELIMITER = "|"; - private final String HEADER_CORNER = "+"; + private static final String HORIZONTAL_DELIMITER = "-"; + private static final String VERTICAL_DELIMITER = "|"; + private static final String HEADER_CORNER = "+"; - private final String TAB = "\t"; - private final String NEW_LINE = "\n"; - private final String SPACE = " "; + private static final String TAB = "\t"; + private static final String NEW_LINE = "\n"; + private static final String SPACE = " "; //Number of escape sequences in the header row - private final int ESCAPE_SEQUENCES = 4; + private static final int ESCAPE_SEQUENCES = 4; private String tableName = ""; diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java index ca4b09a5c1..2b1d24f89b 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TikaTextExtractor.java @@ -57,7 +57,7 @@ class TikaTextExtractor extends ContentTextExtractor { static final private Logger logger = Logger.getLogger(TikaTextExtractor.class.getName()); private final ExecutorService tikaParseExecutor = Executors.newSingleThreadExecutor(); - private final String SQLITE_MIMETYPE = "application/x-sqlite3"; + private static final String SQLITE_MIMETYPE = "application/x-sqlite3"; private final AutoDetectParser parser = new AutoDetectParser(); From d5e4e1cb41eeed3ca0bd35412a2654ed54f57c21 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dsmyda" Date: Tue, 18 Sep 2018 12:51:20 -0400 Subject: [PATCH 21/33] Changed constructor param in readers to accept an abstract file and added cast to SqliteTextExtractor --- .../autopsy/tabulardatareader/AbstractReader.java | 8 ++++---- .../autopsy/tabulardatareader/ExcelReader.java | 4 ++-- .../tabulardatareader/FileReaderFactory.java | 4 ++-- .../autopsy/tabulardatareader/SQLiteReader.java | 5 ++--- .../keywordsearch/SqliteTextExtractor.java | 15 ++++++++++++++- 5 files changed, 24 insertions(+), 12 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java index b488bd6aa8..0001abff44 100755 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java +++ 
b/Core/src/org/sleuthkit/autopsy/tabulardatareader/AbstractReader.java @@ -25,7 +25,7 @@ import java.util.Map; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.datamodel.ContentUtils; -import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.TskCoreException; /** @@ -34,7 +34,7 @@ import org.sleuthkit.datamodel.TskCoreException; */ public abstract class AbstractReader implements AutoCloseable { - public AbstractReader(Content file) + public AbstractReader(AbstractFile file) throws FileReaderInitException { try { @@ -54,7 +54,7 @@ public abstract class AbstractReader implements AutoCloseable { * @return Valid local path for copying * @throws NoCurrentCaseException if the current case has been closed. */ - final String getLocalDiskPath(Content file) throws FileReaderException { + final String getLocalDiskPath(AbstractFile file) throws FileReaderException { try { return Case.getCurrentCaseThrows().getTempDirectory() + File.separator + file.getId() + file.getName(); @@ -73,7 +73,7 @@ public abstract class AbstractReader implements AutoCloseable { * @throws NoCurrentCaseException Current case closed during file copying * @throws TskCoreException Exception finding files from abstract file */ - private void writeDataSourceToLocalDisk(Content file, String localDiskPath) + private void writeDataSourceToLocalDisk(AbstractFile file, String localDiskPath) throws FileReaderInitException { try { diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java index b1a5b40cfd..cc4d2719de 100755 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java +++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/ExcelReader.java @@ -38,7 +38,7 @@ import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.IngestServices; import com.monitorjbl.xlsx.StreamingReader; import org.apache.poi.hssf.OldExcelFormatException; -import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.AbstractFile; /** * Reads excel files and implements the abstract reader api for interfacing with @@ -58,7 +58,7 @@ public final class ExcelReader extends AbstractReader { private final static String EMPTY_CELL_STRING = ""; private Map headerCache; - public ExcelReader(Content file, String mimeType) + public ExcelReader(AbstractFile file, String mimeType) throws FileReaderInitException { super(file); try { diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java index ffd152c80d..2887f1cd95 100755 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java +++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/FileReaderFactory.java @@ -19,7 +19,7 @@ package org.sleuthkit.autopsy.tabulardatareader; import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException; -import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.AbstractFile; /** * Factory for creating the correct reader given the mime type of a file. 
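As context for the signature change, the factory body is a plain switch on mime type; after this patch it hands an AbstractFile to each reader. A trimmed sketch (the Excel mime-type cases and the no-match branch are assumptions inferred from ExcelReader above, not shown in this hunk):

    public static AbstractReader createReader(String mimeType, AbstractFile file)
            throws FileReaderInitException {
        switch (mimeType) {
            case "application/x-sqlite3":
                return new SQLiteReader(file);
            case "application/vnd.ms-excel":
            case "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":
                return new ExcelReader(file, mimeType);
            default:
                //The Throwable-taking constructor is the only one visible in
                //this series, so the no-match case is wrapped accordingly
                throw new FileReaderInitException(new IllegalArgumentException(
                        "No reader exists for mime type: " + mimeType));
        }
    }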
@@ -38,7 +38,7 @@ public final class FileReaderFactory { * @return The correct reader class needed to read the file contents * @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException */ - public static AbstractReader createReader(String mimeType, Content file) + public static AbstractReader createReader(String mimeType, AbstractFile file) throws FileReaderInitException { switch (mimeType) { case "application/x-sqlite3": diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java index 408193b2b3..39b738525c 100755 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java +++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java @@ -41,7 +41,6 @@ import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; @@ -61,7 +60,7 @@ public final class SQLiteReader extends AbstractReader { * @param sqliteDbFile Data source abstract file * @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException */ - public SQLiteReader(Content sqliteDbFile) throws FileReaderInitException { + public SQLiteReader(AbstractFile sqliteDbFile) throws FileReaderInitException { super(sqliteDbFile); try { final String localDiskPath = super.getLocalDiskPath(sqliteDbFile); @@ -86,7 +85,7 @@ public final class SQLiteReader extends AbstractReader { * @throws TskCoreException fileManager cannot find AbstractFile files. * @throws IOException Issue during writing to file. 
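For background: a SQLite database captured mid-use is often accompanied by -wal (write-ahead log) and -shm (shared memory) sidecar files, and committed rows still sitting in the WAL are invisible unless those files travel with the copied database. The copy step amounts to something like the following sketch; the file-manager lookup and temp-path layout are assumptions, not this patch's exact code:

    //Find sidecar files that sit next to the database in the image and
    //write them beside the temp copy so the SQLite driver can see them
    List<AbstractFile> metaFiles = fileManager.findFiles(
            sqliteFile.getDataSource(), metaFileName, sqliteFile.getParentPath());
    if (metaFiles != null) {
        for (AbstractFile metaFile : metaFiles) {
            String diskPath = tempDirPath + File.separator
                    + metaFile.getId() + metaFile.getName();
            ContentUtils.writeToFile(metaFile, new File(diskPath));
        }
    }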
*/ - private void findAndCopySQLiteMetaFile(Content sqliteFile, + private void findAndCopySQLiteMetaFile(AbstractFile sqliteFile, String metaFileName) throws NoCurrentCaseException, TskCoreException, IOException { Case openCase = Case.getCurrentCaseThrows(); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java index 1c5dfacc1e..32c5c1739e 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java @@ -35,6 +35,7 @@ import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitExce import org.sleuthkit.datamodel.Content; import org.apache.commons.lang3.StringUtils; import org.sleuthkit.autopsy.tabulardatareader.FileReaderFactory; +import org.sleuthkit.datamodel.AbstractFile; /** * Dedicated SqliteTextExtractor to solve the problems associated with Tika's @@ -91,8 +92,20 @@ public class SqliteTextExtractor extends ContentTextExtractor { */ @Override public Reader getReader(Content source) throws TextExtractorException { + //Firewall for any content that is not an AbstractFile + if(!AbstractFile.class.isInstance(source)) { + try { + return CharSource.wrap(EMPTY_CHARACTER_SEQUENCE).openStream(); + } catch (IOException ex) { + throw new TextExtractorException( + String.format("Encountered an issue wrapping blank string" //NON-NLS + + " with CharSource for non-abstract file with id: [%s]," //NON-NLS + + " name: [%s].", source.getId(), source.getName())); //NON-NLS + } + } + try (AbstractReader reader = FileReaderFactory.createReader( - SQLITE_MIMETYPE, source)) { + SQLITE_MIMETYPE, (AbstractFile) source)) { final CharSequence databaseContent = getDatabaseContents(source, reader); //CharSource will maintain unicode strings correctly return CharSource.wrap(databaseContent).openStream(); From 4ce381fe4f806364774c1a1451fc1f1bdfd82218 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dsmyda" Date: Tue, 18 Sep 2018 13:02:14 -0400 Subject: [PATCH 22/33] Made SqliteTextExtractor not public --- .../sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java index 32c5c1739e..06a595d203 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java @@ -46,7 +46,7 @@ import org.sleuthkit.datamodel.AbstractFile; * 2) Tika fails to open tables with spaces in table name * 3) Tika fails to include the table names in output (except for the first table it parses) */ -public class SqliteTextExtractor extends ContentTextExtractor { +class SqliteTextExtractor extends ContentTextExtractor { private static final String SQLITE_MIMETYPE = "application/x-sqlite3"; private static final Logger logger = Logger.getLogger(SqliteTextExtractor.class.getName()); From e56f28f813b65cdd3bd5f5ca78dd6224e656645c Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dsmyda" Date: Tue, 18 Sep 2018 14:09:01 -0400 Subject: [PATCH 23/33] Fixed string in exception text --- .../sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java 
b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java index 06a595d203..65e0fecb6c 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java @@ -113,7 +113,7 @@ class SqliteTextExtractor extends ContentTextExtractor { throw new TextExtractorException( String.format("Encountered a FileReaderInitException" //NON-NLS + " when trying to initialize a SQLiteReader" //NON-NLS - + " for Content with id: [%s], name: [%s].", //NON-NLS + + " for AbstractFile with id: [%s], name: [%s].", //NON-NLS source.getId(), source.getName())); } } From 4fc5a59129e538d0947f1ed1dc89c41ad7ba0c4e Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dsmyda" Date: Tue, 18 Sep 2018 16:05:37 -0400 Subject: [PATCH 24/33] Didn't get merge quite right, here are changes to build --- .../org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java b/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java index eee440a896..2acd5e1d00 100755 --- a/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java +++ b/Core/src/org/sleuthkit/autopsy/tabulardatareader/SQLiteReader.java @@ -66,14 +66,14 @@ public final class SQLiteReader extends AbstractReader { public SQLiteReader(AbstractFile sqliteDbFile) throws FileReaderInitException { super(sqliteDbFile); try { - final String localDiskPath = super.getLocalDiskPath(sqliteDbFile); + final String localDiskPath = super.getLocalDiskPath(); // Look for any meta files associated with this DB - WAL, SHM, etc. findAndCopySQLiteMetaFile(sqliteDbFile, sqliteDbFile.getName() + "-wal"); findAndCopySQLiteMetaFile(sqliteDbFile, sqliteDbFile.getName() + "-shm"); connection = getDatabaseConnection(localDiskPath); } catch (ClassNotFoundException | SQLException |IOException | - NoCurrentCaseException | TskCoreException | FileReaderException ex) { + NoCurrentCaseException | TskCoreException ex) { throw new FileReaderInitException(ex); } } From d195fe002824642b7a6d0a917422b786afc10095 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Tue, 18 Sep 2018 18:13:23 -0400 Subject: [PATCH 25/33] 4163 close unclosed result sets codacy noticed --- .../centralrepository/datamodel/AbstractSqlEamDb.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index 6385a2eafc..cd2a11b031 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -230,7 +230,7 @@ abstract class AbstractSqlEamDb implements EamDb { + "examiner_name, examiner_email, examiner_phone, notes) " + "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) 
" + getConflictClause(); - + ResultSet resultSet = null; try { preparedStatement = conn.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS); @@ -270,7 +270,7 @@ abstract class AbstractSqlEamDb implements EamDb { preparedStatement.executeUpdate(); //update the case in the caches - ResultSet resultSet = preparedStatement.getGeneratedKeys(); + resultSet = preparedStatement.getGeneratedKeys(); if (!resultSet.next()) { throw new EamDbException(String.format("Failed to INSERT case %s in central repo", eamCase.getCaseUUID())); } @@ -283,6 +283,7 @@ abstract class AbstractSqlEamDb implements EamDb { } catch (SQLException ex) { throw new EamDbException("Error inserting new case.", ex); // NON-NLS } finally { + EamDbUtil.closeResultSet(resultSet); EamDbUtil.closeStatement(preparedStatement); EamDbUtil.closeConnection(conn); } @@ -584,7 +585,7 @@ abstract class AbstractSqlEamDb implements EamDb { String sql = "INSERT INTO data_sources(device_id, case_id, name) VALUES (?, ?, ?) " + getConflictClause(); - + ResultSet resultSet = null; try { preparedStatement = conn.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS); @@ -593,7 +594,7 @@ abstract class AbstractSqlEamDb implements EamDb { preparedStatement.setString(3, eamDataSource.getName()); preparedStatement.executeUpdate(); - ResultSet resultSet = preparedStatement.getGeneratedKeys(); + resultSet = preparedStatement.getGeneratedKeys(); if (!resultSet.next()) { throw new EamDbException(String.format("Failed to INSERT data source %s in central repo", eamDataSource.getName())); } @@ -604,6 +605,7 @@ abstract class AbstractSqlEamDb implements EamDb { } catch (SQLException ex) { throw new EamDbException("Error inserting new data source.", ex); // NON-NLS } finally { + EamDbUtil.closeResultSet(resultSet); EamDbUtil.closeStatement(preparedStatement); EamDbUtil.closeConnection(conn); } From fecdecb48456fdc05c612cb8eaa3694bcd46fb01 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dsmyda" Date: Wed, 19 Sep 2018 10:17:40 -0400 Subject: [PATCH 26/33] Implemented suggested changes and fixed last codacy issue --- .../keywordsearch/SqliteTextExtractor.java | 57 ++++++++++--------- 1 file changed, 30 insertions(+), 27 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java index 65e0fecb6c..3ecb1a63ac 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java @@ -34,6 +34,7 @@ import org.sleuthkit.autopsy.tabulardatareader.AbstractReader; import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException; import org.sleuthkit.datamodel.Content; import org.apache.commons.lang3.StringUtils; +import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException; import org.sleuthkit.autopsy.tabulardatareader.FileReaderFactory; import org.sleuthkit.datamodel.AbstractFile; @@ -93,19 +94,19 @@ class SqliteTextExtractor extends ContentTextExtractor { @Override public Reader getReader(Content source) throws TextExtractorException { //Firewall for any content that is not an AbstractFile - if(!AbstractFile.class.isInstance(source)) { + if (!AbstractFile.class.isInstance(source)) { try { return CharSource.wrap(EMPTY_CHARACTER_SEQUENCE).openStream(); } catch (IOException ex) { throw new TextExtractorException( - String.format("Encountered an issue wrapping blank string" //NON-NLS - + " with CharSource for 
non-abstract file with id: [%s]," //NON-NLS - + " name: [%s].", source.getId(), source.getName())); //NON-NLS + String.format("Encountered an issue wrapping blank string" //NON-NLS + + " with CharSource for non-abstract file with id: [%s]," //NON-NLS + + " name: [%s].", source.getId(), source.getName()), ex); //NON-NLS } } - + try (AbstractReader reader = FileReaderFactory.createReader( - SQLITE_MIMETYPE, (AbstractFile) source)) { + (AbstractFile) source, SQLITE_MIMETYPE)) { final CharSequence databaseContent = getDatabaseContents(source, reader); //CharSource will maintain unicode strings correctly return CharSource.wrap(databaseContent).openStream(); @@ -114,7 +115,12 @@ class SqliteTextExtractor extends ContentTextExtractor { String.format("Encountered a FileReaderInitException" //NON-NLS + " when trying to initialize a SQLiteReader" //NON-NLS + " for AbstractFile with id: [%s], name: [%s].", //NON-NLS - source.getId(), source.getName())); + source.getId(), source.getName()), ex); + } catch (FileReaderException ex) { + throw new TextExtractorException( + String.format("Could not get contents from database " //NON-NLS + + "tables for AbstractFile with id [%s], name: [%s].", //NON-NLS + source.getId(), source.getName()), ex); } } @@ -126,24 +132,14 @@ class SqliteTextExtractor extends ContentTextExtractor { * @param reader Sqlite reader for the content source * @param source Sqlite file source */ - private CharSequence getDatabaseContents(Content source, AbstractReader reader) { - try { - Map tables = reader.getTableSchemas(); - Collection databaseStorage = new LinkedList<>(); + private CharSequence getDatabaseContents(Content source, AbstractReader reader) throws FileReaderException { + Map tables = reader.getTableSchemas(); + Collection databaseStorage = new LinkedList<>(); - Integer charactersCopied = loadDatabaseIntoCollection(databaseStorage, - tables, reader, source); + Integer charactersCopied = loadDatabaseIntoCollection(databaseStorage, + tables, reader, source); - return toCharSequence(databaseStorage, charactersCopied); - } catch (AbstractReader.FileReaderException ex) { - logger.log(Level.WARNING, String.format( - "Error attempting to get tables from file: " //NON-NLS - + "[%s] (id=%d).", source.getName(), //NON-NLS - source.getId()), ex); - } - - //Failed to get tables from file - return EMPTY_CHARACTER_SEQUENCE; + return toCharSequence(databaseStorage, charactersCopied); } /** @@ -172,7 +168,7 @@ class SqliteTextExtractor extends ContentTextExtractor { tableBuilder.addRow(row.values()); } } - } catch (AbstractReader.FileReaderException ex) { + } catch (FileReaderException ex) { logger.log(Level.WARNING, String.format( "Error attempting to read file table: [%s]" //NON-NLS + " for file: [%s] (id=%d).", tableName, //NON-NLS @@ -186,10 +182,17 @@ class SqliteTextExtractor extends ContentTextExtractor { return charactersCopied; } + /** - * Copy linkedList elements into a CharSequence - * - * @return A character seqeunces of the database contents + * Copy elements from collection (which contains formatted database tables) + * into a CharSequence so that it can be wrapped and used by the Google CharSource + * lib. + * + * @param databaseStorage Collection containing database contents + * @param characterCount Number of characters needed to be allocated in the buffer + * so that all of the contents in the collection can be copied over. 
+ *
+ * @return A CharSequence of the formatted database contents
 */
 private CharSequence toCharSequence(Collection databaseStorage,
 int characterCount) {

From 6cfd20d58f207d6ba1a5ced0a9096a179d8b7b8d Mon Sep 17 00:00:00 2001
From: "U-BASIS\\dsmyda"
Date: Wed, 19 Sep 2018 10:20:09 -0400
Subject: [PATCH 27/33] Last minor param change

---
 .../autopsy/keywordsearch/SqliteTextExtractor.java | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java
index 3ecb1a63ac..17800d5c06 100755
--- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java
+++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SqliteTextExtractor.java
@@ -133,11 +133,10 @@ class SqliteTextExtractor extends ContentTextExtractor {
 * @param source Sqlite file source
 */
 private CharSequence getDatabaseContents(Content source, AbstractReader reader) throws FileReaderException {
- Map tables = reader.getTableSchemas();
 Collection databaseStorage = new LinkedList<>();

- Integer charactersCopied = loadDatabaseIntoCollection(databaseStorage,
- tables, reader, source);
+ Integer charactersCopied = loadDatabaseIntoCollection(databaseStorage,
+ reader, source);

 return toCharSequence(databaseStorage, charactersCopied);
 }
@@ -152,8 +151,10 @@ class SqliteTextExtractor extends ContentTextExtractor {
 * @param reader SqliteReader for interfacing with the database
 * @param source Source database file for logging
 */
- private int loadDatabaseIntoCollection(Collection databaseStorage,
- Map tables, AbstractReader reader, Content source) {
+ private int loadDatabaseIntoCollection(Collection databaseStorage,
+ AbstractReader reader, Content source) throws FileReaderException {
+ //Will throw a FileReaderException if table schemas are unattainable
+ Map tables = reader.getTableSchemas();
 int charactersCopied = 0;
 for (String tableName : tables.keySet()) {

From ed8592c03d9dec4bbee1aa3b706a629816e9b976 Mon Sep 17 00:00:00 2001
From: William Schaefer
Date: Wed, 19 Sep 2018 12:26:20 -0400
Subject: [PATCH 28/33] Modify tests to use appropriate method signature for
 AbstractCommonAttributeSearcher

---
 .../CommonAttributeSearchInterCaseTests.java       | 11 ++++-------
 .../IngestedWithHashAndFileTypeInterCaseTests.java | 11 +++--------
 2 files changed, 7 insertions(+), 15 deletions(-)

diff --git a/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/CommonAttributeSearchInterCaseTests.java b/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/CommonAttributeSearchInterCaseTests.java
index 966d0b8bce..e6ff7b743d 100644
--- a/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/CommonAttributeSearchInterCaseTests.java
+++ b/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/CommonAttributeSearchInterCaseTests.java
@@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.commonfilessearch;

 import java.nio.file.Path;
 import java.sql.SQLException;
-import java.util.Map;
 import junit.framework.Assert;
 import
junit.framework.Test; import org.netbeans.junit.NbModuleSuite; @@ -111,9 +110,8 @@ public class CommonAttributeSearchInterCaseTests extends NbTestCase { private void assertResultsAreOfType(CorrelationAttributeInstance.Type type) { try { - Map dataSources = this.utils.getDataSourceMap(); - AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(dataSources, false, false, type, 0); + AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, type, 0); CommonAttributeSearchResults metadata = builder.findMatches(); @@ -146,22 +144,21 @@ public class CommonAttributeSearchInterCaseTests extends NbTestCase { */ public void testTwo() { try { - Map dataSources = this.utils.getDataSourceMap(); AbstractCommonAttributeSearcher builder; CommonAttributeSearchResults metadata; - builder = new AllInterCaseCommonAttributeSearcher(dataSources, false, false, this.utils.USB_ID_TYPE, 100); + builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.USB_ID_TYPE, 100); metadata = builder.findMatches(); metadata.size(); //assertTrue("This should yield 13 results.", verifyInstanceCount(metadata, 13)); - builder = new AllInterCaseCommonAttributeSearcher(dataSources, false, false, this.utils.USB_ID_TYPE, 20); + builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.USB_ID_TYPE, 20); metadata = builder.findMatches(); metadata.size(); //assertTrue("This should yield no results.", verifyInstanceCount(metadata, 0)); - builder = new AllInterCaseCommonAttributeSearcher(dataSources, false, false, this.utils.USB_ID_TYPE, 90); + builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.USB_ID_TYPE, 90); metadata = builder.findMatches(); metadata.size(); //assertTrue("This should yield 2 results.", verifyInstanceCount(metadata, 2)); diff --git a/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/IngestedWithHashAndFileTypeInterCaseTests.java b/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/IngestedWithHashAndFileTypeInterCaseTests.java index 8f4f96e7a3..9c32eb0f94 100644 --- a/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/IngestedWithHashAndFileTypeInterCaseTests.java +++ b/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/IngestedWithHashAndFileTypeInterCaseTests.java @@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.commonfilessearch; import java.nio.file.Path; import java.sql.SQLException; -import java.util.Map; import junit.framework.Test; import org.netbeans.junit.NbModuleSuite; import org.netbeans.junit.NbTestCase; @@ -96,10 +95,8 @@ public class IngestedWithHashAndFileTypeInterCaseTests extends NbTestCase { */ public void testOne() { try { - Map dataSources = this.utils.getDataSourceMap(); - //note that the params false and false are presently meaningless because that feature is not supported yet - AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(dataSources, false, false, this.utils.FILE_TYPE, 0); + AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.FILE_TYPE, 0); CommonAttributeSearchResults metadata = builder.findMatches(); assertTrue("Results should not be empty", metadata.size() != 0); @@ -146,11 +143,10 @@ public class IngestedWithHashAndFileTypeInterCaseTests extends NbTestCase { */ public void testTwo() { try { - Map dataSources = this.utils.getDataSourceMap(); int matchesMustAlsoBeFoundInThisCase = this.utils.getCaseMap().get(CASE2); 
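The shape of the migration in these tests, side by side (condensed; fileType as declared in the surrounding methods):

    //Before: every call site built and passed a map of data sources
    //  new AllInterCaseCommonAttributeSearcher(dataSources, false, false, fileType, 0);
    //After: the searcher resolves data sources itself
    AbstractCommonAttributeSearcher builder =
            new AllInterCaseCommonAttributeSearcher(false, false, fileType, 0);
    CommonAttributeSearchResults metadata = builder.findMatches();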
CorrelationAttributeInstance.Type fileType = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(0); - AbstractCommonAttributeSearcher builder = new SingleInterCaseCommonAttributeSearcher(matchesMustAlsoBeFoundInThisCase, dataSources, false, false, fileType, 0); + AbstractCommonAttributeSearcher builder = new SingleInterCaseCommonAttributeSearcher(matchesMustAlsoBeFoundInThisCase, false, false, fileType, 0); CommonAttributeSearchResults metadata = builder.findMatches(); @@ -199,11 +195,10 @@ public class IngestedWithHashAndFileTypeInterCaseTests extends NbTestCase { */ public void testThree(){ try { - Map dataSources = this.utils.getDataSourceMap(); //note that the params false and false are presently meaningless because that feature is not supported yet CorrelationAttributeInstance.Type fileType = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(0); - AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(dataSources, false, false, fileType, 50); + AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, fileType, 50); CommonAttributeSearchResults metadata = builder.findMatches(); From 831126e61cdce3cf12d5302d5a7c500f6224406d Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Wed, 19 Sep 2018 13:03:49 -0400 Subject: [PATCH 29/33] Fixed switch case to avoid null values. --- .../autopsy/casemodule/CasePreferences.java | 26 ++++++++++++------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/CasePreferences.java b/Core/src/org/sleuthkit/autopsy/casemodule/CasePreferences.java index 1c853c7951..dcee43fc82 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/CasePreferences.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/CasePreferences.java @@ -98,16 +98,22 @@ public final class CasePreferences { Properties props = new Properties(); props.load(inputStream); String groupByDataSourceValue = props.getProperty(KEY_GROUP_BY_DATA_SOURCE); - switch (groupByDataSourceValue) { - case VALUE_TRUE: - groupItemsInTreeByDataSource = true; - break; - case VALUE_FALSE: - groupItemsInTreeByDataSource = false; - break; - default: - groupItemsInTreeByDataSource = null; - break; + if (groupByDataSourceValue != null) { + switch (groupByDataSourceValue) { + case VALUE_TRUE: + groupItemsInTreeByDataSource = true; + break; + case VALUE_FALSE: + groupItemsInTreeByDataSource = false; + break; + default: + logger.log(Level.WARNING, String.format("Unexpected value '%s' for key '%s'. 
Using 'null' instead.", + groupByDataSourceValue, KEY_GROUP_BY_DATA_SOURCE)); + groupItemsInTreeByDataSource = null; + break; + } + } else { + groupItemsInTreeByDataSource = null; } } catch (IOException ex) { logger.log(Level.SEVERE, "Error reading settings file", ex); From 25fc3f5ace2390557b0f3d3cca78af89eefedad8 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Wed, 19 Sep 2018 17:09:57 -0400 Subject: [PATCH 30/33] 4163 Handle new cases, data sources and other null values that can not go in cache --- .../datamodel/AbstractSqlEamDb.java | 52 ++++++++++++++----- 1 file changed, 38 insertions(+), 14 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index cd2a11b031..6f54f79812 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -20,6 +20,7 @@ package org.sleuthkit.autopsy.centralrepository.datamodel; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.List; @@ -402,6 +403,10 @@ abstract class AbstractSqlEamDb implements EamDb { public CorrelationCase getCaseByUUID(String caseUUID) throws EamDbException { try { return caseCacheByUUID.get(caseUUID, () -> getCaseByUUIDFromCr(caseUUID)); + } catch (CacheLoader.InvalidCacheLoadException ex) { + //cache can not store null values + logger.log(Level.INFO, "Unable to get current autopsy case from Central repo returning null as current case", ex); + return null; } catch (ExecutionException ex) { throw new EamDbException("Error getting autopsy case from Central repo", ex); } @@ -460,6 +465,10 @@ abstract class AbstractSqlEamDb implements EamDb { public CorrelationCase getCaseById(int caseId) throws EamDbException { try { return caseCacheById.get(caseId, () -> getCaseByIdFromCr(caseId)); + } catch (CacheLoader.InvalidCacheLoadException ex) { + //cache can not store null values + logger.log(Level.INFO, "Unable to get current autopsy case from Central repo returning null as current case", ex); + return null; } catch (ExecutionException ex) { throw new EamDbException("Error getting autopsy case from Central repo", ex); } @@ -544,9 +553,10 @@ abstract class AbstractSqlEamDb implements EamDb { } /** - * Create a key to the DataSourceCacheByDeviceId + * Create a key to the DataSourceCacheByDeviceId * - * @param caseId - the id of the CorrelationCase in the Central Repository + * @param caseId - the id of the CorrelationCase in the Central + * Repository * @param dataSourceDeviceId - the device Id of the data source * * @return a String to be used as a key for the dataSourceCacheByDeviceId @@ -558,8 +568,10 @@ abstract class AbstractSqlEamDb implements EamDb { /** * Create a key to the DataSourceCacheById * - * @param caseId - the id of the CorrelationCase in the Central Repository + * @param caseId - the id of the CorrelationCase in the Central + * Repository * @param dataSourceId - the id of the datasource in the central repository + * * @return a String to be used as a key for the dataSourceCacheById */ private static String getDataSourceByIdCacheKey(int caseId, int dataSourceId) { @@ -630,6 +642,10 @@ abstract class AbstractSqlEamDb implements EamDb { } try { return 
dataSourceCacheByDeviceId.get(getDataSourceByDeviceIdCacheKey(correlationCase.getID(), dataSourceDeviceId), () -> getDataSourceFromCr(correlationCase, dataSourceDeviceId));
+        } catch (CacheLoader.InvalidCacheLoadException ex) {
+            //cache can not store null values
+            logger.log(Level.INFO, "Unable to get current data source from Central repo returning null as current data source", ex);
+            return null;
         } catch (ExecutionException ex) {
             throw new EamDbException("Error getting data source from central repository", ex);
         }
@@ -694,6 +710,10 @@ abstract class AbstractSqlEamDb implements EamDb {
         }
         try {
             return dataSourceCacheById.get(getDataSourceByIdCacheKey(correlationCase.getID(), dataSourceId), () -> getDataSourceByIdFromCr(correlationCase, dataSourceId));
+        } catch (CacheLoader.InvalidCacheLoadException ex) {
+            //cache can not store null values
+            logger.log(Level.INFO, "Unable to get current data source from Central repo returning null as current data source", ex);
+            return null;
         } catch (ExecutionException ex) {
             throw new EamDbException("Error getting data source from central repository", ex);
         }
@@ -868,7 +888,7 @@ abstract class AbstractSqlEamDb implements EamDb {
     public List getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException {
         String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value);
-
+
         Connection conn = connect();
         List artifactInstances = new ArrayList<>();
@@ -1440,7 +1460,7 @@ abstract class AbstractSqlEamDb implements EamDb {
     @Override
     public CorrelationAttributeInstance getCorrelationAttributeInstance(CorrelationAttributeInstance.Type type, CorrelationCase correlationCase, CorrelationDataSource correlationDataSource, String value, String filePath) throws EamDbException, CorrelationAttributeNormalizationException {
-
+
         if (correlationCase == null) {
             throw new EamDbException("Correlation case is null");
         }
@@ -1459,7 +1479,7 @@
         try {
             String normalizedValue = CorrelationAttributeNormalizer.normalize(type, value);
-
+
             String tableName = EamDbUtil.correlationTypeToInstanceTableName(type);
             String sql
                     = "SELECT id, known_status, comment FROM "
@@ -1701,7 +1721,7 @@
                     artifactInstances.add(artifactInstance);
                 } catch (CorrelationAttributeNormalizationException ex) {
                     logger.log(Level.INFO, "Unable to get artifact instance from resultset.", ex);
-                }
+                }
             }
         } catch (SQLException ex) {
             throw new EamDbException("Error getting notable artifact instances.", ex); // NON-NLS
@@ -1724,7 +1744,7 @@
     @Override
     public Long getCountArtifactInstancesKnownBad(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException {
-
+
         String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value);
         Connection conn = connect();
@@ -1771,7 +1791,7 @@
     @Override
     public List getListCasesHavingArtifactInstancesKnownBad(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException {
-
+
         String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value);
         Connection conn = connect();
@@ -1931,7 +1951,7 @@
     public boolean isValueInReferenceSet(String value, int referenceSetID, int correlationTypeID) throws
     public boolean isValueInReferenceSet(String value, int referenceSetID, int correlationTypeID) throws EamDbException, CorrelationAttributeNormalizationException {
         String normalizeValued = CorrelationAttributeNormalizer.normalize(this.getCorrelationTypeById(correlationTypeID), value);
-        
+
         Connection conn = connect();
 
         Long matchingInstances = 0L;
@@ -1969,10 +1989,10 @@ abstract class AbstractSqlEamDb implements EamDb {
     @Override
     public boolean isArtifactKnownBadByReference(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException {
-        
+
         //this should be done here so that we can be certain that aType and value are valid before we proceed
         String normalizeValued = CorrelationAttributeNormalizer.normalize(aType, value);
-        
+
         // TEMP: Only support file correlation type
         if (aType.getId() != CorrelationAttributeInstance.FILES_TYPE_ID) {
             return false;
         }
@@ -1985,7 +2005,7 @@ abstract class AbstractSqlEamDb implements EamDb {
         ResultSet resultSet = null;
 
         String sql = "SELECT count(*) FROM %s WHERE value=? AND known_status=?";
-        try {            
+        try {
             preparedStatement = conn.prepareStatement(String.format(sql, EamDbUtil.correlationTypeToReferenceTableName(aType)));
             preparedStatement.setString(1, normalizeValued);
             preparedStatement.setByte(2, TskData.FileKnown.BAD.getFileKnownValue());
@@ -2595,7 +2615,7 @@ abstract class AbstractSqlEamDb implements EamDb {
             EamDbUtil.closeResultSet(resultSet);
             EamDbUtil.closeConnection(conn);
         }
-        
+
         return globalFileInstances;
     }
 
@@ -2872,6 +2892,10 @@ abstract class AbstractSqlEamDb implements EamDb {
     public CorrelationAttributeInstance.Type getCorrelationTypeById(int typeId) throws EamDbException {
         try {
             return typeCache.get(CorrelationAttributeInstance.FILES_TYPE_ID, () -> getCorrelationTypeByIdFromCr(typeId));
+        } catch (CacheLoader.InvalidCacheLoadException ex) {
+            //cache cannot store null values
+            logger.log(Level.INFO, "Unable to get correlation type from Central repo returning null as the correlation type", ex);
+            return null;
         } catch (ExecutionException ex) {
             throw new EamDbException("Error getting correlation type", ex);
         }
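Two details of the patch above are worth pausing on. First, Guava's Cache.get(key, loader) refuses to store a null value: when the loader lambda returns null it throws CacheLoader.InvalidCacheLoadException instead, which is why every lookup catches that exception and maps it back to null. Second, getCorrelationTypeById keys typeCache with the constant CorrelationAttributeInstance.FILES_TYPE_ID rather than its typeId argument, so all type ids share a single cache slot; keying by the requested id, as in the sketch below, avoids that collision. The sketch is a minimal, self-contained illustration of the null-handling pattern; CentralRepoLookup and findTypeNameInDb are illustrative stand-ins, not Autopsy APIs.

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

public class CentralRepoLookup {

    // Same shape as the caches in AbstractSqlEamDb: bounded by time, not size.
    private final Cache<Integer, String> typeNameCache = CacheBuilder.newBuilder()
            .expireAfterWrite(5, TimeUnit.MINUTES)
            .build();

    /**
     * Looks up a type name, loading it on a cache miss. Guava wraps a loader
     * that returns null in InvalidCacheLoadException because the cache cannot
     * store nulls; catching it turns "not found" back into a plain null.
     */
    public String getTypeName(int typeId) throws ExecutionException {
        try {
            // Key by the requested id so each id gets its own cache entry.
            return typeNameCache.get(typeId, () -> findTypeNameInDb(typeId));
        } catch (CacheLoader.InvalidCacheLoadException ex) {
            return null; // the row does not exist in the central repo yet
        }
    }

    private String findTypeNameInDb(int typeId) {
        return null; // stand-in for the real SELECT against the central repo
    }
}

Patch 32 below keeps this pattern but drops the logging, since a null from the loader is an expected condition rather than an error.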
From 4a4be1dc9f70e5355e1e6ada913ba512f1399192 Mon Sep 17 00:00:00 2001
From: William Schaefer
Date: Wed, 19 Sep 2018 17:27:30 -0400
Subject: [PATCH 31/33] Change logging level so messages will not be logged
 with every case creation

---
 .../centralrepository/datamodel/AbstractSqlEamDb.java | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java
index 6f54f79812..5719f6f44d 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java
@@ -405,7 +405,7 @@ abstract class AbstractSqlEamDb implements EamDb {
             return caseCacheByUUID.get(caseUUID, () -> getCaseByUUIDFromCr(caseUUID));
         } catch (CacheLoader.InvalidCacheLoadException ex) {
             //cache cannot store null values
-            logger.log(Level.INFO, "Unable to get current autopsy case from Central repo returning null as current case", ex);
+            logger.log(Level.FINE, "Unable to get current autopsy case from Central repo returning null as current case", ex);
             return null;
         } catch (ExecutionException ex) {
             throw new EamDbException("Error getting autopsy case from Central repo", ex);
@@ -467,7 +467,7 @@ abstract class AbstractSqlEamDb implements EamDb {
             return caseCacheById.get(caseId, () -> getCaseByIdFromCr(caseId));
         } catch (CacheLoader.InvalidCacheLoadException ex) {
             //cache cannot store null values
-            logger.log(Level.INFO, "Unable to get current autopsy case from Central repo returning null as current case", ex);
+            logger.log(Level.FINE, "Unable to get current autopsy case from Central repo returning null as current case", ex);
             return null;
         } catch (ExecutionException ex) {
             throw new EamDbException("Error getting autopsy case from Central repo", ex);
@@ -644,7 +644,7 @@ abstract class AbstractSqlEamDb implements EamDb {
             return dataSourceCacheByDeviceId.get(getDataSourceByDeviceIdCacheKey(correlationCase.getID(), dataSourceDeviceId), () -> getDataSourceFromCr(correlationCase, dataSourceDeviceId));
         } catch (CacheLoader.InvalidCacheLoadException ex) {
             //cache cannot store null values
-            logger.log(Level.INFO, "Unable to get current data source from Central repo returning null as current data source", ex);
+            logger.log(Level.FINE, "Unable to get current data source from Central repo returning null as current data source", ex);
             return null;
         } catch (ExecutionException ex) {
             throw new EamDbException("Error getting data source from central repository", ex);
@@ -712,7 +712,7 @@ abstract class AbstractSqlEamDb implements EamDb {
             return dataSourceCacheById.get(getDataSourceByIdCacheKey(correlationCase.getID(), dataSourceId), () -> getDataSourceByIdFromCr(correlationCase, dataSourceId));
         } catch (CacheLoader.InvalidCacheLoadException ex) {
             //cache cannot store null values
-            logger.log(Level.INFO, "Unable to get current data source from Central repo returning null as current data source", ex);
+            logger.log(Level.FINE, "Unable to get current data source from Central repo returning null as current data source", ex);
             return null;
         } catch (ExecutionException ex) {
             throw new EamDbException("Error getting data source from central repository", ex);
@@ -2894,7 +2894,7 @@ abstract class AbstractSqlEamDb implements EamDb {
             return typeCache.get(CorrelationAttributeInstance.FILES_TYPE_ID, () -> getCorrelationTypeByIdFromCr(typeId));
         } catch (CacheLoader.InvalidCacheLoadException ex) {
             //cache cannot store null values
-            logger.log(Level.INFO, "Unable to get correlation type from Central repo returning null as the correlation type", ex);
+            logger.log(Level.FINE, "Unable to get correlation type from Central repo returning null as the correlation type", ex);
             return null;
         } catch (ExecutionException ex) {
             throw new EamDbException("Error getting correlation type", ex);
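For context on why moving from Level.INFO to Level.FINE silences these messages: in java.util.logging, which Autopsy's logger wraps, both the root logger and the default console handler are configured at INFO out of the box, so FINE records are filtered before reaching any output. A standalone demonstration, assuming stock java.util.logging defaults:

import java.util.logging.ConsoleHandler;
import java.util.logging.Level;
import java.util.logging.Logger;

public class LevelDemo {

    private static final Logger logger = Logger.getLogger(LevelDemo.class.getName());

    public static void main(String[] args) {
        logger.info("printed: INFO meets the default threshold");
        logger.fine("dropped: FINE is below the default INFO threshold");

        // Seeing FINE output requires opening up both the logger and a handler.
        ConsoleHandler handler = new ConsoleHandler();
        handler.setLevel(Level.FINE);
        logger.addHandler(handler);
        logger.setLevel(Level.FINE);
        logger.fine("printed now that logger and handler both allow FINE");
    }
}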
From 8b53713dd2a2c993706a2c5097218268d46a8373 Mon Sep 17 00:00:00 2001
From: William Schaefer
Date: Wed, 19 Sep 2018 17:39:36 -0400
Subject: [PATCH 32/33] remove logging for expected cache loading exceptions

---
 .../datamodel/AbstractSqlEamDb.java | 25 ++++++++-----------
 1 file changed, 10 insertions(+), 15 deletions(-)

diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java
index 5719f6f44d..2aae94cb24 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java
@@ -403,9 +403,8 @@ abstract class AbstractSqlEamDb implements EamDb {
     public CorrelationCase getCaseByUUID(String caseUUID) throws EamDbException {
         try {
             return caseCacheByUUID.get(caseUUID, () -> getCaseByUUIDFromCr(caseUUID));
-        } catch (CacheLoader.InvalidCacheLoadException ex) {
-            //cache cannot store null values
-            logger.log(Level.FINE, "Unable to get current autopsy case from Central repo returning null as current case", ex);
+        } catch (CacheLoader.InvalidCacheLoadException ignored) {
+            //the value loader lambda returned null, which the cache cannot store; this is normal when the case does not exist in the central repo yet
             return null;
         } catch (ExecutionException ex) {
             throw new EamDbException("Error getting autopsy case from Central repo", ex);
@@ -465,9 +464,8 @@ abstract class AbstractSqlEamDb implements EamDb {
     public CorrelationCase getCaseById(int caseId) throws EamDbException {
         try {
             return caseCacheById.get(caseId, () -> getCaseByIdFromCr(caseId));
-        } catch (CacheLoader.InvalidCacheLoadException ex) {
-            //cache cannot store null values
-            logger.log(Level.FINE, "Unable to get current autopsy case from Central repo returning null as current case", ex);
+        } catch (CacheLoader.InvalidCacheLoadException ignored) {
+            //the value loader lambda returned null, which the cache cannot store; this is normal when the case does not exist in the central repo yet
             return null;
         } catch (ExecutionException ex) {
             throw new EamDbException("Error getting autopsy case from Central repo", ex);
@@ -642,9 +640,8 @@ abstract class AbstractSqlEamDb implements EamDb {
         }
         try {
             return dataSourceCacheByDeviceId.get(getDataSourceByDeviceIdCacheKey(correlationCase.getID(), dataSourceDeviceId), () -> getDataSourceFromCr(correlationCase, dataSourceDeviceId));
-        } catch (CacheLoader.InvalidCacheLoadException ex) {
-            //cache cannot store null values
-            logger.log(Level.FINE, "Unable to get current data source from Central repo returning null as current data source", ex);
+        } catch (CacheLoader.InvalidCacheLoadException ignored) {
+            //the value loader lambda returned null, which the cache cannot store; this is normal when the data source does not exist in the central repo yet
             return null;
         } catch (ExecutionException ex) {
             throw new EamDbException("Error getting data source from central repository", ex);
@@ -710,9 +707,8 @@ abstract class AbstractSqlEamDb implements EamDb {
         }
         try {
             return dataSourceCacheById.get(getDataSourceByIdCacheKey(correlationCase.getID(), dataSourceId), () -> getDataSourceByIdFromCr(correlationCase, dataSourceId));
-        } catch (CacheLoader.InvalidCacheLoadException ex) {
-            //cache cannot store null values
-            logger.log(Level.FINE, "Unable to get current data source from Central repo returning null as current data source", ex);
+        } catch (CacheLoader.InvalidCacheLoadException ignored) {
+            //the value loader lambda returned null, which the cache cannot store; this is normal when the data source does not exist in the central repo yet
             return null;
         } catch (ExecutionException ex) {
             throw new EamDbException("Error getting data source from central repository", ex);
@@ -2892,9 +2888,8 @@ abstract class AbstractSqlEamDb implements EamDb {
     public CorrelationAttributeInstance.Type getCorrelationTypeById(int typeId) throws EamDbException {
         try {
             return typeCache.get(CorrelationAttributeInstance.FILES_TYPE_ID, () -> getCorrelationTypeByIdFromCr(typeId));
-        } catch (CacheLoader.InvalidCacheLoadException ex) {
-            //cache cannot store null values
-            logger.log(Level.FINE, "Unable to get correlation type from Central repo returning null as the correlation type", ex);
+        } catch (CacheLoader.InvalidCacheLoadException ignored) {
+            //the value loader lambda returned null, which the cache cannot store; this is normal when the correlation type does not exist in the central repo yet
             return null;
         } catch (ExecutionException ex) {
             throw new EamDbException("Error getting correlation type", ex);
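Dropping the log calls treats absence as the non-event it is, while still paying for an InvalidCacheLoadException on every miss of a nonexistent row. A hypothetical alternative, not what this patch does, is to cache Optional values so that absence itself is cacheable and the exception never fires (OptionalCaching and queryDb are invented names):

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.util.Optional;
import java.util.concurrent.ExecutionException;

public class OptionalCaching {

    private final Cache<Integer, Optional<String>> cache = CacheBuilder.newBuilder().build();

    public String lookup(int id) throws ExecutionException {
        // The loader never returns null; absence is an empty Optional, which
        // the cache can store, so no InvalidCacheLoadException is possible.
        return cache.get(id, () -> Optional.ofNullable(queryDb(id))).orElse(null);
    }

    private String queryDb(int id) {
        return null; // stand-in for the real query
    }
}

The trade-off is that a cached empty Optional keeps answering "absent" until it expires or is invalidated, so any code path that inserts the missing row would also need to invalidate the entry, much as the clearCaches() hook added earlier in this series does.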
EamDbException("Error getting correlation type", ex); From 626712c7b0bdf9e1c6bbd4b22592a2a318d47b2e Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Wed, 19 Sep 2018 17:58:01 -0400 Subject: [PATCH 33/33] 4235 add back code to inherit highlighting to icon columns --- .../autopsy/corecomponents/DataResultViewerTable.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerTable.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerTable.java index 4b93eba8cc..25709df0df 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerTable.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerTable.java @@ -854,6 +854,8 @@ public class DataResultViewerTable extends AbstractDataResultViewer { "DataResultViewerTable.commentRenderer.noComment.toolTip=No comments found"}) @Override public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) { + Component component = super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column); + setBackground(component.getBackground()); //inherit highlighting for selection setHorizontalAlignment(CENTER); Object switchValue = null; if ((value instanceof NodeProperty)) { @@ -908,6 +910,8 @@ public class DataResultViewerTable extends AbstractDataResultViewer { @Override public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) { + Component component = super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column); + setBackground(component.getBackground()); //inherit highlighting for selection setHorizontalAlignment(CENTER); Object switchValue = null; if ((value instanceof NodeProperty)) { @@ -955,6 +959,8 @@ public class DataResultViewerTable extends AbstractDataResultViewer { @Override public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) { + Component component = super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column); + setBackground(component.getBackground()); //inherit highlighting for selection setHorizontalAlignment(LEFT); Object countValue = null; if ((value instanceof NodeProperty)) {