Merge branch 'develop' of https://github.com/sleuthkit/autopsy into develop

This commit is contained in:
Raman 2018-09-20 11:39:11 -04:00
commit 6cbca516ca
15 changed files with 697 additions and 97 deletions

View File

@ -98,16 +98,22 @@ public final class CasePreferences {
Properties props = new Properties();
props.load(inputStream);
String groupByDataSourceValue = props.getProperty(KEY_GROUP_BY_DATA_SOURCE);
switch (groupByDataSourceValue) {
case VALUE_TRUE:
groupItemsInTreeByDataSource = true;
break;
case VALUE_FALSE:
groupItemsInTreeByDataSource = false;
break;
default:
groupItemsInTreeByDataSource = null;
break;
if (groupByDataSourceValue != null) {
switch (groupByDataSourceValue) {
case VALUE_TRUE:
groupItemsInTreeByDataSource = true;
break;
case VALUE_FALSE:
groupItemsInTreeByDataSource = false;
break;
default:
logger.log(Level.WARNING, String.format("Unexpected value '%s' for key '%s'. Using 'null' instead.",
groupByDataSourceValue, KEY_GROUP_BY_DATA_SOURCE));
groupItemsInTreeByDataSource = null;
break;
}
} else {
groupItemsInTreeByDataSource = null;
}
} catch (IOException ex) {
logger.log(Level.SEVERE, "Error reading settings file", ex);

View File

@ -18,6 +18,9 @@
*/
package org.sleuthkit.autopsy.centralrepository.datamodel;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
@ -34,6 +37,8 @@ import java.time.LocalDate;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import org.sleuthkit.autopsy.casemodule.Case;
import static org.sleuthkit.autopsy.centralrepository.datamodel.EamDbUtil.updateSchemaVersion;
@ -57,7 +62,21 @@ abstract class AbstractSqlEamDb implements EamDb {
private int bulkArtifactsCount;
protected int bulkArtifactsThreshold;
private final Map<String, Collection<CorrelationAttributeInstance>> bulkArtifacts;
private static final int CASE_CACHE_TIMEOUT = 5;
private static final int DATA_SOURCE_CACHE_TIMEOUT = 5;
private static final Cache<Integer, CorrelationAttributeInstance.Type> typeCache = CacheBuilder.newBuilder().build();
private static final Cache<String, CorrelationCase> caseCacheByUUID = CacheBuilder.newBuilder()
.expireAfterWrite(CASE_CACHE_TIMEOUT, TimeUnit.MINUTES).
build();
private static final Cache<Integer, CorrelationCase> caseCacheById = CacheBuilder.newBuilder()
.expireAfterWrite(CASE_CACHE_TIMEOUT, TimeUnit.MINUTES).
build();
private static final Cache<String, CorrelationDataSource> dataSourceCacheByDeviceId = CacheBuilder.newBuilder()
.expireAfterWrite(DATA_SOURCE_CACHE_TIMEOUT, TimeUnit.MINUTES).
build();
private static final Cache<String, CorrelationDataSource> dataSourceCacheById = CacheBuilder.newBuilder()
.expireAfterWrite(DATA_SOURCE_CACHE_TIMEOUT, TimeUnit.MINUTES).
build();
// Maximum length for the value column in the instance tables
static final int MAX_VALUE_LENGTH = 256;
@ -88,7 +107,7 @@ abstract class AbstractSqlEamDb implements EamDb {
/**
* Add a new name/value pair in the db_info table.
*
* @param name Key to set
* @param name Key to set
* @param value Value to set
*
* @throws EamDbException
@ -149,10 +168,21 @@ abstract class AbstractSqlEamDb implements EamDb {
return value;
}
/**
 * Invalidate every entry in the caches that back EamDb lookups
 * (correlation types, cases, and data sources in both key spaces).
 */
protected final void clearCaches() {
    // Drop all cached correlation types.
    typeCache.invalidateAll();
    // Drop all cached cases (id-keyed and UUID-keyed caches).
    caseCacheById.invalidateAll();
    caseCacheByUUID.invalidateAll();
    // Drop all cached data sources (id-keyed and device-id-keyed caches).
    dataSourceCacheById.invalidateAll();
    dataSourceCacheByDeviceId.invalidateAll();
}
/**
* Update the value for a name in the name/value db_info table.
*
* @param name Name to find
* @param name Name to find
* @param value Value to assign to name.
*
* @throws EamDbException
@ -201,9 +231,9 @@ abstract class AbstractSqlEamDb implements EamDb {
+ "examiner_name, examiner_email, examiner_phone, notes) "
+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) "
+ getConflictClause();
ResultSet resultSet = null;
try {
preparedStatement = conn.prepareStatement(sql);
preparedStatement = conn.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS);
preparedStatement.setString(1, eamCase.getCaseUUID());
if (null == eamCase.getOrg()) {
@ -240,9 +270,21 @@ abstract class AbstractSqlEamDb implements EamDb {
}
preparedStatement.executeUpdate();
//update the case in the caches
resultSet = preparedStatement.getGeneratedKeys();
if (!resultSet.next()) {
throw new EamDbException(String.format("Failed to INSERT case %s in central repo", eamCase.getCaseUUID()));
}
int caseID = resultSet.getInt(1); //last_insert_rowid()
CorrelationCase correlationCase = new CorrelationCase(caseID, eamCase.getCaseUUID(), eamCase.getOrg(),
eamCase.getDisplayName(), eamCase.getCreationDate(), eamCase.getCaseNumber(), eamCase.getExaminerName(),
eamCase.getExaminerEmail(), eamCase.getExaminerPhone(), eamCase.getNotes());
caseCacheByUUID.put(eamCase.getCaseUUID(), correlationCase);
caseCacheById.put(caseID, correlationCase);
} catch (SQLException ex) {
throw new EamDbException("Error inserting new case.", ex); // NON-NLS
} finally {
EamDbUtil.closeResultSet(resultSet);
EamDbUtil.closeStatement(preparedStatement);
EamDbUtil.closeConnection(conn);
}
@ -339,6 +381,9 @@ abstract class AbstractSqlEamDb implements EamDb {
preparedStatement.setString(9, eamCase.getCaseUUID());
preparedStatement.executeUpdate();
//update the case in the cache
caseCacheById.put(eamCase.getID(), eamCase);
caseCacheByUUID.put(eamCase.getCaseUUID(), eamCase);
} catch (SQLException ex) {
throw new EamDbException("Error updating case.", ex); // NON-NLS
} finally {
@ -347,6 +392,25 @@ abstract class AbstractSqlEamDb implements EamDb {
}
}
/**
 * Retrieves Case details based on Case UUID from the central repo,
 * consulting the case cache before hitting the database.
 *
 * @param caseUUID unique identifier for a case
 *
 * @return The retrieved case, or null if the case does not exist in the
 *         central repository yet
 */
@Override
public CorrelationCase getCaseByUUID(String caseUUID) throws EamDbException {
    try {
        return caseCacheByUUID.get(caseUUID, () -> getCaseByUUIDFromCr(caseUUID));
    } catch (ExecutionException ex) {
        throw new EamDbException("Error getting autopsy case from Central repo", ex);
    } catch (CacheLoader.InvalidCacheLoadException ignored) {
        // The value loader returned null and Guava caches cannot store
        // nulls; this is normal when the case is not in the central repo yet.
        return null;
    }
}
/**
* Retrieves Case details based on Case UUID
*
@ -354,10 +418,7 @@ abstract class AbstractSqlEamDb implements EamDb {
*
* @return The retrieved case
*/
@Override
public CorrelationCase getCaseByUUID(String caseUUID) throws EamDbException {
// @@@ We should have a cache here...
private CorrelationCase getCaseByUUIDFromCr(String caseUUID) throws EamDbException {
Connection conn = connect();
CorrelationCase eamCaseResult = null;
@ -377,6 +438,10 @@ abstract class AbstractSqlEamDb implements EamDb {
if (resultSet.next()) {
eamCaseResult = getEamCaseFromResultSet(resultSet);
}
if (eamCaseResult != null) {
//Update the version in the other cache
caseCacheById.put(eamCaseResult.getID(), eamCaseResult);
}
} catch (SQLException ex) {
throw new EamDbException("Error getting case details.", ex); // NON-NLS
} finally {
@ -397,8 +462,24 @@ abstract class AbstractSqlEamDb implements EamDb {
*/
@Override
public CorrelationCase getCaseById(int caseId) throws EamDbException {
    try {
        return caseCacheById.get(caseId, () -> getCaseByIdFromCr(caseId));
    } catch (ExecutionException ex) {
        throw new EamDbException("Error getting autopsy case from Central repo", ex);
    } catch (CacheLoader.InvalidCacheLoadException ignored) {
        // The value loader returned null and Guava caches cannot store
        // nulls; this is normal when the case is not in the central repo yet.
        return null;
    }
}
/**
* Retrieves Case details based on Case ID
*
* @param caseId unique identifier for a case
*
* @return The retrieved case
*/
private CorrelationCase getCaseByIdFromCr(int caseId) throws EamDbException {
Connection conn = connect();
CorrelationCase eamCaseResult = null;
@ -410,7 +491,6 @@ abstract class AbstractSqlEamDb implements EamDb {
+ "FROM cases "
+ "LEFT JOIN organizations ON cases.org_id=organizations.id "
+ "WHERE cases.id=?";
try {
preparedStatement = conn.prepareStatement(sql);
preparedStatement.setInt(1, caseId);
@ -418,6 +498,10 @@ abstract class AbstractSqlEamDb implements EamDb {
if (resultSet.next()) {
eamCaseResult = getEamCaseFromResultSet(resultSet);
}
if (eamCaseResult != null) {
//Update the version in the other cache
caseCacheByUUID.put(eamCaseResult.getCaseUUID(), eamCaseResult);
}
} catch (SQLException ex) {
throw new EamDbException("Error getting case details.", ex); // NON-NLS
} finally {
@ -466,6 +550,32 @@ abstract class AbstractSqlEamDb implements EamDb {
return cases;
}
/**
 * Builds the key used to look up entries in dataSourceCacheByDeviceId.
 *
 * @param caseId             the id of the CorrelationCase in the Central
 *                           Repository
 * @param dataSourceDeviceId the device Id of the data source
 *
 * @return a String to be used as a key for the dataSourceCacheByDeviceId
 */
private static String getDataSourceByDeviceIdCacheKey(int caseId, String dataSourceDeviceId) {
    return new StringBuilder("Case") //NON-NLS
            .append(caseId)
            .append("DeviceId") //NON-NLS
            .append(dataSourceDeviceId)
            .toString();
}
/**
 * Builds the key used to look up entries in dataSourceCacheById.
 *
 * @param caseId       the id of the CorrelationCase in the Central
 *                     Repository
 * @param dataSourceId the id of the datasource in the central repository
 *
 * @return a String to be used as a key for the dataSourceCacheById
 */
private static String getDataSourceByIdCacheKey(int caseId, int dataSourceId) {
    return new StringBuilder("Case") //NON-NLS
            .append(caseId)
            .append("Id") //NON-NLS
            .append(dataSourceId)
            .toString();
}
/**
* Creates new Data Source in the database
*
@ -485,18 +595,27 @@ abstract class AbstractSqlEamDb implements EamDb {
String sql = "INSERT INTO data_sources(device_id, case_id, name) VALUES (?, ?, ?) "
+ getConflictClause();
ResultSet resultSet = null;
try {
preparedStatement = conn.prepareStatement(sql);
preparedStatement = conn.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS);
preparedStatement.setString(1, eamDataSource.getDeviceID());
preparedStatement.setInt(2, eamDataSource.getCaseID());
preparedStatement.setString(3, eamDataSource.getName());
preparedStatement.executeUpdate();
resultSet = preparedStatement.getGeneratedKeys();
if (!resultSet.next()) {
throw new EamDbException(String.format("Failed to INSERT data source %s in central repo", eamDataSource.getName()));
}
int dataSourceId = resultSet.getInt(1); //last_insert_rowid()
CorrelationDataSource dataSource = new CorrelationDataSource(eamDataSource.getCaseID(), dataSourceId, eamDataSource.getDeviceID(), eamDataSource.getName());
dataSourceCacheByDeviceId.put(getDataSourceByDeviceIdCacheKey(dataSource.getCaseID(), dataSource.getDeviceID()), dataSource);
dataSourceCacheById.put(getDataSourceByIdCacheKey(dataSource.getCaseID(), dataSource.getID()), dataSource);
} catch (SQLException ex) {
throw new EamDbException("Error inserting new data source.", ex); // NON-NLS
} finally {
EamDbUtil.closeResultSet(resultSet);
EamDbUtil.closeStatement(preparedStatement);
EamDbUtil.closeConnection(conn);
}
@ -505,18 +624,43 @@ abstract class AbstractSqlEamDb implements EamDb {
/**
 * Retrieves Data Source details based on data source device ID, consulting
 * the device-id-keyed data source cache before hitting the database.
 *
 * @param correlationCase    the current CorrelationCase used for ensuring
 *                           uniqueness of DataSource
 * @param dataSourceDeviceId the data source device ID number
 *
 * @return The data source, or null if it does not exist in the central
 *         repository yet
 *
 * @throws EamDbException if the case is null or the lookup fails
 */
@Override
public CorrelationDataSource getDataSource(CorrelationCase correlationCase, String dataSourceDeviceId) throws EamDbException {
    if (correlationCase == null) {
        throw new EamDbException("Correlation case is null");
    }
    final String cacheKey = getDataSourceByDeviceIdCacheKey(correlationCase.getID(), dataSourceDeviceId);
    try {
        return dataSourceCacheByDeviceId.get(cacheKey, () -> getDataSourceFromCr(correlationCase, dataSourceDeviceId));
    } catch (ExecutionException ex) {
        throw new EamDbException("Error getting data source from central repository", ex);
    } catch (CacheLoader.InvalidCacheLoadException ignored) {
        // The value loader returned null and Guava caches cannot store
        // nulls; this is normal when the data source is not in the central
        // repo yet.
        return null;
    }
}
/**
* Gets the Data Source details based on data source device ID from the
* central repository.
*
* @param correlationCase the current CorrelationCase used for ensuring
* uniqueness of DataSource
* @param dataSourceDeviceId the data source device ID number
*
* @return The data source
*
* @throws EamDbException
*/
private CorrelationDataSource getDataSourceFromCr(CorrelationCase correlationCase, String dataSourceDeviceId) throws EamDbException {
Connection conn = connect();
CorrelationDataSource eamDataSourceResult = null;
@ -533,6 +677,9 @@ abstract class AbstractSqlEamDb implements EamDb {
if (resultSet.next()) {
eamDataSourceResult = getEamDataSourceFromResultSet(resultSet);
}
if (eamDataSourceResult != null) {
dataSourceCacheById.put(getDataSourceByIdCacheKey(correlationCase.getID(), eamDataSourceResult.getID()), eamDataSourceResult);
}
} catch (SQLException ex) {
throw new EamDbException("Error getting data source.", ex); // NON-NLS
} finally {
@ -548,8 +695,8 @@ abstract class AbstractSqlEamDb implements EamDb {
* Retrieves Data Source details based on data source ID
*
* @param correlationCase the current CorrelationCase used for ensuring
* uniqueness of DataSource
* @param dataSourceId the data source ID number
* uniqueness of DataSource
* @param dataSourceId the data source ID number
*
* @return The data source
*/
@ -558,7 +705,26 @@ abstract class AbstractSqlEamDb implements EamDb {
if (correlationCase == null) {
throw new EamDbException("Correlation case is null");
}
try {
return dataSourceCacheById.get(getDataSourceByIdCacheKey(correlationCase.getID(), dataSourceId), () -> getDataSourceByIdFromCr(correlationCase, dataSourceId));
} catch (CacheLoader.InvalidCacheLoadException ignored) {
//The lambda value loader returned null, and the cache cannot store null values; this is normal when the dataSource does not exist in the central repo yet.
return null;
} catch (ExecutionException ex) {
throw new EamDbException("Error getting data source from central repository", ex);
}
}
/**
* Retrieves Data Source details based on data source ID
*
* @param correlationCase the current CorrelationCase used for ensuring
* uniqueness of DataSource
* @param dataSourceId the data source ID number
*
* @return The data source
*/
private CorrelationDataSource getDataSourceByIdFromCr(CorrelationCase correlationCase, int dataSourceId) throws EamDbException {
Connection conn = connect();
CorrelationDataSource eamDataSourceResult = null;
@ -575,6 +741,9 @@ abstract class AbstractSqlEamDb implements EamDb {
if (resultSet.next()) {
eamDataSourceResult = getEamDataSourceFromResultSet(resultSet);
}
if (eamDataSourceResult != null) {
dataSourceCacheByDeviceId.put(getDataSourceByDeviceIdCacheKey(correlationCase.getID(), eamDataSourceResult.getDeviceID()), eamDataSourceResult);
}
} catch (SQLException ex) {
throw new EamDbException("Error getting data source.", ex); // NON-NLS
} finally {
@ -715,7 +884,7 @@ abstract class AbstractSqlEamDb implements EamDb {
public List<CorrelationAttributeInstance> getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException {
String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value);
Connection conn = connect();
List<CorrelationAttributeInstance> artifactInstances = new ArrayList<>();
@ -764,7 +933,7 @@ abstract class AbstractSqlEamDb implements EamDb {
* Retrieves eamArtifact instances from the database that are associated
* with the aType and filePath
*
* @param aType EamArtifact.Type to search for
* @param aType EamArtifact.Type to search for
* @param filePath File path to search for
*
* @return List of 0 or more EamArtifactInstances
@ -835,7 +1004,7 @@ abstract class AbstractSqlEamDb implements EamDb {
* @param value The correlation value
*
* @return Number of artifact instances having ArtifactType and
* ArtifactValue.
* ArtifactValue.
*/
@Override
public Long getCountArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException {
@ -957,11 +1126,11 @@ abstract class AbstractSqlEamDb implements EamDb {
* associated with the caseDisplayName and dataSource of the given
* eamArtifact instance.
*
* @param caseUUID Case ID to search for
* @param caseUUID Case ID to search for
* @param dataSourceID Data source ID to search for
*
* @return Number of artifact instances having caseDisplayName and
* dataSource
* dataSource
*/
@Override
public Long getCountArtifactInstancesByCaseDataSource(String caseUUID, String dataSourceID) throws EamDbException {
@ -1224,7 +1393,7 @@ abstract class AbstractSqlEamDb implements EamDb {
* associated CorrelationAttribute object.
*
* @param eamArtifact The correlation attribute whose database instance will
* be updated.
* be updated.
*
* @throws EamDbException
*/
@ -1274,11 +1443,11 @@ abstract class AbstractSqlEamDb implements EamDb {
* Find a correlation attribute in the Central Repository database given the
* instance type, case, data source, value, and file path.
*
* @param type The type of instance.
* @param correlationCase The case tied to the instance.
* @param type The type of instance.
* @param correlationCase The case tied to the instance.
* @param correlationDataSource The data source tied to the instance.
* @param value The value tied to the instance.
* @param filePath The file path tied to the instance.
* @param value The value tied to the instance.
* @param filePath The file path tied to the instance.
*
* @return The correlation attribute if it exists; otherwise null.
*
@ -1287,7 +1456,7 @@ abstract class AbstractSqlEamDb implements EamDb {
@Override
public CorrelationAttributeInstance getCorrelationAttributeInstance(CorrelationAttributeInstance.Type type, CorrelationCase correlationCase,
CorrelationDataSource correlationDataSource, String value, String filePath) throws EamDbException, CorrelationAttributeNormalizationException {
if (correlationCase == null) {
throw new EamDbException("Correlation case is null");
}
@ -1306,7 +1475,7 @@ abstract class AbstractSqlEamDb implements EamDb {
try {
String normalizedValue = CorrelationAttributeNormalizer.normalize(type, value);
String tableName = EamDbUtil.correlationTypeToInstanceTableName(type);
String sql
= "SELECT id, known_status, comment FROM "
@ -1349,7 +1518,7 @@ abstract class AbstractSqlEamDb implements EamDb {
*
* @param eamArtifact Artifact containing exactly one (1) ArtifactInstance.
* @param knownStatus The status to change the artifact to. Should never be
* KNOWN
* KNOWN
*/
@Override
public void setAttributeInstanceKnownStatus(CorrelationAttributeInstance eamArtifact, TskData.FileKnown knownStatus) throws EamDbException {
@ -1548,7 +1717,7 @@ abstract class AbstractSqlEamDb implements EamDb {
artifactInstances.add(artifactInstance);
} catch (CorrelationAttributeNormalizationException ex) {
logger.log(Level.INFO, "Unable to get artifact instance from resultset.", ex);
}
}
}
} catch (SQLException ex) {
throw new EamDbException("Error getting notable artifact instances.", ex); // NON-NLS
@ -1571,7 +1740,7 @@ abstract class AbstractSqlEamDb implements EamDb {
*/
@Override
public Long getCountArtifactInstancesKnownBad(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException {
String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value);
Connection conn = connect();
@ -1612,13 +1781,13 @@ abstract class AbstractSqlEamDb implements EamDb {
* @param value Value to search for
*
* @return List of cases containing this artifact with instances marked as
* bad
* bad
*
* @throws EamDbException
*/
@Override
public List<String> getListCasesHavingArtifactInstancesKnownBad(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException {
String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value);
Connection conn = connect();
@ -1778,7 +1947,7 @@ abstract class AbstractSqlEamDb implements EamDb {
public boolean isValueInReferenceSet(String value, int referenceSetID, int correlationTypeID) throws EamDbException, CorrelationAttributeNormalizationException {
String normalizeValued = CorrelationAttributeNormalizer.normalize(this.getCorrelationTypeById(correlationTypeID), value);
Connection conn = connect();
Long matchingInstances = 0L;
@ -1816,10 +1985,10 @@ abstract class AbstractSqlEamDb implements EamDb {
*/
@Override
public boolean isArtifactKnownBadByReference(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException {
//this should be done here so that we can be certain that aType and value are valid before we proceed
String normalizeValued = CorrelationAttributeNormalizer.normalize(aType, value);
// TEMP: Only support file correlation type
if (aType.getId() != CorrelationAttributeInstance.FILES_TYPE_ID) {
return false;
@ -1832,7 +2001,7 @@ abstract class AbstractSqlEamDb implements EamDb {
ResultSet resultSet = null;
String sql = "SELECT count(*) FROM %s WHERE value=? AND known_status=?";
try {
try {
preparedStatement = conn.prepareStatement(String.format(sql, EamDbUtil.correlationTypeToReferenceTableName(aType)));
preparedStatement.setString(1, normalizeValued);
preparedStatement.setByte(2, TskData.FileKnown.BAD.getFileKnownValue());
@ -1853,7 +2022,7 @@ abstract class AbstractSqlEamDb implements EamDb {
/**
* Process the Artifact instance in the EamDb
*
* @param type EamArtifact.Type to search for
* @param type EamArtifact.Type to search for
* @param instanceTableCallback callback to process the instance
*
* @throws EamDbException
@ -1892,9 +2061,10 @@ abstract class AbstractSqlEamDb implements EamDb {
/**
* Process the Artifact instance in the EamDb give a where clause
*
* @param type EamArtifact.Type to search for
* @param type EamArtifact.Type to search for
* @param instanceTableCallback callback to process the instance
* @param whereClause query string to execute
* @param whereClause query string to execute
*
* @throws EamDbException
*/
@Override
@ -2076,7 +2246,7 @@ abstract class AbstractSqlEamDb implements EamDb {
* Update an existing organization.
*
* @param updatedOrganization the values the Organization with the same ID
* will be updated to in the database.
* will be updated to in the database.
*
* @throws EamDbException
*/
@ -2279,7 +2449,8 @@ abstract class AbstractSqlEamDb implements EamDb {
* Add a new reference instance
*
* @param eamGlobalFileInstance The reference instance to add
* @param correlationType Correlation Type that this Reference Instance is
* @param correlationType Correlation Type that this Reference
* Instance is
*
* @throws EamDbException
*/
@ -2407,7 +2578,7 @@ abstract class AbstractSqlEamDb implements EamDb {
/**
* Get all reference entries having a given correlation type and value
*
* @param aType Type to use for matching
* @param aType Type to use for matching
* @param aValue Value to use for matching
*
* @return List of all global file instances with a type and value
@ -2440,7 +2611,7 @@ abstract class AbstractSqlEamDb implements EamDb {
EamDbUtil.closeResultSet(resultSet);
EamDbUtil.closeConnection(conn);
}
return globalFileInstances;
}
@ -2607,7 +2778,7 @@ abstract class AbstractSqlEamDb implements EamDb {
* artifacts.
*
* @return List of enabled EamArtifact.Type's. If none are defined in the
* database, the default list will be returned.
* database, the default list will be returned.
*
* @throws EamDbException
*/
@ -2642,7 +2813,7 @@ abstract class AbstractSqlEamDb implements EamDb {
* correlate artifacts.
*
* @return List of supported EamArtifact.Type's. If none are defined in the
* database, the default list will be returned.
* database, the default list will be returned.
*
* @throws EamDbException
*/
@ -2694,7 +2865,7 @@ abstract class AbstractSqlEamDb implements EamDb {
preparedStatement.setInt(4, aType.isEnabled() ? 1 : 0);
preparedStatement.setInt(5, aType.getId());
preparedStatement.executeUpdate();
typeCache.put(aType.getId(), aType);
} catch (SQLException ex) {
throw new EamDbException("Error updating correlation type.", ex); // NON-NLS
} finally {
@ -2715,6 +2886,26 @@ abstract class AbstractSqlEamDb implements EamDb {
*/
/**
 * Get the EamArtifact.Type that has the given Type.Id, consulting the type
 * cache before querying the central repository.
 *
 * @param typeId Type.Id of the Correlation Type to get
 *
 * @return EamArtifact.Type or null if it doesn't exist.
 *
 * @throws EamDbException if the lookup against the central repo fails
 */
@Override
public CorrelationAttributeInstance.Type getCorrelationTypeById(int typeId) throws EamDbException {
    try {
        // BUG FIX: key the cache on the requested typeId, not the fixed
        // FILES_TYPE_ID constant — otherwise every type id shares one cache
        // entry and lookups can return the wrong correlation type.
        return typeCache.get(typeId, () -> getCorrelationTypeByIdFromCr(typeId));
    } catch (CacheLoader.InvalidCacheLoadException ignored) {
        // The value loader returned null and Guava caches cannot store
        // nulls; this is normal when the correlation type does not exist in
        // the central repo yet.
        return null;
    } catch (ExecutionException ex) {
        throw new EamDbException("Error getting correlation type", ex);
    }
}
/**
* Get the EamArtifact.Type that has the given Type.Id from the central repo
*
* @param typeId Type.Id of Correlation Type to get
*
* @return EamArtifact.Type or null if it doesn't exist.
*
* @throws EamDbException
*/
private CorrelationAttributeInstance.Type getCorrelationTypeByIdFromCr(int typeId) throws EamDbException {
Connection conn = connect();
CorrelationAttributeInstance.Type aType;
@ -2746,7 +2937,7 @@ abstract class AbstractSqlEamDb implements EamDb {
* Convert a ResultSet to a EamCase object
*
* @param resultSet A resultSet with a set of values to create a EamCase
* object.
* object.
*
* @return fully populated EamCase object, or null
*
@ -2816,7 +3007,7 @@ abstract class AbstractSqlEamDb implements EamDb {
* Convert a ResultSet to a EamArtifactInstance object
*
* @param resultSet A resultSet with a set of values to create a
* EamArtifactInstance object.
* EamArtifactInstance object.
*
* @return fully populated EamArtifactInstance, or null
*

View File

@ -1,7 +1,7 @@
/*
* Central Repository
*
* Copyright 2015-2017 Basis Technology Corp.
* Copyright 2015-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -78,6 +78,7 @@ final class PostgresEamDb extends AbstractSqlEamDb {
connectionPool.close();
connectionPool = null; // force it to be re-created on next connect()
}
clearCaches();
}
} catch (SQLException ex) {
throw new EamDbException("Failed to close existing database connections.", ex); // NON-NLS

View File

@ -1,7 +1,7 @@
/*
* Central Repository
*
* Copyright 2015-2017 Basis Technology Corp.
* Copyright 2015-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -85,6 +85,7 @@ final class SqliteEamDb extends AbstractSqlEamDb {
connectionPool.close();
connectionPool = null; // force it to be re-created on next connect()
}
clearCaches();
}
} catch (SQLException ex) {
throw new EamDbException("Failed to close existing database connections.", ex); // NON-NLS

View File

@ -381,7 +381,7 @@ class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer {
Bundle.SQLiteViewer_errorMessage_failedToQueryDatabase());
} catch (FileReaderInitException ex) {
logger.log(Level.SEVERE, String.format(
"Failed to create a SQLiteReader '%s' (objId=%d)", //NON-NLS
"Failed to create a SQLiteReader for file: '%s' (objId=%d)", //NON-NLS
sqliteDbFile.getName(), sqliteDbFile.getId()), ex);
}
}

View File

@ -854,6 +854,8 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
"DataResultViewerTable.commentRenderer.noComment.toolTip=No comments found"})
@Override
public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
Component component = super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
setBackground(component.getBackground()); //inherit highlighting for selection
setHorizontalAlignment(CENTER);
Object switchValue = null;
if ((value instanceof NodeProperty)) {
@ -908,6 +910,8 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
@Override
public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
Component component = super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
setBackground(component.getBackground()); //inherit highlighting for selection
setHorizontalAlignment(CENTER);
Object switchValue = null;
if ((value instanceof NodeProperty)) {
@ -955,6 +959,8 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
@Override
public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
Component component = super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
setBackground(component.getBackground()); //inherit highlighting for selection
setHorizontalAlignment(LEFT);
Object countValue = null;
if ((value instanceof NodeProperty)) {

View File

@ -39,7 +39,7 @@ import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestServices;
import com.monitorjbl.xlsx.StreamingReader;
import org.apache.poi.hssf.OldExcelFormatException;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.AbstractFile;
/**
* Reads excel files and implements the abstract reader api for interfacing with
@ -58,7 +58,7 @@ public final class ExcelReader extends AbstractReader {
private String LOCAL_DISK_PATH;
private String ACTIVE_MIME_TYPE;
public ExcelReader(Content file, String mimeType)
public ExcelReader(AbstractFile file, String mimeType)
throws FileReaderInitException {
super(file);
this.LOCAL_DISK_PATH = super.getLocalDiskPath();

View File

@ -19,7 +19,7 @@
package org.sleuthkit.autopsy.tabulardatareader;
import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.AbstractFile;
/**
* Factory for creating the correct reader given the mime type of a file.
@ -44,7 +44,7 @@ public final class FileReaderFactory {
* @throws
* org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException
*/
public static AbstractReader createReader(Content file, String mimeType) throws FileReaderInitException {
public static AbstractReader createReader(AbstractFile file, String mimeType) throws FileReaderInitException {
switch (mimeType) {
case "application/x-sqlite3":
return new SQLiteReader(file);

View File

@ -53,28 +53,27 @@ import org.sleuthkit.datamodel.TskCoreException;
public final class SQLiteReader extends AbstractReader {
private final Connection connection;
private final static IngestServices services = IngestServices.getInstance();
private final static Logger logger = services.getLogger(SQLiteReader.class.getName());
private final static IngestServices ingestServices = IngestServices.getInstance();
private final static Logger logger = ingestServices.getLogger(SQLiteReader.class.getName());
/**
* Writes data source file contents to local disk and opens a sqlite JDBC
* connection.
*
* @param sqliteDbFile Data source content
*
* @throws
* org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException
* connection.
*
* @param sqliteDbFile Data source abstract file
* @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException
*/
public SQLiteReader(Content sqliteDbFile) throws FileReaderInitException {
public SQLiteReader(AbstractFile sqliteDbFile) throws FileReaderInitException {
super(sqliteDbFile);
try {
final String localDiskPath = super.getLocalDiskPath();
// Look for any meta files associated with this DB - WAL, SHM, etc.
findAndCopySQLiteMetaFile(sqliteDbFile, sqliteDbFile.getName() + "-wal");
findAndCopySQLiteMetaFile(sqliteDbFile, sqliteDbFile.getName() + "-shm");
connection = getDatabaseConnection(super.getLocalDiskPath());
} catch (ClassNotFoundException | SQLException | IOException
| NoCurrentCaseException | TskCoreException ex) {
connection = getDatabaseConnection(localDiskPath);
} catch (ClassNotFoundException | SQLException |IOException |
NoCurrentCaseException | TskCoreException ex) {
throw new FileReaderInitException(ex);
}
}

View File

@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.commonfilessearch;
import java.nio.file.Path;
import java.sql.SQLException;
import java.util.Map;
import junit.framework.Assert;
import junit.framework.Test;
import org.netbeans.junit.NbModuleSuite;
@ -111,9 +110,8 @@ public class CommonAttributeSearchInterCaseTests extends NbTestCase {
private void assertResultsAreOfType(CorrelationAttributeInstance.Type type) {
try {
Map<Long, String> dataSources = this.utils.getDataSourceMap();
AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(dataSources, false, false, type, 0);
AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, type, 0);
CommonAttributeSearchResults metadata = builder.findMatches();
@ -146,22 +144,21 @@ public class CommonAttributeSearchInterCaseTests extends NbTestCase {
*/
public void testTwo() {
try {
Map<Long, String> dataSources = this.utils.getDataSourceMap();
AbstractCommonAttributeSearcher builder;
CommonAttributeSearchResults metadata;
builder = new AllInterCaseCommonAttributeSearcher(dataSources, false, false, this.utils.USB_ID_TYPE, 100);
builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.USB_ID_TYPE, 100);
metadata = builder.findMatches();
metadata.size();
//assertTrue("This should yield 13 results.", verifyInstanceCount(metadata, 13));
builder = new AllInterCaseCommonAttributeSearcher(dataSources, false, false, this.utils.USB_ID_TYPE, 20);
builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.USB_ID_TYPE, 20);
metadata = builder.findMatches();
metadata.size();
//assertTrue("This should yield no results.", verifyInstanceCount(metadata, 0));
builder = new AllInterCaseCommonAttributeSearcher(dataSources, false, false, this.utils.USB_ID_TYPE, 90);
builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.USB_ID_TYPE, 90);
metadata = builder.findMatches();
metadata.size();
//assertTrue("This should yield 2 results.", verifyInstanceCount(metadata, 2));

View File

@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.commonfilessearch;
import java.nio.file.Path;
import java.sql.SQLException;
import java.util.Map;
import junit.framework.Test;
import org.netbeans.junit.NbModuleSuite;
import org.netbeans.junit.NbTestCase;
@ -96,10 +95,8 @@ public class IngestedWithHashAndFileTypeInterCaseTests extends NbTestCase {
*/
public void testOne() {
try {
Map<Long, String> dataSources = this.utils.getDataSourceMap();
//note that the params false and false are presently meaningless because that feature is not supported yet
AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(dataSources, false, false, this.utils.FILE_TYPE, 0);
AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.FILE_TYPE, 0);
CommonAttributeSearchResults metadata = builder.findMatches();
assertTrue("Results should not be empty", metadata.size() != 0);
@ -146,11 +143,10 @@ public class IngestedWithHashAndFileTypeInterCaseTests extends NbTestCase {
*/
public void testTwo() {
try {
Map<Long, String> dataSources = this.utils.getDataSourceMap();
int matchesMustAlsoBeFoundInThisCase = this.utils.getCaseMap().get(CASE2);
CorrelationAttributeInstance.Type fileType = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(0);
AbstractCommonAttributeSearcher builder = new SingleInterCaseCommonAttributeSearcher(matchesMustAlsoBeFoundInThisCase, dataSources, false, false, fileType, 0);
AbstractCommonAttributeSearcher builder = new SingleInterCaseCommonAttributeSearcher(matchesMustAlsoBeFoundInThisCase, false, false, fileType, 0);
CommonAttributeSearchResults metadata = builder.findMatches();
@ -199,11 +195,10 @@ public class IngestedWithHashAndFileTypeInterCaseTests extends NbTestCase {
*/
public void testThree(){
try {
Map<Long, String> dataSources = this.utils.getDataSourceMap();
//note that the params false and false are presently meaningless because that feature is not supported yet
CorrelationAttributeInstance.Type fileType = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(0);
AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(dataSources, false, false, fileType, 50);
AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, fileType, 50);
CommonAttributeSearchResults metadata = builder.findMatches();

View File

@ -30,7 +30,7 @@ import org.sleuthkit.datamodel.Content;
abstract class ContentTextExtractor implements TextExtractor<Content> {
static final List<String> BLOB_MIME_TYPES
static final List<String> BINARY_MIME_TYPES
= Arrays.asList(
//ignore binary blob data, for which string extraction will be used
"application/octet-stream", //NON-NLS

View File

@ -249,6 +249,9 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
textExtractors = new ArrayList<>();
//order matters, more specific extractors first
textExtractors.add(new HtmlTextExtractor());
//Add sqlite text extractor to be default for sqlite files, since tika struggles
//with them. See SqliteTextExtractor class for specifics
textExtractors.add(new SqliteTextExtractor());
textExtractors.add(new TikaTextExtractor());
indexer = new Indexer();

View File

@ -0,0 +1,399 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.keywordsearch;
import com.google.common.io.CharSource;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import javax.swing.text.Segment;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.tabulardatareader.AbstractReader;
import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderInitException;
import org.sleuthkit.datamodel.Content;
import org.apache.commons.lang3.StringUtils;
import org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException;
import org.sleuthkit.autopsy.tabulardatareader.FileReaderFactory;
import org.sleuthkit.datamodel.AbstractFile;
/**
* Dedicated SqliteTextExtractor to solve the problems associated with Tika's
* Sqlite parser.
*
* Tika problems:
* 1) Tika fails to open virtual tables
* 2) Tika fails to open tables with spaces in table name
* 3) Tika fails to include the table names in output (except for the first table it parses)
*/
class SqliteTextExtractor extends ContentTextExtractor {

    private static final String SQLITE_MIMETYPE = "application/x-sqlite3";
    private static final Logger logger = Logger.getLogger(SqliteTextExtractor.class.getName());
    private static final CharSequence EMPTY_CHARACTER_SEQUENCE = "";

    @Override
    boolean isContentTypeSpecific() {
        return true;
    }

    @Override
    public boolean isDisabled() {
        return false;
    }

    @Override
    public void logWarning(String msg, Exception exception) {
        logger.log(Level.WARNING, msg, exception); //NON-NLS
    }

    /**
     * Supports only the sqlite mimetypes
     *
     * @param file           Content file
     * @param detectedFormat Mimetype of content file
     *
     * @return true if x-sqlite3
     */
    @Override
    boolean isSupported(Content file, String detectedFormat) {
        return SQLITE_MIMETYPE.equals(detectedFormat);
    }

    /**
     * Returns an input stream that will read from a sqlite database.
     *
     * @param source Content file
     *
     * @return An InputStream that reads from a Sqlite database.
     *
     * @throws
     * org.sleuthkit.autopsy.keywordsearch.TextExtractor.TextExtractorException
     */
    @Override
    public Reader getReader(Content source) throws TextExtractorException {
        //Firewall for any content that is not an AbstractFile
        if (!AbstractFile.class.isInstance(source)) {
            try {
                return CharSource.wrap(EMPTY_CHARACTER_SEQUENCE).openStream();
            } catch (IOException ex) {
                throw new TextExtractorException(
                        String.format("Encountered an issue wrapping blank string" //NON-NLS
                                + " with CharSource for non-abstract file with id: [%s]," //NON-NLS
                                + " name: [%s].", source.getId(), source.getName()), ex); //NON-NLS
            }
        }

        try (AbstractReader reader = FileReaderFactory.createReader(
                (AbstractFile) source, SQLITE_MIMETYPE)) {
            final CharSequence databaseContent = getDatabaseContents(source, reader);
            //CharSource will maintain unicode strings correctly
            return CharSource.wrap(databaseContent).openStream();
        } catch (FileReaderInitException | IOException ex) {
            //This catch also covers the IOException thrown by openStream(), so
            //the message names both possible failure points rather than only
            //the reader initialization.
            throw new TextExtractorException(
                    String.format("Encountered an error initializing a SQLiteReader" //NON-NLS
                            + " or opening a reader stream" //NON-NLS
                            + " for AbstractFile with id: [%s], name: [%s].", //NON-NLS
                            source.getId(), source.getName()), ex);
        } catch (FileReaderException ex) {
            throw new TextExtractorException(
                    String.format("Could not get contents from database " //NON-NLS
                            + "tables for AbstractFile with id [%s], name: [%s].", //NON-NLS
                            source.getId(), source.getName()), ex);
        }
    }

    /**
     * Queries the sqlite database and adds all tables and rows to a
     * TableBuilder, which formats the strings into a table view for clean
     * results while searching for keywords in the application.
     *
     * @param source Sqlite file source
     * @param reader Sqlite reader for the content source
     *
     * @return The formatted contents of every table in the database
     *
     * @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
     */
    private CharSequence getDatabaseContents(Content source, AbstractReader reader) throws FileReaderException {
        Collection<String> databaseStorage = new LinkedList<>();
        //int, not Integer: avoids pointless autoboxing of the returned count
        int charactersCopied = loadDatabaseIntoCollection(databaseStorage,
                reader, source);
        return toCharSequence(databaseStorage, charactersCopied);
    }

    /**
     * Iterates all of the tables and populates a TableBuilder with all of the
     * rows from each table. Each formatted table string is added to the list of
     * contents.
     *
     * @param databaseStorage Collection that receives one formatted string per
     *                        table
     * @param reader          SqliteReader for interfacing with the database
     * @param source          Source database file for logging
     *
     * @return Total number of characters added to databaseStorage, used to
     *         presize the final character buffer
     *
     * @throws org.sleuthkit.autopsy.tabulardatareader.AbstractReader.FileReaderException
     */
    private int loadDatabaseIntoCollection(Collection<String> databaseStorage,
            AbstractReader reader, Content source) throws FileReaderException {
        //Will throw a FileReaderException if table schemas are unattainable
        Map<String, String> tables = reader.getTableSchemas();

        int charactersCopied = 0;
        for (String tableName : tables.keySet()) {
            TableBuilder tableBuilder = new TableBuilder();
            tableBuilder.setTableName(tableName);

            try {
                //Catch any exception at a particular table, we want to ensure we grab
                //content from as many tables as possible
                List<Map<String, Object>> rowsInTable = reader.getRowsFromTable(tableName);
                if (!rowsInTable.isEmpty()) {
                    tableBuilder.addHeader(new ArrayList<>(rowsInTable.get(0).keySet()));
                    for (Map<String, Object> row : rowsInTable) {
                        tableBuilder.addRow(row.values());
                    }
                }
            } catch (FileReaderException ex) {
                logger.log(Level.WARNING, String.format(
                        "Error attempting to read file table: [%s]" //NON-NLS
                        + " for file: [%s] (id=%d).", tableName, //NON-NLS
                        source.getName(), source.getId()), ex);
            }

            String formattedTable = tableBuilder.toString();
            charactersCopied += formattedTable.length();
            databaseStorage.add(formattedTable);
        }
        return charactersCopied;
    }

    /**
     * Copy elements from collection (which contains formatted database tables)
     * into a CharSequence so that it can be wrapped and used by the Google
     * CharSource lib.
     *
     * @param databaseStorage Collection containing database contents
     * @param characterCount  Number of characters needed to be allocated in the
     *                        buffer so that all of the contents in the
     *                        collection can be copied over.
     *
     * @return CharSequence of the formatted database contents
     */
    private CharSequence toCharSequence(Collection<String> databaseStorage,
            int characterCount) {

        final char[] databaseCharArray = new char[characterCount];

        int currIndex = 0;
        for (String table : databaseStorage) {
            System.arraycopy(table.toCharArray(), 0, databaseCharArray,
                    currIndex, table.length());
            currIndex += table.length();
        }

        //Segment class does not make an internal copy of the character array
        //being passed in (more efficient). It also implements a CharSequence
        //necessary for the CharSource class to create a compatible reader.
        return new Segment(databaseCharArray, 0, characterCount);
    }

    /**
     * Formats input so that it reads as a table in the console or in a text
     * viewer
     */
    private class TableBuilder {

        private final List<String[]> rows = new LinkedList<>();
        private Integer charactersAdded = 0;

        //Formatters
        private static final String HORIZONTAL_DELIMITER = "-";
        private static final String VERTICAL_DELIMITER = "|";
        private static final String HEADER_CORNER = "+";
        private static final String TAB = "\t";
        private static final String NEW_LINE = "\n";
        private static final String SPACE = " ";

        //Number of escape sequences in the header row
        private static final int ESCAPE_SEQUENCES = 4;

        private String tableName = "";

        /**
         * Add the section to the top left corner of the table. This is where
         * the name of the table should go.
         *
         * @param tableName Table name
         */
        public void setTableName(String tableName) {
            this.tableName = tableName + NEW_LINE + NEW_LINE;
        }

        /**
         * Creates a border given the length param.
         *
         * @return Ex: \t+----------------------+\n
         */
        private String createBorder(int length) {
            return TAB + HEADER_CORNER + StringUtils.repeat(
                    HORIZONTAL_DELIMITER, length) + HEADER_CORNER + NEW_LINE;
        }

        /**
         * Add header row to underlying list collection, which will be formatted
         * when toString is called.
         *
         * @param vals
         */
        public void addHeader(Collection<Object> vals) {
            addRow(vals);
        }

        /**
         * Add a row to the underlying list collection, which will be formatted
         * when toString is called.
         *
         * @param vals
         */
        public void addRow(Collection<Object> vals) {
            List<String> rowValues = new ArrayList<>();
            vals.forEach((val) -> {
                //Sqlite cells may legitimately be NULL; substitute an empty
                //string instead of throwing a NullPointerException, which
                //would previously abort extraction of the whole table.
                String cell = (val == null) ? "" : val.toString();
                rowValues.add(cell);
                charactersAdded += cell.length();
            });
            rows.add(rowValues.toArray(
                    new String[rowValues.size()]));
        }

        /**
         * Gets the max width of a cell in each column and the max number of
         * columns in any given row. This ensures that there are enough columns
         * and enough space for even the longest entry.
         *
         * @return array of column widths
         */
        private int[] getMaxWidthPerColumn() {
            int maxNumberOfColumns = 0;
            for (String[] row : rows) {
                maxNumberOfColumns = Math.max(
                        maxNumberOfColumns, row.length);
            }

            int[] widths = new int[maxNumberOfColumns];
            for (String[] row : rows) {
                for (int colNum = 0; colNum < row.length; colNum++) {
                    widths[colNum] = Math.max(
                            widths[colNum],
                            row[colNum].length()
                    );
                }
            }
            return widths;
        }

        /**
         * Returns a string version of the table, with all of the formatters and
         * escape sequences necessary to print nicely in the console output.
         *
         * @return
         */
        @Override
        public String toString() {
            StringBuilder outputTable = new StringBuilder(charactersAdded);

            int[] colMaxWidths = getMaxWidthPerColumn();
            int borderLength = 0;

            Iterator<String[]> rowIterator = rows.iterator();
            if (rowIterator.hasNext()) {
                //Length of the header defines the table boundaries
                borderLength = appendFormattedHeader(rowIterator.next(),
                        colMaxWidths, outputTable);

                while (rowIterator.hasNext()) {
                    appendFormattedRow(rowIterator.next(), colMaxWidths, outputTable);
                }

                outputTable.insert(0, tableName);
                outputTable.append(createBorder(borderLength));
                outputTable.append(NEW_LINE);
            }

            return outputTable.toString();
        }

        /**
         * Outputs a fully formatted row in the table
         *
         * Example: \t| John | 12345678 | john@email.com |\n
         *
         * @param row          Array containing unformatted row content
         * @param colMaxWidths An array of column maximum widths, so that
         *                     everything is pretty printed.
         * @param outputTable  Buffer that formatted contents are written to
         */
        private void appendFormattedRow(String[] row,
                int[] colMaxWidths, StringBuilder outputTable) {
            outputTable.append(TAB);
            for (int colNum = 0; colNum < row.length; colNum++) {
                outputTable.append(VERTICAL_DELIMITER);
                outputTable.append(SPACE);
                outputTable.append(StringUtils.rightPad(
                        StringUtils.defaultString(row[colNum]),
                        colMaxWidths[colNum]));
                outputTable.append(SPACE);
            }
            outputTable.append(VERTICAL_DELIMITER);
            outputTable.append(NEW_LINE);
        }

        /**
         * Adds a fully formatted header to the table builder and returns the
         * length of this header. The length of the header is needed to set the
         * table boundaries
         *
         * Example: \t+----------------------+\n
         *          \t| Email | Phone | Name |\n
         *          \t+----------------------+\n
         *
         * @param row          Array of contents in each column
         * @param colMaxWidths Widths for each column in the table
         * @param outputTable  Output stringbuilder
         *
         * @return length of the formatted header, this length will be needed to
         *         correctly print the bottom table border.
         */
        private int appendFormattedHeader(String[] row, int[] colMaxWidths, StringBuilder outputTable) {
            appendFormattedRow(row, colMaxWidths, outputTable);
            //Printable table dimensions are equal to the length of the header minus
            //the number of escape sequences used to for formatting.
            int borderLength = outputTable.length() - ESCAPE_SEQUENCES;
            String border = createBorder(borderLength);

            //Surround the header with borders above and below.
            outputTable.insert(0, border);
            outputTable.append(border);

            return borderLength;
        }
    }
}

View File

@ -57,6 +57,7 @@ class TikaTextExtractor extends ContentTextExtractor {
static final private Logger logger = Logger.getLogger(TikaTextExtractor.class.getName());
private final ExecutorService tikaParseExecutor = Executors.newSingleThreadExecutor();
private static final String SQLITE_MIMETYPE = "application/x-sqlite3";
private final AutoDetectParser parser = new AutoDetectParser();
@ -194,9 +195,10 @@ class TikaTextExtractor extends ContentTextExtractor {
@Override
public boolean isSupported(Content content, String detectedFormat) {
if (detectedFormat == null
|| ContentTextExtractor.BLOB_MIME_TYPES.contains(detectedFormat) //any binary unstructured blobs (string extraction will be used)
|| ContentTextExtractor.BINARY_MIME_TYPES.contains(detectedFormat) //any binary unstructured blobs (string extraction will be used)
|| ContentTextExtractor.ARCHIVE_MIME_TYPES.contains(detectedFormat)
|| (detectedFormat.startsWith("video/") && !detectedFormat.equals("video/x-flv")) //skip video other than flv (tika supports flv only) //NON-NLS
|| detectedFormat.equals(SQLITE_MIMETYPE) //Skip sqlite files, Tika cannot handle virtual tables and will fail with an exception. //NON-NLS
) {
return false;
}