diff --git a/Core/src/org/sleuthkit/autopsy/actions/DeleteFileBlackboardArtifactTagAction.java b/Core/src/org/sleuthkit/autopsy/actions/DeleteFileBlackboardArtifactTagAction.java index 86696b0c63..dd2108c1fc 100644 --- a/Core/src/org/sleuthkit/autopsy/actions/DeleteFileBlackboardArtifactTagAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/DeleteFileBlackboardArtifactTagAction.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2017 Basis Technology Corp. + * Copyright 2017-2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -38,7 +38,6 @@ import org.openide.util.actions.Presenter; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.services.TagsManager; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifactTag; import org.sleuthkit.datamodel.TagName; @@ -54,7 +53,7 @@ import org.sleuthkit.datamodel.TskData; }) public class DeleteFileBlackboardArtifactTagAction extends AbstractAction implements Presenter.Popup { - private static final Logger LOGGER = Logger.getLogger(DeleteFileBlackboardArtifactTagAction.class.getName()); + private static final Logger logger = Logger.getLogger(DeleteFileBlackboardArtifactTagAction.class.getName()); private static final long serialVersionUID = 1L; private static final String MENU_TEXT = NbBundle.getMessage(DeleteFileBlackboardArtifactTagAction.class, @@ -98,17 +97,11 @@ public class DeleteFileBlackboardArtifactTagAction extends AbstractAction implem protected Void doInBackground() throws Exception { TagsManager tagsManager = Case.getCurrentCase().getServices().getTagsManager(); - // Pull the from the global context to avoid unnecessary calls - // to the database. 
- final Collection selectedFilesList - = new HashSet<>(Utilities.actionsGlobalContext().lookupAll(AbstractFile.class)); - AbstractFile file = selectedFilesList.iterator().next(); - try { - LOGGER.log(Level.INFO, "Removing tag {0} from {1}", new Object[]{tagName.getDisplayName(), file.getName()}); //NON-NLS + logger.log(Level.INFO, "Removing tag {0} from {1}", new Object[]{tagName.getDisplayName(), artifactTag.getContent().getName()}); //NON-NLS tagsManager.deleteBlackboardArtifactTag(artifactTag); } catch (TskCoreException tskCoreException) { - LOGGER.log(Level.SEVERE, "Error untagging artifact", tskCoreException); //NON-NLS + logger.log(Level.SEVERE, "Error untagging artifact", tskCoreException); //NON-NLS Platform.runLater(() -> new Alert(Alert.AlertType.ERROR, Bundle.DeleteFileBlackboardArtifactTagAction_deleteTag_alert(artifactId)).show() ); @@ -122,7 +115,7 @@ public class DeleteFileBlackboardArtifactTagAction extends AbstractAction implem try { get(); } catch (InterruptedException | ExecutionException ex) { - LOGGER.log(Level.SEVERE, "Unexpected exception while untagging artifact", ex); //NON-NLS + logger.log(Level.SEVERE, "Unexpected exception while untagging artifact", ex); //NON-NLS } } }.execute(); diff --git a/Core/src/org/sleuthkit/autopsy/actions/DeleteFileContentTagAction.java b/Core/src/org/sleuthkit/autopsy/actions/DeleteFileContentTagAction.java index 5c8d4abb74..c11ffd39fa 100644 --- a/Core/src/org/sleuthkit/autopsy/actions/DeleteFileContentTagAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/DeleteFileContentTagAction.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2017 Basis Technology Corp. + * Copyright 2017-2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -52,7 +52,7 @@ import org.sleuthkit.datamodel.TskData; }) public class DeleteFileContentTagAction extends AbstractAction implements Presenter.Popup { - private static final Logger LOGGER = Logger.getLogger(DeleteFileContentTagAction.class.getName()); + private static final Logger logger = Logger.getLogger(DeleteFileContentTagAction.class.getName()); private static final long serialVersionUID = 1L; private static final String MENU_TEXT = NbBundle.getMessage(DeleteFileContentTagAction.class, @@ -97,17 +97,11 @@ public class DeleteFileContentTagAction extends AbstractAction implements Presen protected Void doInBackground() throws Exception { TagsManager tagsManager = Case.getCurrentCase().getServices().getTagsManager(); - // Pull the from the global context to avoid unnecessary calls - // to the database. 
- final Collection selectedFilesList = - new HashSet<>(Utilities.actionsGlobalContext().lookupAll(AbstractFile.class)); - AbstractFile file = selectedFilesList.iterator().next(); - try { - LOGGER.log(Level.INFO, "Removing tag {0} from {1}", new Object[]{tagName.getDisplayName(), file.getName()}); //NON-NLS + logger.log(Level.INFO, "Removing tag {0} from {1}", new Object[]{tagName.getDisplayName(), contentTag.getContent().getName()}); //NON-NLS tagsManager.deleteContentTag(contentTag); } catch (TskCoreException tskCoreException) { - LOGGER.log(Level.SEVERE, "Error untagging file", tskCoreException); //NON-NLS + logger.log(Level.SEVERE, "Error untagging file", tskCoreException); //NON-NLS Platform.runLater(() -> new Alert(Alert.AlertType.ERROR, Bundle.DeleteFileContentTagAction_deleteTag_alert(fileId)).show() ); @@ -121,7 +115,7 @@ public class DeleteFileContentTagAction extends AbstractAction implements Presen try { get(); } catch (InterruptedException | ExecutionException ex) { - LOGGER.log(Level.SEVERE, "Unexpected exception while untagging file", ex); //NON-NLS + logger.log(Level.SEVERE, "Unexpected exception while untagging file", ex); //NON-NLS } } }.execute(); diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java index ad642a9cbc..c098112efa 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java @@ -489,12 +489,16 @@ public class DataContentViewerOtherCases extends javax.swing.JPanel implements D corAttrInstances.addAll(getCorrelatedInstances(corAttr, dataSourceName, deviceId)); corAttrInstances.forEach((corAttrInstance) -> { - CorrelationAttribute newCeArtifact = new CorrelationAttribute( - corAttr.getCorrelationType(), - corAttr.getCorrelationValue() - ); - newCeArtifact.addInstance(corAttrInstance); - tableModel.addEamArtifact(newCeArtifact); + try { + CorrelationAttribute newCeArtifact = new CorrelationAttribute( + corAttr.getCorrelationType(), + corAttr.getCorrelationValue() + ); + newCeArtifact.addInstance(corAttrInstance); + tableModel.addEamArtifact(newCeArtifact); + } catch (EamDbException ex){ + LOGGER.log(Level.SEVERE, "Error creating correlation attribute", ex); + } }); } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index 87478529f7..18af7f6c81 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -271,6 +271,10 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public void updateCase(CorrelationCase eamCase) throws EamDbException { + if(eamCase == null) { + throw new EamDbException("CorrelationCase argument is null"); + } + Connection conn = connect(); PreparedStatement preparedStatement = null; @@ -444,6 +448,10 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public CorrelationDataSource getDataSource(CorrelationCase correlationCase, String dataSourceDeviceId) throws EamDbException { + if(correlationCase == null) { + throw new EamDbException("CorrelationCase argument is null"); + } + Connection conn = connect(); CorrelationDataSource eamDataSourceResult = null; 
@@ -513,6 +521,16 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public void addArtifact(CorrelationAttribute eamArtifact) throws EamDbException { + if(eamArtifact == null) { + throw new EamDbException("CorrelationAttribute is null"); + } + if(eamArtifact.getCorrelationType() == null) { + throw new EamDbException("Correlation type is null"); + } + if(eamArtifact.getCorrelationValue() == null) { + throw new EamDbException("Correlation value is null"); + } + Connection conn = connect(); List eamInstances = eamArtifact.getInstances(); @@ -526,11 +544,21 @@ public abstract class AbstractSqlEamDb implements EamDb { sql.append("(case_id, data_source_id, value, file_path, known_status, comment) "); sql.append("VALUES ((SELECT id FROM cases WHERE case_uid=? LIMIT 1), "); sql.append("(SELECT id FROM data_sources WHERE device_id=? AND case_id=? LIMIT 1), ?, ?, ?, ?)"); - + try { preparedStatement = conn.prepareStatement(sql.toString()); for (CorrelationAttributeInstance eamInstance : eamInstances) { if (!eamArtifact.getCorrelationValue().isEmpty()) { + if(eamInstance.getCorrelationCase() == null) { + throw new EamDbException("CorrelationAttributeInstance has null case"); + } + if(eamInstance.getCorrelationDataSource() == null) { + throw new EamDbException("CorrelationAttributeInstance has null data source"); + } + if(eamInstance.getKnownStatus() == null) { + throw new EamDbException("CorrelationAttributeInstance has null known status"); + } + preparedStatement.setString(1, eamInstance.getCorrelationCase().getCaseUUID()); preparedStatement.setString(2, eamInstance.getCorrelationDataSource().getDeviceID()); preparedStatement.setInt(3, eamInstance.getCorrelationDataSource().getCaseID()); @@ -567,6 +595,9 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public List getArtifactInstancesByTypeValue(CorrelationAttribute.Type aType, String value) throws EamDbException { + if(aType == null) { + throw new EamDbException("Correlation type is null"); + } Connection conn = connect(); List artifactInstances = new ArrayList<>(); @@ -619,6 +650,12 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public List getArtifactInstancesByPath(CorrelationAttribute.Type aType, String filePath) throws EamDbException { + if(aType == null) { + throw new EamDbException("Correlation type is null"); + } + if(filePath == null) { + throw new EamDbException("Correlation value is null"); + } Connection conn = connect(); List artifactInstances = new ArrayList<>(); @@ -641,7 +678,7 @@ public abstract class AbstractSqlEamDb implements EamDb { try { preparedStatement = conn.prepareStatement(sql.toString()); - preparedStatement.setString(1, filePath); + preparedStatement.setString(1, filePath.toLowerCase()); resultSet = preparedStatement.executeQuery(); while (resultSet.next()) { artifactInstance = getEamArtifactInstanceFromResultSet(resultSet); @@ -670,6 +707,13 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public Long getCountArtifactInstancesByTypeValue(CorrelationAttribute.Type aType, String value) throws EamDbException { + if(aType == null) { + throw new EamDbException("Correlation type is null"); + } + if(value == null) { + throw new EamDbException("Correlation value is null"); + } + Connection conn = connect(); Long instanceCount = 0L; @@ -684,7 +728,7 @@ public abstract class AbstractSqlEamDb implements EamDb { try { preparedStatement = conn.prepareStatement(sql.toString()); - preparedStatement.setString(1, value); + 
preparedStatement.setString(1, value.toLowerCase()); resultSet = preparedStatement.executeQuery(); resultSet.next(); instanceCount = resultSet.getLong(1); @@ -701,6 +745,9 @@ public abstract class AbstractSqlEamDb implements EamDb { @Override public int getFrequencyPercentage(CorrelationAttribute corAttr) throws EamDbException { + if (corAttr == null) { + throw new EamDbException("Correlation attribute is null"); + } Double uniqueTypeValueTuples = getCountUniqueCaseDataSourceTuplesHavingTypeValue(corAttr.getCorrelationType(), corAttr.getCorrelationValue()).doubleValue(); Double uniqueCaseDataSourceTuples = getCountUniqueDataSources().doubleValue(); Double commonalityPercentage = uniqueTypeValueTuples / uniqueCaseDataSourceTuples * 100; @@ -719,6 +766,10 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public Long getCountUniqueCaseDataSourceTuplesHavingTypeValue(CorrelationAttribute.Type aType, String value) throws EamDbException { + if(aType == null) { + throw new EamDbException("Correlation type is null"); + } + Connection conn = connect(); Long instanceCount = 0L; @@ -840,6 +891,10 @@ public abstract class AbstractSqlEamDb implements EamDb { @Override public void prepareBulkArtifact(CorrelationAttribute eamArtifact) throws EamDbException { + if(eamArtifact.getCorrelationType() == null) { + throw new EamDbException("Correlation type is null"); + } + synchronized (bulkArtifacts) { bulkArtifacts.get(eamArtifact.getCorrelationType().getDbTableName()).add(eamArtifact); bulkArtifactsCount++; @@ -893,6 +948,17 @@ public abstract class AbstractSqlEamDb implements EamDb { for (CorrelationAttributeInstance eamInstance : eamInstances) { if (!eamArtifact.getCorrelationValue().isEmpty()) { + + if(eamInstance.getCorrelationCase() == null) { + throw new EamDbException("Correlation attribute instance has null case"); + } + if(eamInstance.getCorrelationDataSource() == null) { + throw new EamDbException("Correlation attribute instance has null data source"); + } + if(eamInstance.getKnownStatus()== null) { + throw new EamDbException("Correlation attribute instance has null known known status"); + } + bulkPs.setString(1, eamInstance.getCorrelationCase().getCaseUUID()); bulkPs.setString(2, eamInstance.getCorrelationDataSource().getDeviceID()); bulkPs.setInt(3, eamInstance.getCorrelationDataSource().getCaseID()); @@ -929,12 +995,16 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public void bulkInsertCases(List cases) throws EamDbException { - Connection conn = connect(); - + if(cases == null) { + throw new EamDbException("cases argument is null"); + } + if (cases.isEmpty()) { return; } + Connection conn = connect(); + int counter = 0; PreparedStatement bulkPs = null; try { @@ -1012,15 +1082,28 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public void setArtifactInstanceKnownStatus(CorrelationAttribute eamArtifact, TskData.FileKnown knownStatus) throws EamDbException { - Connection conn = connect(); - + if(eamArtifact == null) { + throw new EamDbException("Correlation attribute is null"); + } + if(knownStatus == null) { + throw new EamDbException("Known status is null"); + } if (1 != eamArtifact.getInstances().size()) { throw new EamDbException("Error: Artifact must have exactly one (1) Artifact Instance to set as notable."); // NON-NLS } - + List eamInstances = eamArtifact.getInstances(); CorrelationAttributeInstance eamInstance = eamInstances.get(0); + if(eamInstance.getCorrelationCase() == null) { + throw new 
EamDbException("Correlation case is null"); + } + if(eamInstance.getCorrelationDataSource() == null) { + throw new EamDbException("Correlation data source is null"); + } + + Connection conn = connect(); + PreparedStatement preparedUpdate = null; PreparedStatement preparedQuery = null; ResultSet resultSet = null; @@ -1103,6 +1186,10 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public List getArtifactInstancesKnownBad(CorrelationAttribute.Type aType, String value) throws EamDbException { + if(aType == null) { + throw new EamDbException("Correlation type is null"); + } + Connection conn = connect(); List artifactInstances = new ArrayList<>(); @@ -1153,6 +1240,10 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public Long getCountArtifactInstancesKnownBad(CorrelationAttribute.Type aType, String value) throws EamDbException { + if(aType == null) { + throw new EamDbException("Correlation type is null"); + } + Connection conn = connect(); Long badInstances = 0L; @@ -1197,6 +1288,10 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public List getListCasesHavingArtifactInstancesKnownBad(CorrelationAttribute.Type aType, String value) throws EamDbException { + if(aType == null) { + throw new EamDbException("Correlation type is null"); + } + Connection conn = connect(); Collection caseNames = new LinkedHashSet<>(); @@ -1313,7 +1408,7 @@ public abstract class AbstractSqlEamDb implements EamDb { @Override public boolean referenceSetIsValid(int referenceSetID, String setName, String version) throws EamDbException { EamGlobalSet refSet = this.getReferenceSetByID(referenceSetID); - if (refSet == null) { + if(refSet == null) { return false; } @@ -1382,6 +1477,9 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public boolean isArtifactKnownBadByReference(CorrelationAttribute.Type aType, String value) throws EamDbException { + if(aType == null) { + throw new EamDbException("null correlation type"); + } // TEMP: Only support file correlation type if (aType.getId() != CorrelationAttribute.FILES_TYPE_ID) { @@ -1424,6 +1522,10 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public long newOrganization(EamOrganization eamOrg) throws EamDbException { + if(eamOrg == null) { + throw new EamDbException("EamOrganization is null"); + } + Connection conn = connect(); ResultSet generatedKeys = null; PreparedStatement preparedStatement = null; @@ -1529,6 +1631,9 @@ public abstract class AbstractSqlEamDb implements EamDb { public EamOrganization getReferenceSetOrganization(int referenceSetID) throws EamDbException { EamGlobalSet globalSet = getReferenceSetByID(referenceSetID); + if(globalSet == null) { + throw new EamDbException("Reference set with ID " + referenceSetID + " not found"); + } return (getOrganizationByID(globalSet.getOrgID())); } @@ -1542,6 +1647,10 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public void updateOrganization(EamOrganization updatedOrganization) throws EamDbException { + if(updatedOrganization == null) { + throw new EamDbException("null updatedOrganization"); + } + Connection conn = connect(); PreparedStatement preparedStatement = null; String sql = "UPDATE organizations SET org_name = ?, poc_name = ?, poc_email = ?, poc_phone = ? 
WHERE id = ?"; @@ -1566,6 +1675,10 @@ public abstract class AbstractSqlEamDb implements EamDb { "AbstractSqlEamDb.deleteOrganization.errorDeleting.message=Error executing query when attempting to delete organization by id."}) @Override public void deleteOrganization(EamOrganization organizationToDelete) throws EamDbException { + if(organizationToDelete == null) { + throw new EamDbException("Organization to delete is null"); + } + Connection conn = connect(); PreparedStatement checkIfUsedStatement = null; ResultSet resultSet = null; @@ -1605,6 +1718,18 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public int newReferenceSet(EamGlobalSet eamGlobalSet) throws EamDbException { + if(eamGlobalSet == null){ + throw new EamDbException("EamGlobalSet argument is null"); + } + + if(eamGlobalSet.getFileKnownStatus() == null){ + throw new EamDbException("File known status on the EamGlobalSet is null"); + } + + if(eamGlobalSet.getType() == null){ + throw new EamDbException("Type on the EamGlobalSet is null"); + } + Connection conn = connect(); PreparedStatement preparedStatement1 = null; @@ -1666,8 +1791,11 @@ public abstract class AbstractSqlEamDb implements EamDb { preparedStatement1 = conn.prepareStatement(sql1); preparedStatement1.setInt(1, referenceSetID); resultSet = preparedStatement1.executeQuery(); - resultSet.next(); - return getEamGlobalSetFromResultSet(resultSet); + if(resultSet.next()) { + return getEamGlobalSetFromResultSet(resultSet); + } else { + return null; + } } catch (SQLException ex) { throw new EamDbException("Error getting reference set by id.", ex); // NON-NLS @@ -1689,6 +1817,11 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public List getAllReferenceSets(CorrelationAttribute.Type correlationType) throws EamDbException { + + if(correlationType == null){ + throw new EamDbException("Correlation type is null"); + } + List results = new ArrayList<>(); Connection conn = connect(); @@ -1723,6 +1856,13 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public void addReferenceInstance(EamGlobalFileInstance eamGlobalFileInstance, CorrelationAttribute.Type correlationType) throws EamDbException { + if(eamGlobalFileInstance.getKnownStatus() == null){ + throw new EamDbException("known status of EamGlobalFileInstance is null"); + } + if(correlationType == null){ + throw new EamDbException("Correlation type is null"); + } + Connection conn = connect(); PreparedStatement preparedStatement = null; @@ -1786,6 +1926,13 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public void bulkInsertReferenceTypeEntries(Set globalInstances, CorrelationAttribute.Type contentType) throws EamDbException { + if(contentType == null) { + throw new EamDbException("Null correlation type"); + } + if(globalInstances == null) { + throw new EamDbException("Null set of EamGlobalFileInstance"); + } + Connection conn = connect(); PreparedStatement bulkPs = null; @@ -1799,6 +1946,10 @@ public abstract class AbstractSqlEamDb implements EamDb { bulkPs = conn.prepareStatement(String.format(sql, EamDbUtil.correlationTypeToReferenceTableName(contentType))); for (EamGlobalFileInstance globalInstance : globalInstances) { + if(globalInstance.getKnownStatus() == null){ + throw new EamDbException("EamGlobalFileInstance with value " + globalInstance.getMD5Hash() + " has null known status"); + } + bulkPs.setInt(1, globalInstance.getGlobalSetID()); bulkPs.setString(2, globalInstance.getMD5Hash()); bulkPs.setByte(3, 
globalInstance.getKnownStatus().getFileKnownValue()); @@ -1808,7 +1959,7 @@ public abstract class AbstractSqlEamDb implements EamDb { bulkPs.executeBatch(); conn.commit(); - } catch (SQLException ex) { + } catch (SQLException | EamDbException ex) { try { conn.rollback(); } catch (SQLException ex2) { @@ -1833,6 +1984,10 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public List getReferenceInstancesByTypeValue(CorrelationAttribute.Type aType, String aValue) throws EamDbException { + if(aType == null) { + throw new EamDbException("correlation type is null"); + } + Connection conn = connect(); List globalFileInstances = new ArrayList<>(); @@ -1869,6 +2024,10 @@ public abstract class AbstractSqlEamDb implements EamDb { */ @Override public int newCorrelationType(CorrelationAttribute.Type newType) throws EamDbException { + if (newType == null) { + throw new EamDbException("null correlation type"); + } + Connection conn = connect(); PreparedStatement preparedStatement = null; @@ -1883,7 +2042,7 @@ public abstract class AbstractSqlEamDb implements EamDb { } else { insertSql = "INSERT INTO correlation_types(id, display_name, db_table_name, supported, enabled) VALUES (?, ?, ?, ?, ?)"; } - querySql = "SELECT id FROM correlation_types WHERE display_name=? AND db_table_name=?"; + querySql = "SELECT * FROM correlation_types WHERE display_name=? AND db_table_name=?"; try { preparedStatement = conn.prepareStatement(insertSql); @@ -2073,9 +2232,12 @@ public abstract class AbstractSqlEamDb implements EamDb { preparedStatement = conn.prepareStatement(sql); preparedStatement.setInt(1, typeId); resultSet = preparedStatement.executeQuery(); - resultSet.next(); - aType = getCorrelationTypeFromResultSet(resultSet); - return aType; + if(resultSet.next()) { + aType = getCorrelationTypeFromResultSet(resultSet); + return aType; + } else { + throw new EamDbException("Failed to find entry for correlation type ID = " + typeId); + } } catch (SQLException ex) { throw new EamDbException("Error getting correlation type by id.", ex); // NON-NLS @@ -2131,8 +2293,8 @@ public abstract class AbstractSqlEamDb implements EamDb { } CorrelationDataSource eamDataSource = new CorrelationDataSource( - resultSet.getInt("id"), resultSet.getInt("case_id"), + resultSet.getInt("id"), resultSet.getString("device_id"), resultSet.getString("name") ); @@ -2166,7 +2328,7 @@ public abstract class AbstractSqlEamDb implements EamDb { * * @throws SQLException when an expected column name is not in the resultSet */ - private CorrelationAttributeInstance getEamArtifactInstanceFromResultSet(ResultSet resultSet) throws SQLException { + private CorrelationAttributeInstance getEamArtifactInstanceFromResultSet(ResultSet resultSet) throws SQLException, EamDbException { if (null == resultSet) { return null; } @@ -2216,7 +2378,7 @@ public abstract class AbstractSqlEamDb implements EamDb { return eamGlobalSet; } - private EamGlobalFileInstance getEamGlobalFileInstanceFromResultSet(ResultSet resultSet) throws SQLException { + private EamGlobalFileInstance getEamGlobalFileInstanceFromResultSet(ResultSet resultSet) throws SQLException, EamDbException { if (null == resultSet) { return null; } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttribute.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttribute.java index 39968d7922..427bbc97bb 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttribute.java +++ 
b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttribute.java @@ -28,7 +28,7 @@ import org.openide.util.NbBundle.Messages; /** * Represents a type and value pair that can be used for correlation. * CorrelationAttributeInstances store information about the actual - * occurences of the attribute. + * occurrences of the attribute. */ public class CorrelationAttribute implements Serializable { @@ -66,7 +66,10 @@ public class CorrelationAttribute implements Serializable { return DEFAULT_CORRELATION_TYPES; } - public CorrelationAttribute(Type correlationType, String correlationValue) { + public CorrelationAttribute(Type correlationType, String correlationValue) throws EamDbException { + if(correlationValue == null) { + throw new EamDbException ("Correlation value is null"); + } this.ID = ""; this.correlationType = correlationType; // Lower-case all values to normalize and improve correlation hits, going forward make sure this makes sense for all correlation types @@ -181,9 +184,12 @@ public class CorrelationAttribute implements Serializable { * Must start with a lowercase letter and only contain * lowercase letters, numbers, and '_' characters. * @param supported Is this Type currently supported - * @param enabled Is this Type currentl enabled. + * @param enabled Is this Type currently enabled. */ public Type(int id, String displayName, String dbTableName, Boolean supported, Boolean enabled) throws EamDbException { + if(dbTableName == null) { + throw new EamDbException("dbTableName is null"); + } this.id = id; this.displayName = displayName; this.dbTableName = dbTableName; @@ -195,7 +201,7 @@ public class CorrelationAttribute implements Serializable { } /** - * Constructior for custom types where we do not know the Type ID until + * Constructor for custom types where we do not know the Type ID until * the row has been entered into the correlation_types table * in the central repository. * @@ -204,7 +210,7 @@ public class CorrelationAttribute implements Serializable { * Must start with a lowercase letter and only contain * lowercase letters, numbers, and '_' characters. * @param supported Is this Type currently supported - * @param enabled Is this Type currentl enabled. + * @param enabled Is this Type currently enabled. 
*/ public Type(String displayName, String dbTableName, Boolean supported, Boolean enabled) throws EamDbException { this(-1, displayName, dbTableName, supported, enabled); diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeInstance.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeInstance.java index f75364c580..5ca99abc7a 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeInstance.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeInstance.java @@ -46,7 +46,7 @@ public class CorrelationAttributeInstance implements Serializable { public CorrelationAttributeInstance( CorrelationCase eamCase, CorrelationDataSource eamDataSource - ) { + ) throws EamDbException { this(-1, eamCase, eamDataSource, "", null, TskData.FileKnown.UNKNOWN); } @@ -54,7 +54,7 @@ public class CorrelationAttributeInstance implements Serializable { CorrelationCase eamCase, CorrelationDataSource eamDataSource, String filePath - ) { + ) throws EamDbException { this(-1, eamCase, eamDataSource, filePath, null, TskData.FileKnown.UNKNOWN); } @@ -63,7 +63,7 @@ public class CorrelationAttributeInstance implements Serializable { CorrelationDataSource eamDataSource, String filePath, String comment - ) { + ) throws EamDbException { this(-1, eamCase, eamDataSource, filePath, comment, TskData.FileKnown.UNKNOWN); } @@ -73,7 +73,7 @@ public class CorrelationAttributeInstance implements Serializable { String filePath, String comment, TskData.FileKnown knownStatus - ) { + ) throws EamDbException { this(-1, eamCase, eamDataSource, filePath, comment, knownStatus); } @@ -84,7 +84,11 @@ public class CorrelationAttributeInstance implements Serializable { String filePath, String comment, TskData.FileKnown knownStatus - ) { + ) throws EamDbException { + if(filePath == null) { + throw new EamDbException("file path is null"); + } + this.ID = ID; this.correlationCase = eamCase; this.correlationDataSource = eamDataSource; diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationCase.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationCase.java index 79d94837ee..2d441881de 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationCase.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationCase.java @@ -50,6 +50,10 @@ public class CorrelationCase implements Serializable { * @param caseUUID Globally unique identifier * @param displayName */ + public CorrelationCase(String caseUUID, String displayName) { + this(-1, caseUUID, displayName); + } + CorrelationCase(int ID, String caseUUID, String displayName) { this(ID, caseUUID, null, displayName, DATE_FORMAT.format(new Date()), null, null, null, null, null); } @@ -156,7 +160,7 @@ public class CorrelationCase implements Serializable { /** * @return the database ID for the case or -1 if it is unknown (or not in the DB) */ - int getID() { + public int getID() { // @@@ Should probably have some lazy logic here to lead the ID from the DB if it is -1 return databaseId; } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationDataSource.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationDataSource.java index 9bad6dbac7..9aa9fada32 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationDataSource.java +++ 
b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationDataSource.java @@ -38,6 +38,16 @@ public class CorrelationDataSource implements Serializable { private final String deviceID; //< Unique to its associated case (not necessarily globally unique) private final String name; + /** + * + * @param caseId + * @param deviceId + * @param name + */ + public CorrelationDataSource(int caseId, String deviceId, String name) { + this(caseId, -1, deviceId, name); + } + CorrelationDataSource(int caseId, int dataSourceId, String deviceId, diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamArtifactUtil.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamArtifactUtil.java index 171444fa42..c0810b52d3 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamArtifactUtil.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamArtifactUtil.java @@ -135,7 +135,8 @@ public class EamArtifactUtil { * @return the new EamArtifact, or null if one was not created because * bbArtifact did not contain the needed data */ - private static CorrelationAttribute getCorrelationAttributeFromBlackboardArtifact(CorrelationAttribute.Type correlationType, BlackboardArtifact bbArtifact) { + private static CorrelationAttribute getCorrelationAttributeFromBlackboardArtifact(CorrelationAttribute.Type correlationType, + BlackboardArtifact bbArtifact) throws EamDbException { String value = null; int artifactTypeID = bbArtifact.getArtifactTypeID(); diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamGlobalFileInstance.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamGlobalFileInstance.java index 87d974f353..3c538e67c8 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamGlobalFileInstance.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamGlobalFileInstance.java @@ -36,7 +36,7 @@ public class EamGlobalFileInstance { int globalSetID, String MD5Hash, TskData.FileKnown knownStatus, - String comment) { + String comment) throws EamDbException { this(-1, globalSetID, MD5Hash, knownStatus, comment); } @@ -45,7 +45,13 @@ public class EamGlobalFileInstance { int globalSetID, String MD5Hash, TskData.FileKnown knownStatus, - String comment) { + String comment) throws EamDbException { + if(MD5Hash == null){ + throw new EamDbException("null MD5 hash"); + } + if(knownStatus == null){ + throw new EamDbException("null known status"); + } this.instanceID = instanceID; this.globalSetID = globalSetID; // Normalize hashes by lower casing @@ -111,7 +117,10 @@ public class EamGlobalFileInstance { /** * @param MD5Hash the MD5Hash to set */ - public void setMD5Hash(String MD5Hash) { + public void setMD5Hash(String MD5Hash) throws EamDbException { + if(MD5Hash == null){ + throw new EamDbException("null MD5 hash"); + } // Normalize hashes by lower casing this.MD5Hash = MD5Hash.toLowerCase(); } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java index 13c4fb00a7..d9d1ea2fa3 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java @@ -22,6 +22,7 @@ import java.io.File; import java.sql.Connection; import java.sql.SQLException; import java.sql.Statement; +import java.util.Arrays; import java.util.List; import java.util.Set; import 
java.util.concurrent.locks.ReentrantReadWriteLock; @@ -165,6 +166,7 @@ public class SqliteEamDb extends AbstractSqlEamDb { connectionPool.setMaxIdle(-1); connectionPool.setMaxWaitMillis(1000); connectionPool.setValidationQuery(dbSettings.getValidationQuery()); + connectionPool.setConnectionInitSqls(Arrays.asList("PRAGMA foreign_keys = ON")); } /** diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/Bundle.properties b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/Bundle.properties index 72ecac2866..9faba7a7b3 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/Bundle.properties @@ -17,13 +17,13 @@ EamSqliteSettingsDialog.bnDatabasePathFileOpen.text=Browse... EamSqliteSettingsDialog.tfDatabasePath.toolTipText=Filename and path to store SQLite db file EamSqliteSettingsDialog.tfDatabasePath.text= EamSqliteSettingsDialog.lbDatabasePath.text=Database Path : -ImportHashDatabaseDialog.tfDatabaseName.tooltip=Name for this database -ImportHashDatabaseDialog.tfDatabaseVersion.tooltip.text=Database Version Number -ImportHashDatabaseDialog.tfDatabaseName.tooltip=Name for this database -ImportHashDatabaseDialog.tfDatabaseVersion.tooltip.text=Database Version Number +ImportHashDatabaseDialog.tfDatabaseName.tooltip=Name for this hash set +ImportHashDatabaseDialog.tfDatabaseVersion.tooltip.text=Hash Set Version Number +ImportHashDatabaseDialog.tfDatabaseName.tooltip=Name for this hash set +ImportHashDatabaseDialog.tfDatabaseVersion.tooltip.text=Hash Set Version Number GlobalSettingsPanel.tbOops.text= GlobalSettingsPanel.lbDatabaseSettings.text=Database Settings -GlobalSettingsPanel.bnImportDatabase.label=Import Hash Database +GlobalSettingsPanel.bnImportDatabase.label=Import Hash Set AddNewOrganizationDialog.lbPocPhone.text=Phone: AddNewOrganizationDialog.lbPocEmail.text=Email: AddNewOrganizationDialog.lbPocName.text=Name: diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties b/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties index 45916b87b3..68e938a107 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties @@ -33,3 +33,13 @@ MessageContentViewer.directionText.text=direction MessageContentViewer.ccLabel.text=CC: MessageContentViewer.attachmentsPanel.TabConstraints.tabTitle=Attachments MessageContentViewer.viewInNewWindowButton.text=View in New Window +JPEGViewerDummy.jLabel1.text=You are looking at a JPEG file: +JPEGViewerDummy.jTextField1.text=jTextField1 +SQLiteViewer.nextPageButton.text= +SQLiteViewer.prevPageButton.text= +SQLiteViewer.numPagesLabel.text=N +SQLiteViewer.jLabel3.text=of +SQLiteViewer.currPageLabel.text=x +SQLiteViewer.jLabel2.text=Page +SQLiteViewer.numEntriesField.text=num Entries +SQLiteViewer.jLabel1.text=Table diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/FileTypeViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/FileTypeViewer.java new file mode 100644 index 0000000000..f4a677c4f4 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/FileTypeViewer.java @@ -0,0 +1,50 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.contentviewers; + +import java.awt.Component; +import java.util.List; +import org.sleuthkit.datamodel.AbstractFile; + +/** + * Defines an interface for an application-specific content viewer + * + */ +interface FileTypeViewer { + + /** + * Returns list of MIME types supported by this viewer + */ + List<String> getSupportedMIMETypes(); + + /** + * Display the given file's content in the view panel + */ + void setFile(AbstractFile file); + + /** + * Returns panel + */ + Component getComponent(); + + /** + * Clears the data in the panel + */ + void resetComponent(); +} diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/FileViewer.form b/Core/src/org/sleuthkit/autopsy/contentviewers/FileViewer.form new file mode 100644 index 0000000000..d07831cafe --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/FileViewer.form @@ -0,0 +1,41 @@
diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/FileViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/FileViewer.java new file mode 100644 index 0000000000..db13e523e5 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/FileViewer.java @@ -0,0 +1,235 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.contentviewers; + +import com.google.common.base.Strings; +import java.awt.Component; +import java.util.HashMap; +import java.util.Map; +import java.util.logging.Level; +import org.openide.nodes.Node; +import org.openide.util.NbBundle; +import org.openide.util.lookup.ServiceProvider; +import org.sleuthkit.autopsy.corecomponentinterfaces.DataContentViewer; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; +import org.sleuthkit.datamodel.AbstractFile; + +/** + * Generic Application content viewer + */ +@ServiceProvider(service = DataContentViewer.class, position = 5) +public class FileViewer extends javax.swing.JPanel implements DataContentViewer { + + private static final int CONFIDENCE_LEVEL = 7; + private static final long serialVersionUID = 1L; + private static final Logger LOGGER = Logger.getLogger(FileViewer.class.getName()); + + private final Map mimeTypeToViewerMap = new HashMap<>(); + + // TBD: This hardcoded list of viewers should be replaced with a dynamic lookup + private static final FileTypeViewer[] KNOWN_VIEWERS = new FileTypeViewer[]{ + // new JPEGViewerDummy(), // this if for testing only + new SQLiteViewer() + }; + + private FileTypeViewer lastViewer; + + /** + * Creates new form ApplicationContentViewer + */ + public FileViewer() { + + // init the mimetype to viewer map + for (FileTypeViewer cv : KNOWN_VIEWERS) { + cv.getSupportedMIMETypes().forEach((mimeType) -> { + if (mimeTypeToViewerMap.containsKey(mimeType) == false) { + mimeTypeToViewerMap.put(mimeType, cv); + } else { + LOGGER.log(Level.WARNING, "Duplicate viewer for mimtype: {0}", mimeType); //NON-NLS + } + }); + } + + initComponents(); + + LOGGER.log(Level.INFO, "Created ApplicationContentViewer instance: {0}", this); //NON-NLS + } + + /** + * Get the FileTypeViewer for a given mimetype + * + * @param mimeType + * + * @return FileTypeViewer, null if no known content viewer supports the mimetype + */ + private FileTypeViewer getSupportingViewer(String mimeType) { + return mimeTypeToViewerMap.get(mimeType); + } + + /** + * This method is called from within the constructor to initialize the form. + * WARNING: Do NOT modify this code. The content of this method is always + * regenerated by the Form Editor. 
+ */ + @SuppressWarnings("unchecked") + // //GEN-BEGIN:initComponents + private void initComponents() { + + setLayout(new javax.swing.OverlayLayout(this)); + }// //GEN-END:initComponents + + + // Variables declaration - do not modify//GEN-BEGIN:variables + // End of variables declaration//GEN-END:variables + + @Override + public void setNode(Node selectedNode) { + + resetComponent(); + + if (selectedNode == null) { + return; + } + + AbstractFile file = selectedNode.getLookup().lookup(AbstractFile.class); + if (file == null) { + return; + } + + String mimeType = file.getMIMEType(); + if (Strings.isNullOrEmpty(mimeType)) { + LOGGER.log(Level.INFO, "Mimetype not known for file: {0}", file.getName()); //NON-NLS + try { + FileTypeDetector fileTypeDetector = new FileTypeDetector(); + mimeType = fileTypeDetector.getMIMEType(file); + }catch (FileTypeDetector.FileTypeDetectorInitException ex) { + LOGGER.log(Level.SEVERE, "Failed to initialize FileTypeDetector.", ex); //NON-NLS + return; + } + } + + if (mimeType.equalsIgnoreCase("application/octet-stream")) { + return; + } + else { + FileTypeViewer viewer = getSupportingViewer(mimeType); + if (viewer != null) { + lastViewer = viewer; + + viewer.setFile(file); + this.removeAll(); + this.add(viewer.getComponent()); + this.repaint(); + } + } + + } + + @Override + @NbBundle.Messages("ApplicationContentViewer.title=Application") + public String getTitle() { + return Bundle.ApplicationContentViewer_title(); + } + + @Override + @NbBundle.Messages("ApplicationContentViewer.toolTip=Displays file contents.") + public String getToolTip() { + return Bundle.ApplicationContentViewer_toolTip(); + } + + @Override + public DataContentViewer createInstance() { + return new FileViewer(); + } + + @Override + public Component getComponent() { + return this; + } + + @Override + public void resetComponent() { + + if (lastViewer != null) { + lastViewer.resetComponent(); + } + this.removeAll(); + lastViewer = null; + } + + @Override + public boolean isSupported(Node node) { + + if (node == null) { + return false; + } + + AbstractFile aFile = node.getLookup().lookup(AbstractFile.class); + if (aFile == null) { + return false; + } + + String mimeType = aFile.getMIMEType(); + if (Strings.isNullOrEmpty(mimeType)) { + LOGGER.log(Level.INFO, "Mimetype not known for file: {0}", aFile.getName()); //NON-NLS + try { + FileTypeDetector fileTypeDetector = new FileTypeDetector(); + mimeType = fileTypeDetector.getMIMEType(aFile); + }catch (FileTypeDetector.FileTypeDetectorInitException ex) { + LOGGER.log(Level.SEVERE, "Failed to initialize FileTypeDetector.", ex); //NON-NLS + return false; + } + } + + if (mimeType.equalsIgnoreCase("application/octet-stream")) { + return false; + } else { + return (getSupportingViewer(mimeType) != null); + } + + } + + @Override + public int isPreferred(Node node) { + AbstractFile file = node.getLookup().lookup(AbstractFile.class); + String mimeType = file.getMIMEType(); + + if (Strings.isNullOrEmpty(mimeType)) { + LOGGER.log(Level.INFO, "Mimetype not known for file: {0}", file.getName()); //NON-NLS + try { + FileTypeDetector fileTypeDetector = new FileTypeDetector(); + mimeType = fileTypeDetector.getMIMEType(file); + }catch (FileTypeDetector.FileTypeDetectorInitException ex) { + LOGGER.log(Level.SEVERE, "Failed to initialize FileTypeDetector.", ex); //NON-NLS + return 0; + } + } + + if (mimeType.equalsIgnoreCase("application/octet-stream")) { + return 0; + } else { + if (null != getSupportingViewer(mimeType)) { + return CONFIDENCE_LEVEL; + } + } + + return 
0; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/JPEGViewerDummy.form b/Core/src/org/sleuthkit/autopsy/contentviewers/JPEGViewerDummy.form new file mode 100644 index 0000000000..587dd3c9a0 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/JPEGViewerDummy.form @@ -0,0 +1,58 @@
diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/JPEGViewerDummy.java b/Core/src/org/sleuthkit/autopsy/contentviewers/JPEGViewerDummy.java new file mode 100644 index 0000000000..8aea7540e1 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/JPEGViewerDummy.java @@ -0,0 +1,89 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package org.sleuthkit.autopsy.contentviewers; + +import java.awt.Component; +import java.util.Arrays; +import java.util.List; +import org.sleuthkit.datamodel.AbstractFile; + +public class JPEGViewerDummy extends javax.swing.JPanel implements FileTypeViewer { + + public static final String[] SUPPORTED_MIMETYPES = new String[]{"image/jpeg"}; + + /** + * Creates new form JPEGViewer + */ + public JPEGViewerDummy() { + initComponents(); + } + + /** + * This method is called from within the constructor to initialize the form. + * WARNING: Do NOT modify this code. The content of this method is always + * regenerated by the Form Editor. + */ + @SuppressWarnings("unchecked") + // //GEN-BEGIN:initComponents + private void initComponents() { + + jLabel1 = new javax.swing.JLabel(); + jTextField1 = new javax.swing.JTextField(); + + org.openide.awt.Mnemonics.setLocalizedText(jLabel1, org.openide.util.NbBundle.getMessage(JPEGViewerDummy.class, "JPEGViewerDummy.jLabel1.text")); // NOI18N + + jTextField1.setEditable(false); + jTextField1.setText(org.openide.util.NbBundle.getMessage(JPEGViewerDummy.class, "JPEGViewerDummy.jTextField1.text")); // NOI18N + + javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); + this.setLayout(layout); + layout.setHorizontalGroup( + layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(layout.createSequentialGroup() + .addGap(43, 43, 43) + .addComponent(jLabel1) + .addGap(35, 35, 35) + .addComponent(jTextField1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addContainerGap(120, Short.MAX_VALUE)) + ); + layout.setVerticalGroup( + layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(layout.createSequentialGroup() + .addContainerGap() + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(jLabel1) + .addComponent(jTextField1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addContainerGap(269, Short.MAX_VALUE)) + ); + }// //GEN-END:initComponents + + @Override + public List getSupportedMIMETypes() { + return Arrays.asList(SUPPORTED_MIMETYPES); + } + + @Override + public Component getComponent() { + return this; + } + + @Override + public void resetComponent() { + this.jTextField1.setText(""); + } + + @Override + public void setFile(AbstractFile file) { + this.jTextField1.setText(file.getName()); + } + + + // Variables declaration - do not modify//GEN-BEGIN:variables + private javax.swing.JLabel jLabel1; + private javax.swing.JTextField jTextField1; + // End of variables declaration//GEN-END:variables + +} diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteTableRowFactory.java b/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteTableRowFactory.java new file mode 100644 index 0000000000..633f40260c --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteTableRowFactory.java @@ -0,0 +1,88 @@ +/* + * 
Autopsy Forensic Browser + * + * Copyright 2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.contentviewers; + +import java.util.List; +import java.util.Map; +import java.util.Objects; +import org.openide.nodes.AbstractNode; +import org.openide.nodes.ChildFactory; +import org.openide.nodes.Children; +import org.openide.nodes.Node; +import org.openide.nodes.Sheet; +import org.sleuthkit.autopsy.datamodel.NodeProperty; + +public class SQLiteTableRowFactory extends ChildFactory { + + private final List> rows; + + public SQLiteTableRowFactory(List> rows) { + this.rows = rows; + } + + @Override + protected boolean createKeys(List keys) { + if (rows != null) { + for (int i = 0; i < rows.size(); i++) { + keys.add(i); + } + } + return true; + } + + @Override + protected Node createNodeForKey(Integer key) { + if (Objects.isNull(rows) || rows.isEmpty() || key >= rows.size()) { + return null; + } + + return new SQLiteTableRowNode(rows.get(key)); + } + +} + +class SQLiteTableRowNode extends AbstractNode { + + private final Map row; + + SQLiteTableRowNode(Map row) { + super(Children.LEAF); + this.row = row; + } + + @Override + protected Sheet createSheet() { + + Sheet s = super.createSheet(); + Sheet.Set properties = s.get(Sheet.PROPERTIES); + if (properties == null) { + properties = Sheet.createPropertiesSet(); + s.put(properties); + } + + for (Map.Entry col : row.entrySet()) { + String colName = col.getKey(); + String colVal = col.getValue().toString(); + + properties.put(new NodeProperty<>(colName, colName, colName, colVal)); // NON-NLS + } + + return s; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteTableView.form b/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteTableView.form new file mode 100644 index 0000000000..2c7924e2a4 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteTableView.form @@ -0,0 +1,18 @@ + + +
+ + + + + + + + + + + + + + + diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteTableView.java b/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteTableView.java new file mode 100644 index 0000000000..7ca873e13c --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteTableView.java @@ -0,0 +1,163 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.contentviewers; + +import java.awt.BorderLayout; +import java.awt.Component; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import javax.swing.JPanel; +import javax.swing.JTable; +import javax.swing.ListSelectionModel; +import javax.swing.ScrollPaneConstants; +import javax.swing.SwingWorker; +import javax.swing.table.TableCellRenderer; +import javax.swing.table.TableColumnModel; +import org.netbeans.swing.etable.ETableColumn; +import org.netbeans.swing.etable.ETableColumnModel; +import org.netbeans.swing.outline.Outline; +import org.openide.explorer.ExplorerManager; +import org.openide.nodes.AbstractNode; +import org.openide.nodes.Children; + +class SQLiteTableView extends JPanel implements ExplorerManager.Provider { + + private final org.openide.explorer.view.OutlineView outlineView; + private final Outline outline; + private final ExplorerManager explorerManager; + + /** + * Creates new form SQLiteTableView + * + */ + SQLiteTableView() { + + initComponents(); + outlineView = new org.openide.explorer.view.OutlineView(); + add(outlineView, BorderLayout.CENTER); + outlineView.setPropertyColumns(); // column headers will be set later + outlineView.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_ALWAYS); + outlineView.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS); + + outline = outlineView.getOutline(); + + outline.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); + outline.setAutoResizeMode(JTable.AUTO_RESIZE_OFF); + outline.setRowSelectionAllowed(false); + outline.setRootVisible(false); + + explorerManager = new ExplorerManager(); + } + + /** + * Sets up the columns in the display table + * + * @param tableRows + */ + void setupTable(List> tableRows) { + + + if (Objects.isNull(tableRows) || tableRows.isEmpty()) { + outlineView.setPropertyColumns(); + } else { + + // Set up the column names + Map row = tableRows.get(0); + String[] propStrings = new String[row.size() * 2]; + int i = 0; + for (Map.Entry col : row.entrySet()) { + String colName = col.getKey(); + propStrings[2 * i] = colName; + propStrings[2 * i + 1] = colName; + i++; + } + + outlineView.setPropertyColumns(propStrings); + } + + // Hide the 'Nodes' column + TableColumnModel columnModel = outline.getColumnModel(); + ETableColumn column = (ETableColumn) columnModel.getColumn(0); + ((ETableColumnModel) columnModel).setColumnHidden(column, true); + + // Set the Nodes for the ExplorerManager. 
+ // The Swingworker ensures that setColumnWidths() is called after all nodes have been created. + new SwingWorker() { + @Override + protected Boolean doInBackground() throws Exception { + + explorerManager.setRootContext(new AbstractNode(Children.create(new SQLiteTableRowFactory(tableRows), true))); + return false; + } + + @Override + protected void done() { + super.done(); + + setColumnWidths(); + } + }.execute(); + + } + + private void setColumnWidths() { + int margin = 4; + int padding = 8; + + // find the maximum width needed to fit the values for the first N rows, at most + final int rows = Math.min(20, outline.getRowCount()); + for (int col = 1; col < outline.getColumnCount(); col++) { + int columnWidthLimit = 500; + int columnWidth = 50; + + for (int row = 0; row < rows; row++) { + TableCellRenderer renderer = outline.getCellRenderer(row, col); + Component comp = outline.prepareRenderer(renderer, row, col); + + columnWidth = Math.max(comp.getPreferredSize().width, columnWidth); + } + + columnWidth += 2 * margin + padding; // add margin and regular padding + columnWidth = Math.min(columnWidth, columnWidthLimit); + outline.getColumnModel().getColumn(col).setPreferredWidth(columnWidth); + } + } + + + /** + * This method is called from within the constructor to initialize the form. + * WARNING: Do NOT modify this code. The content of this method is always + * regenerated by the Form Editor. + */ + @SuppressWarnings("unchecked") + // //GEN-BEGIN:initComponents + private void initComponents() { + + setLayout(new java.awt.BorderLayout()); + }// //GEN-END:initComponents + + @Override + public ExplorerManager getExplorerManager() { + return explorerManager; + } + + // Variables declaration - do not modify//GEN-BEGIN:variables + // End of variables declaration//GEN-END:variables +} diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.form b/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.form new file mode 100644 index 0000000000..0469da7b73 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.form @@ -0,0 +1,209 @@ + + +
[209 added lines: NetBeans GUI-builder form definition (XML) describing the SQLiteViewer panel layout; the markup is not reproduced here.]
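For orientation before the viewer source that follows: SQLiteViewer pages through a table with plain LIMIT/OFFSET queries over a JDBC connection to a temporary copy of the database, and flattens each ResultSet page into a list of column-name-to-value maps for the table view. Below is a minimal, self-contained sketch of that pattern; the class, method, path, and table names are illustrative only (not part of this patch), and it assumes an sqlite-jdbc driver is on the classpath.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

class SqlitePagerSketch {

    private static final int ROWS_PER_PAGE = 100;

    /** Reads one page (1-based) of a table as column-name -> value maps. */
    static List<Map<String, Object>> readPage(Connection conn, String table, int page) throws SQLException {
        // Same LIMIT/OFFSET paging idea the viewer uses; the table name comes from
        // sqlite_master in the viewer, not from user input.
        String sql = "SELECT * FROM " + table
                + " LIMIT " + ROWS_PER_PAGE
                + " OFFSET " + ((page - 1) * ROWS_PER_PAGE);
        try (Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery(sql)) {
            ResultSetMetaData meta = rs.getMetaData();
            List<Map<String, Object>> rows = new ArrayList<>();
            while (rs.next()) {
                // LinkedHashMap keeps the column order for display.
                Map<String, Object> row = new LinkedHashMap<>();
                for (int i = 1; i <= meta.getColumnCount(); i++) {
                    row.put(meta.getColumnName(i), rs.getObject(i));
                }
                rows.add(row);
            }
            return rows;
        }
    }

    public static void main(String[] args) throws SQLException {
        // Hypothetical path to a local copy of the database being examined.
        try (Connection conn = DriverManager.getConnection("jdbc:sqlite:/tmp/copy.db")) {
            readPage(conn, "messages", 1).forEach(System.out::println);
        }
    }
}

The real viewer additionally masks BLOB values and hands each page of row maps to the OutlineView-based table shown in SQLiteTableView above.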
diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.java new file mode 100644 index 0000000000..627d30e87c --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/SQLiteViewer.java @@ -0,0 +1,514 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.contentviewers; + +import java.awt.BorderLayout; +import java.awt.Component; +import java.io.File; +import java.io.IOException; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.TreeMap; +import java.util.concurrent.ExecutionException; +import java.util.logging.Level; +import javax.swing.JComboBox; +import javax.swing.SwingWorker; +import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.datamodel.ContentUtils; +import org.sleuthkit.datamodel.AbstractFile; + +public class SQLiteViewer extends javax.swing.JPanel implements FileTypeViewer { + + public static final String[] SUPPORTED_MIMETYPES = new String[]{"application/x-sqlite3"}; + private static final Logger LOGGER = Logger.getLogger(FileViewer.class.getName()); + private Connection connection = null; + + private String tmpDBPathName = null; + private File tmpDBFile = null; + + private final Map dbTablesMap = new TreeMap<>(); + + private static final int ROWS_PER_PAGE = 100; + private int numRows; // num of rows in the selected table + private int currPage = 0; // curr page of rows being displayed + + SQLiteTableView selectedTableView = new SQLiteTableView(); + + private SwingWorker worker; + + /** + * Creates new form SQLiteViewer + */ + public SQLiteViewer() { + initComponents(); + jTableDataPanel.add(selectedTableView, BorderLayout.CENTER); + } + + /** + * This method is called from within the constructor to initialize the form. + * WARNING: Do NOT modify this code. The content of this method is always + * regenerated by the Form Editor. 
+ */ + @SuppressWarnings("unchecked") + // //GEN-BEGIN:initComponents + private void initComponents() { + + jHdrPanel = new javax.swing.JPanel(); + tablesDropdownList = new javax.swing.JComboBox<>(); + jLabel1 = new javax.swing.JLabel(); + numEntriesField = new javax.swing.JTextField(); + jLabel2 = new javax.swing.JLabel(); + currPageLabel = new javax.swing.JLabel(); + jLabel3 = new javax.swing.JLabel(); + numPagesLabel = new javax.swing.JLabel(); + prevPageButton = new javax.swing.JButton(); + nextPageButton = new javax.swing.JButton(); + jTableDataPanel = new javax.swing.JPanel(); + + jHdrPanel.setPreferredSize(new java.awt.Dimension(536, 40)); + + tablesDropdownList.setModel(new javax.swing.DefaultComboBoxModel<>(new String[] { "Item 1", "Item 2", "Item 3", "Item 4" })); + tablesDropdownList.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + tablesDropdownListActionPerformed(evt); + } + }); + + org.openide.awt.Mnemonics.setLocalizedText(jLabel1, org.openide.util.NbBundle.getMessage(SQLiteViewer.class, "SQLiteViewer.jLabel1.text")); // NOI18N + + numEntriesField.setEditable(false); + numEntriesField.setText(org.openide.util.NbBundle.getMessage(SQLiteViewer.class, "SQLiteViewer.numEntriesField.text")); // NOI18N + numEntriesField.setBorder(null); + + org.openide.awt.Mnemonics.setLocalizedText(jLabel2, org.openide.util.NbBundle.getMessage(SQLiteViewer.class, "SQLiteViewer.jLabel2.text")); // NOI18N + + org.openide.awt.Mnemonics.setLocalizedText(currPageLabel, org.openide.util.NbBundle.getMessage(SQLiteViewer.class, "SQLiteViewer.currPageLabel.text")); // NOI18N + + org.openide.awt.Mnemonics.setLocalizedText(jLabel3, org.openide.util.NbBundle.getMessage(SQLiteViewer.class, "SQLiteViewer.jLabel3.text")); // NOI18N + + org.openide.awt.Mnemonics.setLocalizedText(numPagesLabel, org.openide.util.NbBundle.getMessage(SQLiteViewer.class, "SQLiteViewer.numPagesLabel.text")); // NOI18N + + prevPageButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_back.png"))); // NOI18N + org.openide.awt.Mnemonics.setLocalizedText(prevPageButton, org.openide.util.NbBundle.getMessage(SQLiteViewer.class, "SQLiteViewer.prevPageButton.text")); // NOI18N + prevPageButton.setBorderPainted(false); + prevPageButton.setContentAreaFilled(false); + prevPageButton.setDisabledSelectedIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_back_disabled.png"))); // NOI18N + prevPageButton.setMargin(new java.awt.Insets(2, 0, 2, 0)); + prevPageButton.setPreferredSize(new java.awt.Dimension(23, 23)); + prevPageButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + prevPageButtonActionPerformed(evt); + } + }); + + nextPageButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_forward.png"))); // NOI18N + org.openide.awt.Mnemonics.setLocalizedText(nextPageButton, org.openide.util.NbBundle.getMessage(SQLiteViewer.class, "SQLiteViewer.nextPageButton.text")); // NOI18N + nextPageButton.setBorderPainted(false); + nextPageButton.setContentAreaFilled(false); + nextPageButton.setDisabledSelectedIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_forward_disabled.png"))); // NOI18N + nextPageButton.setMargin(new java.awt.Insets(2, 0, 2, 0)); + nextPageButton.setPreferredSize(new 
java.awt.Dimension(23, 23)); + nextPageButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + nextPageButtonActionPerformed(evt); + } + }); + + javax.swing.GroupLayout jHdrPanelLayout = new javax.swing.GroupLayout(jHdrPanel); + jHdrPanel.setLayout(jHdrPanelLayout); + jHdrPanelLayout.setHorizontalGroup( + jHdrPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(jHdrPanelLayout.createSequentialGroup() + .addContainerGap() + .addComponent(jLabel1) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(tablesDropdownList, javax.swing.GroupLayout.PREFERRED_SIZE, 130, javax.swing.GroupLayout.PREFERRED_SIZE) + .addGap(18, 18, 18) + .addComponent(numEntriesField, javax.swing.GroupLayout.PREFERRED_SIZE, 71, javax.swing.GroupLayout.PREFERRED_SIZE) + .addGap(15, 15, 15) + .addComponent(jLabel2) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(currPageLabel) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(jLabel3) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(numPagesLabel) + .addGap(18, 18, 18) + .addComponent(prevPageButton, javax.swing.GroupLayout.PREFERRED_SIZE, 23, javax.swing.GroupLayout.PREFERRED_SIZE) + .addGap(0, 0, 0) + .addComponent(nextPageButton, javax.swing.GroupLayout.PREFERRED_SIZE, 23, javax.swing.GroupLayout.PREFERRED_SIZE) + .addContainerGap(133, Short.MAX_VALUE)) + ); + jHdrPanelLayout.setVerticalGroup( + jHdrPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(jHdrPanelLayout.createSequentialGroup() + .addContainerGap() + .addGroup(jHdrPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(nextPageButton, javax.swing.GroupLayout.PREFERRED_SIZE, 23, javax.swing.GroupLayout.PREFERRED_SIZE) + .addComponent(prevPageButton, javax.swing.GroupLayout.PREFERRED_SIZE, 23, javax.swing.GroupLayout.PREFERRED_SIZE) + .addGroup(jHdrPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(tablesDropdownList, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addComponent(jLabel1) + .addComponent(numEntriesField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addComponent(jLabel2) + .addComponent(currPageLabel) + .addComponent(jLabel3) + .addComponent(numPagesLabel))) + .addContainerGap()) + ); + + jTableDataPanel.setLayout(new java.awt.BorderLayout()); + + javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); + this.setLayout(layout); + layout.setHorizontalGroup( + layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(jHdrPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(jTableDataPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + ); + layout.setVerticalGroup( + layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(layout.createSequentialGroup() + .addComponent(jHdrPanel, javax.swing.GroupLayout.PREFERRED_SIZE, 53, javax.swing.GroupLayout.PREFERRED_SIZE) + .addGap(0, 0, 0) + .addComponent(jTableDataPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 317, Short.MAX_VALUE)) + ); + }// //GEN-END:initComponents + + private 
void nextPageButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_nextPageButtonActionPerformed + + currPage++; + if (currPage * ROWS_PER_PAGE > numRows) { + nextPageButton.setEnabled(false); + } + currPageLabel.setText(Integer.toString(currPage)); + prevPageButton.setEnabled(true); + + // read and display a page of rows + String tableName = (String) this.tablesDropdownList.getSelectedItem(); + readTable(tableName, (currPage - 1) * ROWS_PER_PAGE + 1, ROWS_PER_PAGE); + }//GEN-LAST:event_nextPageButtonActionPerformed + + private void prevPageButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_prevPageButtonActionPerformed + + currPage--; + if (currPage == 1) { + prevPageButton.setEnabled(false); + } + currPageLabel.setText(Integer.toString(currPage)); + nextPageButton.setEnabled(true); + + // read and display a page of rows + String tableName = (String) this.tablesDropdownList.getSelectedItem(); + readTable(tableName, (currPage - 1) * ROWS_PER_PAGE + 1, ROWS_PER_PAGE); + }//GEN-LAST:event_prevPageButtonActionPerformed + + private void tablesDropdownListActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_tablesDropdownListActionPerformed + JComboBox cb = (JComboBox) evt.getSource(); + String tableName = (String) cb.getSelectedItem(); + if (null == tableName) { + return; + } + + selectTable(tableName); + }//GEN-LAST:event_tablesDropdownListActionPerformed + + + // Variables declaration - do not modify//GEN-BEGIN:variables + private javax.swing.JLabel currPageLabel; + private javax.swing.JPanel jHdrPanel; + private javax.swing.JLabel jLabel1; + private javax.swing.JLabel jLabel2; + private javax.swing.JLabel jLabel3; + private javax.swing.JPanel jTableDataPanel; + private javax.swing.JButton nextPageButton; + private javax.swing.JTextField numEntriesField; + private javax.swing.JLabel numPagesLabel; + private javax.swing.JButton prevPageButton; + private javax.swing.JComboBox tablesDropdownList; + // End of variables declaration//GEN-END:variables + + @Override + public List getSupportedMIMETypes() { + return Arrays.asList(SUPPORTED_MIMETYPES); + } + + @Override + public void setFile(AbstractFile file) { + processSQLiteFile(file); + } + + @Override + public Component getComponent() { + return this; + } + + @Override + public void resetComponent() { + + dbTablesMap.clear(); + + tablesDropdownList.setEnabled(true); + tablesDropdownList.removeAllItems(); + numEntriesField.setText(""); + + // close DB connection to file + if (null != connection) { + try { + connection.close(); + connection = null; + } catch (SQLException ex) { + LOGGER.log(Level.SEVERE, "Failed to close DB connection to file.", ex); //NON-NLS + } + } + + // delete last temp file + if (null != tmpDBFile) { + tmpDBFile.delete(); + tmpDBFile = null; + } + } + + /** + * Process the given SQLite DB file + * + * @param sqliteFile - + * + * @return none + */ + private void processSQLiteFile(AbstractFile sqliteFile) { + + tablesDropdownList.removeAllItems(); + + new SwingWorker() { + @Override + protected Boolean doInBackground() throws Exception { + + try { + // Copy the file to temp folder + tmpDBPathName = Case.getCurrentCase().getTempDirectory() + File.separator + sqliteFile.getName() + "-" + sqliteFile.getId(); + tmpDBFile = new File(tmpDBPathName); + ContentUtils.writeToFile(sqliteFile, tmpDBFile); + + // Open copy using JDBC + Class.forName("org.sqlite.JDBC"); //NON-NLS //load JDBC driver + connection = DriverManager.getConnection("jdbc:sqlite:" + tmpDBPathName); //NON-NLS + + 
// Read all table names and schema + return getTables(); + } catch (IOException ex) { + LOGGER.log(Level.SEVERE, "Failed to copy DB file.", ex); //NON-NLS + } catch (SQLException ex) { + LOGGER.log(Level.SEVERE, "Failed to Open DB.", ex); //NON-NLS + } catch (ClassNotFoundException ex) { + LOGGER.log(Level.SEVERE, "Failed to initialize JDBC Sqlite.", ex); //NON-NLS + } + return false; + } + + @Override + protected void done() { + super.done(); + try { + boolean status = get(); + if ((status == true) && (dbTablesMap.size() > 0)) { + dbTablesMap.keySet().forEach((tableName) -> { + tablesDropdownList.addItem(tableName); + }); + } else { + // Populate error message + tablesDropdownList.addItem("No tables found"); + tablesDropdownList.setEnabled(false); + } + } catch (InterruptedException | ExecutionException ex) { + LOGGER.log(Level.SEVERE, "Unexpected exception while opening DB file", ex); //NON-NLS + } + } + }.execute(); + + } + + /** + * Gets the table names and their schema from loaded SQLite db file + * + * @return true if success, false otherwise + */ + private boolean getTables() { + + try { + Statement statement = connection.createStatement(); + + ResultSet resultSet = statement.executeQuery( + "SELECT name, sql FROM sqlite_master " + + " WHERE type= 'table' " + + " ORDER BY name;"); //NON-NLS + + while (resultSet.next()) { + String tableName = resultSet.getString("name"); //NON-NLS + String tableSQL = resultSet.getString("sql"); //NON-NLS + + dbTablesMap.put(tableName, tableSQL); + } + } catch (SQLException e) { + LOGGER.log(Level.SEVERE, "Error getting table names from the DB", e); //NON-NLS + } + return true; + } + + private void selectTable(String tableName) { + if (worker != null && !worker.isDone()) { + worker.cancel(false); + worker = null; + } + + worker = new SwingWorker() { + @Override + protected Integer doInBackground() throws Exception { + + try { + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery( + "SELECT count (*) as count FROM " + tableName); //NON-NLS + + return resultSet.getInt("count"); + } catch (SQLException ex) { + LOGGER.log(Level.SEVERE, "Failed to get data for table.", ex); //NON-NLS + } + //NON-NLS + return 0; + } + + @Override + protected void done() { + super.done(); + try { + + numRows = get(); + numEntriesField.setText(numRows + " entries"); + + currPage = 1; + currPageLabel.setText(Integer.toString(currPage)); + numPagesLabel.setText(Integer.toString((numRows / ROWS_PER_PAGE) + 1)); + + prevPageButton.setEnabled(false); + + + if (numRows > 0) { + nextPageButton.setEnabled(((numRows > ROWS_PER_PAGE))); + readTable(tableName, (currPage - 1) * ROWS_PER_PAGE + 1, ROWS_PER_PAGE); + } else { + nextPageButton.setEnabled(false); + selectedTableView.setupTable(Collections.emptyList()); + } + + } catch (InterruptedException | ExecutionException ex) { + LOGGER.log(Level.SEVERE, "Unexpected exception while reading table.", ex); //NON-NLS + } + } + }; + worker.execute(); + } + + private void readTable(String tableName, int startRow, int numRowsToRead) { + + if (worker != null && !worker.isDone()) { + worker.cancel(false); + worker = null; + } + + worker = new SwingWorker>, Void>() { + @Override + protected ArrayList> doInBackground() throws Exception { + try { + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery( + "SELECT * FROM " + tableName + + " LIMIT " + Integer.toString(numRowsToRead) + + " OFFSET " + Integer.toString(startRow - 1) + ); //NON-NLS + + return 
resultSetToArrayList(resultSet); + } catch (SQLException ex) { + LOGGER.log(Level.SEVERE, "Failed to get data for table " + tableName, ex); //NON-NLS + } + //NON-NLS + return null; + } + + @Override + protected void done() { + + if (isCancelled()) { + return; + } + + super.done(); + try { + ArrayList> rows = get(); + if (Objects.nonNull(rows)) { + selectedTableView.setupTable(rows); + }else{ + selectedTableView.setupTable(Collections.emptyList()); + } + } catch (InterruptedException | ExecutionException ex) { + LOGGER.log(Level.SEVERE, "Unexpected exception while reading table " + tableName, ex); //NON-NLS + } + } + }; + + worker.execute(); + } + + @NbBundle.Messages("SQLiteViewer.BlobNotShown.message=BLOB Data not shown") + private ArrayList> resultSetToArrayList(ResultSet rs) throws SQLException { + ResultSetMetaData metaData = rs.getMetaData(); + int columns = metaData.getColumnCount(); + ArrayList> rowlist = new ArrayList<>(); + while (rs.next()) { + Map row = new LinkedHashMap<>(columns); + for (int i = 1; i <= columns; ++i) { + if (rs.getObject(i) == null) { + row.put(metaData.getColumnName(i), ""); + } else { + if (metaData.getColumnTypeName(i).compareToIgnoreCase("blob") == 0) { + row.put(metaData.getColumnName(i), Bundle.SQLiteViewer_BlobNotShown_message()); + } else { + row.put(metaData.getColumnName(i), rs.getObject(i)); + } + } + } + rowlist.add(row); + } + + return rowlist; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerHex.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerHex.java index a7a97e4b75..7a4df69e9d 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerHex.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerHex.java @@ -453,7 +453,7 @@ public class DataContentViewerHex extends javax.swing.JPanel implements DataCont return; } - Content content = (selectedNode).getLookup().lookup(Content.class); + Content content = DataContentViewerUtility.getDefaultContent(selectedNode); if (content == null) { resetComponent(); return; diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerString.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerString.java index 14cb2eb2c1..936ff3c46b 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerString.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerString.java @@ -452,8 +452,7 @@ public class DataContentViewerString extends javax.swing.JPanel implements DataC return; } - Lookup lookup = selectedNode.getLookup(); - Content content = lookup.lookup(Content.class); + Content content = DataContentViewerUtility.getDefaultContent(selectedNode); if (content != null) { this.setDataView(content, 0); return; diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerUtility.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerUtility.java new file mode 100755 index 0000000000..53491b407e --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerUtility.java @@ -0,0 +1,54 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.corecomponents; + +import org.sleuthkit.datamodel.Content; +import org.openide.nodes.Node; +import org.sleuthkit.datamodel.BlackboardArtifact; + +/** + * Utility classes for content viewers. + * In theory, this would live in the contentviewer package, + * but the initial method was needed only be viewers in + * corecomponents and therefore can stay out of public API. + */ +class DataContentViewerUtility { + /** + * Returns the first non-Blackboard Artifact from a Node. + * Needed for (at least) Hex and Strings that want to view + * all types of content (not just AbstractFile), but don't want + * to display an artifact unless that's the only thing there. + * Scenario is hash hit or interesting item hit. + * + * @param node Node passed into content viewer + * @return highest priority content or null if there is no content + */ + static Content getDefaultContent(Node node) { + Content bbContentSeen = null; + for (Content content : (node).getLookup().lookupAll(Content.class)) { + if (content instanceof BlackboardArtifact) { + bbContentSeen = content; + } + else { + return content; + } + } + return bbContentSeen; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/ViewContextAction.java b/Core/src/org/sleuthkit/autopsy/directorytree/ViewContextAction.java index 95c5f50c23..792252605a 100644 --- a/Core/src/org/sleuthkit/autopsy/directorytree/ViewContextAction.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/ViewContextAction.java @@ -202,7 +202,7 @@ public class ViewContextAction extends AbstractAction { undecoratedParentNode.setChildNodeSelectionInfo(new ContentNodeSelectionInfo(content)); TreeView treeView = treeViewTopComponent.getTree(); treeView.expandNode(parentTreeViewNode); - if (treeViewTopComponent.getSelectedNode().getDisplayName().equals(parentTreeViewNode.getDisplayName())) { + if (treeViewTopComponent.getSelectedNode().equals(parentTreeViewNode)) { //In the case where our tree view already has the destination directory selected //due to an optimization in the ExplorerManager.setExploredContextAndSelection method //the property change we listen for to call DirectoryTreeTopComponent.respondSelection diff --git a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeDetector.java b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeDetector.java index b5ebcae990..795afd0838 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeDetector.java +++ b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeDetector.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2017 Basis Technology Corp. + * Copyright 2011-2018 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -19,22 +19,16 @@ package org.sleuthkit.autopsy.modules.filetypeid; import java.util.ArrayList; -import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.SortedSet; import java.util.TreeSet; -import java.util.logging.Level; import java.util.stream.Collectors; import org.apache.tika.Tika; import org.apache.tika.io.TikaInputStream; import org.apache.tika.mime.MimeTypes; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.services.Blackboard; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.ReadContentInputStream; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; @@ -178,6 +172,8 @@ public class FileTypeDetector { * * @return A MIME type name. If file type could not be detected, or results * were uncertain, octet-stream is returned. + * + */ public String getMIMEType(AbstractFile file) { /* @@ -276,52 +272,30 @@ public class FileTypeDetector { } /** - * Determines whether or not the a file matches a user-defined custom file - * type. If the file matches and corresponds to an interesting files type - * rule, this method has the side effect of creating an interesting files - * hit artifact and indexing that artifact for keyword search. + * Determines whether or not a file matches a user-defined custom file type. * * @param file The file to test. * - * @return The file type name string or null, if no match is detected. - * - * @throws TskCoreException + * @return The MIME type as a string if a match is found; otherwise null. */ private String detectUserDefinedType(AbstractFile file) { + String retValue = null; + for (FileType fileType : userDefinedFileTypes) { if (fileType.matches(file)) { - if (fileType.createInterestingFileHit()) { - try { - BlackboardArtifact artifact; - artifact = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT); - Collection attributes = new ArrayList<>(); - BlackboardAttribute setNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, FileTypeIdModuleFactory.getModuleName(), fileType.getInterestingFilesSetName()); - attributes.add(setNameAttribute); - BlackboardAttribute ruleNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, FileTypeIdModuleFactory.getModuleName(), fileType.getMimeType()); - attributes.add(ruleNameAttribute); - artifact.addAttributes(attributes); - try { - Case.getCurrentCase().getServices().getBlackboard().indexArtifact(artifact); - } catch (Blackboard.BlackboardException ex) { - logger.log(Level.SEVERE, String.format("Unable to index TSK_INTERESTING_FILE_HIT blackboard artifact %d (file obj_id=%d)", artifact.getArtifactID(), file.getId()), ex); //NON-NLS - } - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, String.format("Unable to create TSK_INTERESTING_FILE_HIT artifact for file (obj_id=%d)", file.getId()), ex); //NON-NLS - } - } - return fileType.getMimeType(); + retValue = fileType.getMimeType(); + break; } } - return null; + return retValue; } /** - * Determines whether or not the a file matches a custom file type defined - * by Autopsy. + * Determines whether or not a file matches a custom file type defined by Autopsy. 
* * @param file The file to test. * - * @return The file type name string or null, if no match is detected. + * @return The MIME type as a string if a match is found; otherwise null. */ private String detectAutopsyDefinedType(AbstractFile file) { for (FileType fileType : autopsyDefinedFileTypes) { @@ -393,7 +367,7 @@ public class FileTypeDetector { * * @throws TskCoreException if detection is required and there is a problem * writing the result to the case database. - * @deprecated Use detectMIMEType instead, and call AbstractFile.setMIMEType + * @deprecated Use getMIMEType instead, and call AbstractFile.setMIMEType * and AbstractFile.save to save the result to the file object and the * database. */ @@ -417,7 +391,7 @@ public class FileTypeDetector { * @throws TskCoreException if detection is required and there is a problem * writing the result to the case database. * - * @deprecated Use detectMIMEType instead, and call AbstractFile.setMIMEType + * @deprecated Use getMIMEType instead, and call AbstractFile.setMIMEType * and AbstractFile.save to save the result to the file object and the * database. */ @@ -439,7 +413,7 @@ public class FileTypeDetector { * were uncertain, octet-stream is returned. * * @throws TskCoreException - * @deprecated Use detectMIMEType instead. + * @deprecated Use getMIMEType instead. */ @Deprecated public String detect(AbstractFile file) throws TskCoreException { diff --git a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java index 8e6b3a58b2..be0bcdfbb3 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java @@ -1,15 +1,15 @@ /* * Autopsy Forensic Browser - * - * Copyright 2013-2015 Basis Technology Corp. + * + * Copyright 2013-2018 Basis Technology Corp. * Contact: carrier sleuthkit org - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -18,9 +18,14 @@ */ package org.sleuthkit.autopsy.modules.filetypeid; +import java.util.ArrayList; +import java.util.Collection; import java.util.HashMap; +import java.util.List; import java.util.logging.Level; import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.services.Blackboard; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.FileIngestModule; import org.sleuthkit.autopsy.ingest.IngestJobContext; @@ -29,13 +34,16 @@ import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult; import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardAttribute; +import org.sleuthkit.datamodel.TskCoreException; /** * Detects the type of a file based on signature (magic) values. Posts results * to the blackboard. */ @NbBundle.Messages({ - "CannotRunFileTypeDetection=Unable to run file type detection." 
+ "CannotRunFileTypeDetection=Unable to run file type detection." }) public class FileTypeIdIngestModule implements FileIngestModule { @@ -91,7 +99,12 @@ public class FileTypeIdIngestModule implements FileIngestModule { */ try { long startTime = System.currentTimeMillis(); - fileTypeDetector.getMIMEType(file); + String mimeType = fileTypeDetector.getMIMEType(file); + file.setMIMEType(mimeType); + FileType fileType = detectUserDefinedFileType(file); + if (fileType != null && fileType.createInterestingFileHit()) { + createInterestingFileHit(file, fileType); + } addToTotals(jobId, (System.currentTimeMillis() - startTime)); return ProcessResult.OK; } catch (Exception e) { @@ -100,6 +113,57 @@ public class FileTypeIdIngestModule implements FileIngestModule { } } + /** + * Determines whether or not a file matches a user-defined custom file type. + * + * @param file The file to test. + * + * @return The file type if a match is found; otherwise null. + * + * @throws CustomFileTypesException If there is an issue getting an instance + * of CustomFileTypesManager. + */ + private FileType detectUserDefinedFileType(AbstractFile file) throws CustomFileTypesManager.CustomFileTypesException { + FileType retValue = null; + + CustomFileTypesManager customFileTypesManager = CustomFileTypesManager.getInstance(); + List fileTypesList = customFileTypesManager.getUserDefinedFileTypes(); + for (FileType fileType : fileTypesList) { + if (fileType.matches(file)) { + retValue = fileType; + break; + } + } + + return retValue; + } + + /** + * Create an Interesting File hit using the specified file type rule. + * + * @param file The file from which to generate an artifact. + * @param fileType The file type rule for categorizing the hit. + */ + private void createInterestingFileHit(AbstractFile file, FileType fileType) { + try { + BlackboardArtifact artifact; + artifact = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT); + Collection attributes = new ArrayList<>(); + BlackboardAttribute setNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, FileTypeIdModuleFactory.getModuleName(), fileType.getInterestingFilesSetName()); + attributes.add(setNameAttribute); + BlackboardAttribute ruleNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, FileTypeIdModuleFactory.getModuleName(), fileType.getMimeType()); + attributes.add(ruleNameAttribute); + artifact.addAttributes(attributes); + try { + Case.getCurrentCase().getServices().getBlackboard().indexArtifact(artifact); + } catch (Blackboard.BlackboardException ex) { + logger.log(Level.SEVERE, String.format("Unable to index TSK_INTERESTING_FILE_HIT blackboard artifact %d (file obj_id=%d)", artifact.getArtifactID(), file.getId()), ex); //NON-NLS + } + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Unable to create TSK_INTERESTING_FILE_HIT artifact for file (obj_id=%d)", file.getId()), ex); //NON-NLS + } + } + @Override public void shutDown() { /** diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/AddContentToHashDbAction.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/AddContentToHashDbAction.java index 5ec70ce3b9..6488661987 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/AddContentToHashDbAction.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/AddContentToHashDbAction.java @@ -149,7 +149,7 @@ final class AddContentToHashDbAction extends AbstractAction implements Presenter if (null != md5Hash) { // 
don't let them add the hash for an empty file to the DB if (HashUtility.isNoDataMd5(md5Hash)) { //NON-NLS - Logger.getLogger(AddContentToHashDbAction.class.getName()).log(Level.INFO, "Not adding " + file.getName() + " to database (empty content)"); //NON-NLS + Logger.getLogger(AddContentToHashDbAction.class.getName()).log(Level.INFO, "Not adding " + file.getName() + " to hash set (empty content)"); //NON-NLS JOptionPane.showMessageDialog(null, NbBundle.getMessage(this.getClass(), "AddContentToHashDbAction.addFilesToHashSet.unableToAddFileEmptyMsg", @@ -162,7 +162,7 @@ final class AddContentToHashDbAction extends AbstractAction implements Presenter try { hashSet.addHashes(file); } catch (TskCoreException ex) { - Logger.getLogger(AddContentToHashDbAction.class.getName()).log(Level.SEVERE, "Error adding to hash database", ex); //NON-NLS + Logger.getLogger(AddContentToHashDbAction.class.getName()).log(Level.SEVERE, "Error adding to hash set", ex); //NON-NLS JOptionPane.showMessageDialog(null, NbBundle.getMessage(this.getClass(), "AddContentToHashDbAction.addFilesToHashSet.unableToAddFileMsg", diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties index 9c402d98d4..b1600cd89d 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties @@ -1,11 +1,11 @@ OpenIDE-Module-Display-Category=Ingest Module OpenIDE-Module-Long-Description=\ - Hash Database ingest module. \n\n\ - The ingest module analyzes files in the disk image and marks them as "known" (based on NSRL database lookup for "known" files) and "bad / interesting" (based on one or more databases supplied by the user).\n\n\ - The module also contains additional non-ingest tools that are integrated in the GUI, such as file lookup by hash and hash database configuration. + Hash Set ingest module. \n\n\ + The ingest module analyzes files in the disk image and marks them as "known" (based on NSRL hashset lookup for "known" files) and "bad / interesting" (based on one or more hash sets supplied by the user).\n\n\ + The module also contains additional non-ingest tools that are integrated in the GUI, such as file lookup by hash and hash set configuration. OpenIDE-Module-Name=HashDatabases -OptionsCategory_Name_HashDatabase=Hash Databases -OptionsCategory_Keywords_HashDatabase=Hash Databases +OptionsCategory_Name_HashDatabase=Hash Sets +OptionsCategory_Keywords_HashDatabase=Hash Sets HashDbSearchPanel.hashTable.columnModel.title0=MD5 Hashes HashDbSearchPanel.addButton.text=Add Hash HashDbSearchPanel.hashField.text= @@ -16,11 +16,11 @@ HashDbSearchPanel.titleLabel.text=Search for files with the following MD5 hash(e HashDbSearchPanel.errorField.text=Error: Not all files have been hashed. 
HashDbSearchPanel.saveBox.text=Remember Hashes HashDbSearchPanel.cancelButton.text=Cancel -OpenIDE-Module-Short-Description=Hash Database Ingest Module and hash db tools +OpenIDE-Module-Short-Description=Hash Set Ingest Module and hash set tools HashDbImportDatabaseDialog.jLabel1.text=Name: HashDbImportDatabaseDialog.databasePathTextField.text= HashDbImportDatabaseDialog.knownBadRadioButton.text=Notable -HashDbImportDatabaseDialog.jLabel2.text=Type of database\: +HashDbImportDatabaseDialog.jLabel2.text=Type of hash set\: HashDbImportDatabaseDialog.okButton.text=OK HashDbImportDatabaseDialog.cancelButton.text=Cancel HashDbCreateDatabaseDialog.jLabel2.text=Type: @@ -28,13 +28,13 @@ HashDbCreateDatabaseDialog.knownBadRadioButton.text=Notable HashDbCreateDatabaseDialog.cancelButton.text=Cancel ModalNoButtons.CURRENTDB_LABEL.text=(CurrentDb) ModalNoButtons.CURRENTLYON_LABEL.text=Currently Indexing x of y -ModalNoButtons.GO_GET_COFFEE_LABEL.text=Hash databases are currently being indexed, this may take some time. +ModalNoButtons.GO_GET_COFFEE_LABEL.text=Hash sets are currently being indexed, this may take some time. ModalNoButtons.CANCEL_BUTTON.text=Cancel HashDbImportDatabaseDialog.knownRadioButton.text=Known (NSRL or other) HashDbCreateDatabaseDialog.knownRadioButton.text=Known HashDbCreateDatabaseDialog.saveAsButton.text=Save As... HashDbCreateDatabaseDialog.hashSetNameTextField.text= -HashDbImportDatabaseDialog.jLabel3.text=Database Path: +HashDbImportDatabaseDialog.jLabel3.text=Hash Set Path: HashDbCreateDatabaseDialog.sendIngestMessagesCheckbox.text=Send ingest inbox messages for each hit HashDbImportDatabaseDialog.sendIngestMessagesCheckbox.text=Send ingest inbox message for each hit HashDbImportDatabaseDialog.hashSetNameTextField.text= @@ -42,17 +42,17 @@ HashDbImportDatabaseDialog.openButton.text=Open... HashDbCreateDatabaseDialog.jLabel3.text=Name: HashDbCreateDatabaseDialog.okButton.text=OK HashDbCreateDatabaseDialog.databasePathTextField.text= -AddContentToHashDbAction.ContentMenu.noHashDbsConfigd=No hash databases configured -AddContentToHashDbAction.ContentMenu.createDbItem=Create database... -AddContentToHashDbAction.addFilesToHashSet.addToHashDbErr1.text=Add to Hash Database Error -AddContentToHashDbAction.addFilesToHashSet.addToHashDbErr2.text=Add to Hash Database Error -AddContentToHashDbAction.addFilesToHashSet.addToHashDbErr3.text=Add to Hash Database Error -AddContentToHashDbAction.addFilesToHashSet.unableToAddFileMsg=Unable to add {0} to the hash database. -AddContentToHashDbAction.addFilesToHashSet.unableToAddFileEmptyMsg=Unable to add {0} to the hash database. File has no content. -AddContentToHashDbAction.addFilesToHashSet.unableToAddFileSzMsg=Unable to add the {0} to the hash database. Hashes have not been calculated. Please configure and run an appropriate ingest module. +AddContentToHashDbAction.ContentMenu.noHashDbsConfigd=No hash sets configured +AddContentToHashDbAction.ContentMenu.createDbItem=Create hash set... +AddContentToHashDbAction.addFilesToHashSet.addToHashDbErr1.text=Add to Hash Set Error +AddContentToHashDbAction.addFilesToHashSet.addToHashDbErr2.text=Add to Hash Set Error +AddContentToHashDbAction.addFilesToHashSet.addToHashDbErr3.text=Add to Hash Set Error +AddContentToHashDbAction.addFilesToHashSet.unableToAddFileMsg=Unable to add {0} to the hash set. +AddContentToHashDbAction.addFilesToHashSet.unableToAddFileEmptyMsg=Unable to add {0} to the hash set. File has no content. 
+AddContentToHashDbAction.addFilesToHashSet.unableToAddFileSzMsg=Unable to add the {0} to the hash set. Hashes have not been calculated. Please configure and run an appropriate ingest module. HashDatabaseOptionsPanelController.moduleErr=Module Error HashDatabaseOptionsPanelController.moduleErrMsg=A module caused an error listening to HashDatabaseOptionsPanelController updates. See log to determine which module. Some data could be incomplete. -HashDbConfigPanel.noSelectionText=No database selected +HashDbConfigPanel.noSelectionText= HashDbConfigPanel.errorGettingPathText=Error occurred getting path HashDbConfigPanel.errorGettingIndexStatusText=Error occurred getting status HashDbConfigPanel.indexButtonText.index=Index @@ -62,33 +62,33 @@ HashDbConfigPanel.indexStatusText.indexOnly=Index only HashDbConfigPanel.indexStatusText.indexed=Indexed HashDbConfigPanel.indexButtonText.reIndex=Re-Index HashDbConfigPanel.indexStatusText.noIndex=No index -HashDbConfigPanel.dbsNotIndexedMsg=The following databases are not indexed, would you like to index them now? \n {0} -HashDbConfigPanel.dbNotIndexedMsg=The following database is not indexed, would you like to index it now? \n{0} -HashDbConfigPanel.unindexedDbsMsg=Unindexed databases -HashDbConfigPanel.allUnindexedDbsRmFromListMsg=All unindexed databases will be removed from the list +HashDbConfigPanel.dbsNotIndexedMsg=The following hash sets are not indexed, would you like to index them now? \n {0} +HashDbConfigPanel.dbNotIndexedMsg=The following hash set is not indexed, would you like to index it now? \n{0} +HashDbConfigPanel.unindexedDbsMsg=Unindexed hash sets +HashDbConfigPanel.allUnindexedDbsRmFromListMsg=All unindexed hash sets will be removed from the list HashDbConfigPanel.nameColLbl=Name HashDbConfigPanel.editingCellsNotSupportedMsg=Editing of cells is not supported HashDbCreateDatabaseDialog.defaultFileName=hashset -HashDbCreateDatabaseDialog.createHashDbMsg=Create Hash Database -HashDbCreateDatabaseDialog.hashDbMustHaveFileExtensionMsg=The hash database file must have a .{0} extension. +HashDbCreateDatabaseDialog.createHashDbMsg=Create Hash Set +HashDbCreateDatabaseDialog.hashDbMustHaveFileExtensionMsg=The hash set file must have a .{0} extension. HashDbCreateDatabaseDialog.fileNameErr=File Name Error HashDbCreateDatabaseDialog.fileNameAlreadyExistsMsg=A file with this name already exists. Please choose a new file name. HashDbCreateDatabaseDialog.fileExistsErr=File Already Exists Error HashDbCreateDatabaseDialog.mustEnterHashSetNameMsg=A hash set name must be entered. -HashDbCreateDatabaseDialog.createHashDbErr=Create Hash Database Error -HashDbCreateDatabaseDialog.mustEnterHashDbPathMsg=A database path must be entered. -HashDbCreateDatabaseDialog.errMsg.hashDbCreationErr=Hash database creation error -HashDbCreateDatabaseDialog.cannotCreateFileAtLocMsg=Cannot create a hash database file at the selected location. -HashDbCreateDatabaseDialog.failedToCreateHashDbMsg=Failed to create the hash database. -HashDbImportDatabaseDialog.importHashDbMsg=Import Hash Database -HashDbImportDatabaseDialog.fileNameExtFilter.text=Hash Database File -HashDbImportDatabaseDialog.failedToGetDbPathMsg=Failed to get the path of the selected database. -HashDbImportDatabaseDialog.importHashDbErr=Import Hash Database Error -HashDbImportDatabaseDialog.mustSelectHashDbFilePathMsg=A hash database file path must be selected. -HashDbImportDatabaseDialog.hashDbDoesNotExistMsg=The selected hash database does not exist. 
-HashDbImportDatabaseDialog.errorMessage.failedToOpenHashDbMsg=Failed to open hash database at {0}. +HashDbCreateDatabaseDialog.createHashDbErr=Create Hash Set Error +HashDbCreateDatabaseDialog.mustEnterHashDbPathMsg=A hash set path must be entered. +HashDbCreateDatabaseDialog.errMsg.hashDbCreationErr=Hash set creation error +HashDbCreateDatabaseDialog.cannotCreateFileAtLocMsg=Cannot create a hash set file at the selected location. +HashDbCreateDatabaseDialog.failedToCreateHashDbMsg=Failed to create the hash set. +HashDbImportDatabaseDialog.importHashDbMsg=Import Hash Set +HashDbImportDatabaseDialog.fileNameExtFilter.text=Hash Set File +HashDbImportDatabaseDialog.failedToGetDbPathMsg=Failed to get the path of the selected hash set. +HashDbImportDatabaseDialog.importHashDbErr=Import Hash Set Error +HashDbImportDatabaseDialog.mustSelectHashDbFilePathMsg=A hash set file path must be selected. +HashDbImportDatabaseDialog.hashDbDoesNotExistMsg=The selected hash set does not exist. +HashDbImportDatabaseDialog.errorMessage.failedToOpenHashDbMsg=Failed to open hash set at {0}. HashDbIngestModule.moduleName=Hash Lookup -HashDbIngestModule.moduleDescription=Identifies known and notable files using supplied hash databases, such as a standard NSRL database. +HashDbIngestModule.moduleDescription=Identifies known and notable files using supplied hash sets, such as a standard NSRL hash set. HashDbIngestModule.fileReadErrorMsg=Read Error\: {0} HashDbIngestModule.calcHashValueErr=Error encountered while calculating the hash value for {0}. HashDbIngestModule.hashLookupErrorMsg=Hash Lookup Error\: {0} @@ -97,25 +97,25 @@ HashDbIngestModule.lookingUpKnownBadHashValueErr=Error encountered while looking HashDbIngestModule.lookingUpKnownHashValueErr=Error encountered while looking up known hash value for {0}. HashDbIngestModule.postToBB.fileName=File Name HashDbIngestModule.postToBB.md5Hash=MD5 Hash -HashDbIngestModule.postToBB.hashsetName=Hashset Name +HashDbIngestModule.postToBB.hashsetName=Hash Set Name HashDbIngestModule.postToBB.knownBadMsg=Notable\: {0} HashDbIngestModule.complete.knownBadsFound=Notables found\: HashDbIngestModule.complete.totalCalcTime=Total Calculation Time HashDbIngestModule.complete.totalLookupTime=Total Lookup Time -HashDbIngestModule.complete.databasesUsed=Databases Used\: +HashDbIngestModule.complete.databasesUsed=Hash Sets Used\: HashDbIngestModule.complete.hashLookupResults=Hash Lookup Results HashDbManager.moduleErrorListeningToUpdatesMsg=A module caused an error listening to HashDbManager updates. See log to determine which module. Some data could be incomplete. HashDbManager.replacingDuplicateHashsetNameMsg=Duplicate hash set name {0} found.\nReplacing with {1}. -HashDbManager.openHashDbErr=Open Hash Database Error -HashDbManager.unableToOpenHashDbMsg=Unable to open {0} hash database. +HashDbManager.openHashDbErr=Open Hash Set Error +HashDbManager.unableToOpenHashDbMsg=Unable to open {0} hash set. HashDbManager.savedBackupOfOldConfigMsg={0}\nA backup copy of the old configuration has been saved as\n{1} -HashDbManager.baseMessage.updatedFormatHashDbConfig=The format of the hash database configuration file has been updated. +HashDbManager.baseMessage.updatedFormatHashDbConfig=The format of the hash set configuration file has been updated. HashDbManager.msgBoxTitle.confFileFmtChanged=Configuration File Format Changed -HashDbManager.dlgMsg.dbNotFoundAtLoc=Database {0} could not be found at location\n{1}\nWould you like to search for the file? 
-HashDbManager.dlgTitle.MissingDb=Missing Database +HashDbManager.dlgMsg.dbNotFoundAtLoc=Hash set {0} could not be found at location\n{1}\nWould you like to search for the file? +HashDbManager.dlgTitle.MissingDb=Missing Hash Set HashDbManager.progress.indexingHashSet=Indexing {0} -HashDbManager.dlgMsg.errorIndexingHashSet=Error indexing {0} hash database. -HashDbManager.hashDbIndexingErr=Hash Database Indexing Error +HashDbManager.dlgMsg.errorIndexingHashSet=Error indexing {0} hash set. +HashDbManager.hashDbIndexingErr=Hash Set Indexing Error HashDbPanelSearchAction.actionName=File Search by MD5 Hash HashDbSearchAction.dlgMsg.noFilesHaveMD5Calculated=No files currently have an MD5 hash calculated, run HashDB ingest first. HashDbSearchManager.MD5HashSearch=MD5 Hash Search @@ -128,56 +128,56 @@ HashDbSearchPanel.errorText.invalidMD5HashMsg=Error\: That is not a valid MD5 ha HashDbSearchThread.progress.cancellingSearch={0} (Cancelling...) HashDbSearchThread.name.searching=Searching HashDbSearchThread.noMoreFilesWithMD5Msg=No other files with the same MD5 hash were found. -ModalNoButtons.indexingDbsTitle=Indexing databases -ModalNoButtons.indexingDbTitle=Indexing database -ModalNoButtons.exitHashDbIndexingMsg=You are about to exit out of indexing your hash databases. \n\ +ModalNoButtons.indexingDbsTitle=Indexing hash sets +ModalNoButtons.indexingDbTitle=Indexing hash set +ModalNoButtons.exitHashDbIndexingMsg=You are about to exit out of indexing your hash sets. \n\ The generated index will be left unusable. If you choose to continue,\n\ please delete the corresponding -md5.idx file in the hash folder.\n\ Exit indexing? ModalNoButtons.dlgTitle.unfinishedIndexing=Unfinished Indexing -ModalNoButtons.indexThis.currentlyIndexing1Db=Currently indexing 1 database +ModalNoButtons.indexThis.currentlyIndexing1Db=Currently indexing 1 hash set ModalNoButtons.indexThese.currentlyIndexing1OfNDbs=Currently indexing 1 of {0} ModalNoButtons.propChg.currentlyIndexingXofN=Currently indexing {0} of {1} -HashDbManager.duplicateHashSetNameExceptionMsg=The hash set name ''{0}'' has already been used for another hash database. -HashDbManager.hashDbDoesNotExistExceptionMsg=No hash database found at\n{0} +HashDbManager.duplicateHashSetNameExceptionMsg=The hash set name ''{0}'' has already been used for another hash set. +HashDbManager.hashDbDoesNotExistExceptionMsg=No hash set found at\n{0} HashDbManager.hashDbFileExistsExceptionMsg=A file already exists at\n{0} -HashDbManager.hashDbAlreadyAddedExceptionMsg=The hash database at\n{0}\nhas already been created or imported. -HashDbManager.illegalHashDbFileNameExtensionMsg=The hash database file name must have a .{0} extension. +HashDbManager.hashDbAlreadyAddedExceptionMsg=The hash set at\n{0}\nhas already been created or imported. +HashDbManager.illegalHashDbFileNameExtensionMsg=The hash set file name must have a .{0} extension. 
HashDbManager.moduleErr=Module Error HashDbManager.knownBad.text=Notable HashDbManager.known.text=Known -HashDbManager.fileNameExtensionFilter.title=Hash Database File +HashDbManager.fileNameExtensionFilter.title=Hash Set File HashDbSearchAction.dlgMsg.title=File Search by MD5 Hash HashDbSearchAction.getName.text=Hash Search HashDbSearchPanel.dlgMsg.title=File Search by MD5 Hash -AddContentToHashDbAction.singleSelectionName=Add file to hash database -AddContentToHashDbAction.multipleSelectionName=Add files to hash database +AddContentToHashDbAction.singleSelectionName=Add file to hash set +AddContentToHashDbAction.multipleSelectionName=Add files to hash set HashDbManager.ingestRunningExceptionMsg=Ingest is ongoing; this service will be unavailable until it finishes. HashDbManager.saveErrorExceptionMsg=Error saving hash configuration -HashLookupSettingsPanel.jButton3.text=Import Database +HashLookupSettingsPanel.jButton3.text=Import Hash Set HashLookupSettingsPanel.jLabel6.text=Type: HashLookupSettingsPanel.jLabel4.text=Location: HashLookupSettingsPanel.jLabel2.text=Name: -HashLookupModuleSettingsPanel.alwaysCalcHashesCheckbox.text=Calculate MD5 even if no hash database is selected -HashLookupModuleSettingsPanel.knownHashDbsLabel.text=Select known hash databases to use: -HashLookupModuleSettingsPanel.knownBadHashDbsLabel.text=Select notable hash databases to use: +HashLookupModuleSettingsPanel.alwaysCalcHashesCheckbox.text=Calculate MD5 even if no hash set is selected +HashLookupModuleSettingsPanel.knownHashDbsLabel.text=Select known hash sets to use: +HashLookupModuleSettingsPanel.knownBadHashDbsLabel.text=Select notable hash sets to use: AddContentToHashDbAction.addFilesToHashSet.files=files AddContentToHashDbAction.addFilesToHashSet.file=file HashDbManager.errCreatingIndex.title=Error creating index HashDbManager.errCreatingIndex.msg=Error creating index\: {0} HashLookupModuleFactory.getIngestJobSettingsPanel.exception.msg=Expected settings argument to be instanceof HashLookupModuleSettings HashLookupModuleFactory.createFileIngestModule.exception.msg=Expected settings argument to be instanceof HashLookupModuleSettings -HashLookupModuleSettingsPanel.alwaysCalcHashesCheckbox.toolTipText=Calculate MD5 even if no hash database is selected +HashLookupModuleSettingsPanel.alwaysCalcHashesCheckbox.toolTipText=Calculate MD5 even if no hash set is selected HashDbSearchPanel.hashTable.defaultModel.title.text=MD5 Hashes -AddHashValuesToDatabaseDialog.JDialog.Title=Add Hashes to Database +AddHashValuesToDatabaseDialog.JDialog.Title=Add Hashes to Hash Set AddHashValuesToDatabaseDialog.instructionLabel.text_1=Paste MD5 hash values (one per line) below: AddHashValuesToDatabaseDialog.cancelButton.text_2=Cancel -AddHashValuesToDatabaseDialog.AddValuesToHashDatabaseButton.text_2=Add Hashes to Database +AddHashValuesToDatabaseDialog.AddValuesToHashDatabaseButton.text_2=Add Hashes to Hash Set AddHashValuesToDatabaseDialog.pasteFromClipboardButton.text_2=Paste From Clipboard AddHashValuesToDatabaseProgressDialog.okButton.text=OK AddHashValuesToDatabaseProgressDialog.statusLabel.text=status -AddHashValuesToDatabaseProgressDialog.title=Add Hashes to Database Progress -AddHashValuesToDatabaseDialog.title=Add Hashes to Database +AddHashValuesToDatabaseProgressDialog.title=Add Hashes to Hash Set Progress +AddHashValuesToDatabaseDialog.title=Add Hashes to Hash Set AddHashValuesToDatabaseProgressDialog.showErrorsButton.text=Show Errors AddHashValuesToDatabaseProgressDialog.addHashValuesToDatabase.parsing=Parsing 
text for MD5 hashes... AddHashValuesToDatabaseProgressDialog.addHashValuesToDatabase.invalidHash=The input contains invalid hash. @@ -185,47 +185,47 @@ AddHashValuesToDatabaseProgressDialog.addHashValuesToDatabase.invaliHash.msg=Inv AddHashValuesToDatabaseProgressDialog.addHashValuesToDatabase.noHashesToAdd=There are no hashes to add. AddHashValuesToDatabaseProgressDialog.addHashValuesToDatabase.success={0} Hashes added successfully. AddHashValuesToDatabaseProgressDialog.addHashValuesToDatabase.errorAddingValidHash=There is an error adding valid hashes. -AddHashValuesToDatabaseProgressDialog.addHashValuesToDatabase.errorAddingValidHash.msg=Error adding valid hashes to the database: +AddHashValuesToDatabaseProgressDialog.addHashValuesToDatabase.errorAddingValidHash.msg=Error adding valid hashes to the hash set: HashLookupSettingsPanel.ingestWarningLabel.text=Ingest is ongoing, some settings will be unavailable until it finishes. -HashLookupSettingsPanel.addHashesToDatabaseButton.text=Add Hashes to Database -HashLookupSettingsPanel.indexPathLabel.text=No database selected +HashLookupSettingsPanel.addHashesToDatabaseButton.text=Add Hashes to Hash Set +HashLookupSettingsPanel.indexPathLabel.text= HashLookupSettingsPanel.indexPathLabelLabel.text=Index Path: HashLookupSettingsPanel.createDatabaseButton.toolTipText= -HashLookupSettingsPanel.createDatabaseButton.text=New database +HashLookupSettingsPanel.createDatabaseButton.text=New Hash Set HashLookupSettingsPanel.optionsLabel.text=Options HashLookupSettingsPanel.informationLabel.text=Information HashLookupSettingsPanel.sendIngestMessagesCheckBox.text=Send ingest inbox message for each hit HashLookupSettingsPanel.indexButton.text=Index HashLookupSettingsPanel.indexLabel.text=Index Status: -HashLookupSettingsPanel.hashDbIndexStatusLabel.text=No database selected -HashLookupSettingsPanel.hashDbTypeLabel.text=No database selected +HashLookupSettingsPanel.hashDbIndexStatusLabel.text= +HashLookupSettingsPanel.hashDbTypeLabel.text= HashLookupSettingsPanel.typeLabel.text=Type: -HashLookupSettingsPanel.locationLabel.text=Database Path: -HashLookupSettingsPanel.hashDbLocationLabel.text=No database selected -HashLookupSettingsPanel.hashDbNameLabel.text=No database selected +HashLookupSettingsPanel.locationLabel.text=Hash Set Path: +HashLookupSettingsPanel.hashDbLocationLabel.text= +HashLookupSettingsPanel.hashDbNameLabel.text= HashLookupSettingsPanel.nameLabel.text=Name: -HashLookupSettingsPanel.hashDatabasesLabel.text=Hash Databases: +HashLookupSettingsPanel.hashDatabasesLabel.text=Hash Sets: HashLookupSettingsPanel.importDatabaseButton.toolTipText= -HashLookupSettingsPanel.importDatabaseButton.text=Import database -HashLookupSettingsPanel.deleteDatabaseButton.text=Delete database -ImportHashDatabaseDialog.lbFilePath.text=Database Path: -ImportHashDatabaseDialog.tfDatabaseName.tooltip=Name for this database -ImportHashDatabaseDialog.tfDatabaseVersion.tooltip.text=Database Version Number -ImportHashDatabaseDialog.tfDatabaseName.tooltip=Name for this database -ImportHashDatabaseDialog.tfDatabaseVersion.tooltip.text=Database Version Number +HashLookupSettingsPanel.importDatabaseButton.text=Import Hash Set +HashLookupSettingsPanel.deleteDatabaseButton.text=Delete Hash Set +ImportHashDatabaseDialog.lbFilePath.text=Hash Set Path: +ImportHashDatabaseDialog.tfDatabaseName.tooltip=Name for this hash set +ImportHashDatabaseDialog.tfDatabaseVersion.tooltip.text=Hash Set Version Number +ImportHashDatabaseDialog.tfDatabaseName.tooltip=Name for this hash set 
+ImportHashDatabaseDialog.tfDatabaseVersion.tooltip.text=Hash Set Version Number ImportCentralRepoDbProgressDialog.lbProgress.text=Starting import... ImportCentralRepoDbProgressDialog.bnOk.text=OK ImportCentralRepoDbProgressDialog.bnCancel.text=Cancel HashLookupSettingsPanel.versionLabel.text_1=Version: -HashLookupSettingsPanel.hashDbVersionLabel.text_1=No database selected +HashLookupSettingsPanel.hashDbVersionLabel.text_1= HashLookupSettingsPanel.orgLabel.text_1=Organization: -HashLookupSettingsPanel.hashDbOrgLabel.text_1=No database selected +HashLookupSettingsPanel.hashDbOrgLabel.text_1= HashLookupSettingsPanel.readOnlyLabel.text_1=Read only: -HashLookupSettingsPanel.hashDbReadOnlyLabel.text_1=No database selected +HashLookupSettingsPanel.hashDbReadOnlyLabel.text_1= ImportCentralRepoDbProgressDialog.jLabel1.text=Importing hash set into the central repository HashDbImportDatabaseDialog.lbVersion.text=Version: HashDbImportDatabaseDialog.lbOrg.text=Source Organization: -HashDbImportDatabaseDialog.readOnlyCheckbox.text=Make database read-only +HashDbImportDatabaseDialog.readOnlyCheckbox.text=Make hash set read-only HashDbImportDatabaseDialog.orgButton.text=Manage Organizations HashDbImportDatabaseDialog.versionTextField.text=1.0 HashDbImportDatabaseDialog.fileTypeRadioButton.text=Local @@ -236,4 +236,4 @@ HashDbCreateDatabaseDialog.fileTypeRadioButton.text=Local HashDbCreateDatabaseDialog.centralRepoRadioButton.text=Remote (Central Repository) HashDbCreateDatabaseDialog.lbOrg.text=Source Organization: HashDbCreateDatabaseDialog.orgButton.text=Manage Organizations -HashDbCreateDatabaseDialog.databasePathLabel.text=Database Path: +HashDbCreateDatabaseDialog.databasePathLabel.text=Hash Set Path: diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDatabaseOptionsPanelController.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDatabaseOptionsPanelController.java index ed77c459c5..a4d85eef9b 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDatabaseOptionsPanelController.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDatabaseOptionsPanelController.java @@ -35,7 +35,7 @@ import org.sleuthkit.autopsy.coreutils.Logger; keywordsCategory = "HashDatabase", id = "HashDatabase") // moved messages to Bundle.properties -//@org.openide.util.NbBundle.Messages({"OptionsCategory_Name_HashDatabase=Hash Database", "OptionsCategory_Keywords_HashDatabase=Hash Database"}) +//@org.openide.util.NbBundle.Messages({"OptionsCategory_Name_HashDatabase=Hash Set", "OptionsCategory_Keywords_HashDatabase=Hash Set"}) public final class HashDatabaseOptionsPanelController extends OptionsPanelController { private HashLookupSettingsPanel panel; diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java index 0c5d94277b..aff3e7d196 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java @@ -420,7 +420,7 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog { } ModuleSettings.setConfigSetting(ModuleSettings.MAIN_SETTINGS, LAST_FILE_PATH_KEY, databaseFile.getParent()); } catch (IOException ex) { - Logger.getLogger(HashDbImportDatabaseDialog.class.getName()).log(Level.SEVERE, "Failed to get path of selected database", ex); //NON-NLS + 
Logger.getLogger(HashDbImportDatabaseDialog.class.getName()).log(Level.SEVERE, "Failed to get path of selected hash set", ex); //NON-NLS JOptionPane.showMessageDialog(this, NbBundle.getMessage(this.getClass(), "HashDbImportDatabaseDialog.failedToGetDbPathMsg")); diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java index 696a105416..022cda8a9c 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java @@ -51,9 +51,9 @@ import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.TskException; @NbBundle.Messages({ - "HashDbIngestModule.noKnownBadHashDbSetMsg=No notable hash database set.", + "HashDbIngestModule.noKnownBadHashDbSetMsg=No notable hash set.", "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn=Notable file search will not be executed.", - "HashDbIngestModule.noKnownHashDbSetMsg=No known hash database set.", + "HashDbIngestModule.noKnownHashDbSetMsg=No known hash set.", "HashDbIngestModule.knownFileSearchWillNotExecuteWarn=Known file search will not be executed." }) public class HashDbIngestModule implements FileIngestModule { @@ -95,7 +95,7 @@ public class HashDbIngestModule implements FileIngestModule { public void startUp(org.sleuthkit.autopsy.ingest.IngestJobContext context) throws IngestModuleException { jobId = context.getJobId(); if (!hashDbManager.verifyAllDatabasesLoadedCorrectly()) { - throw new IngestModuleException("Could not load all hash databases"); + throw new IngestModuleException("Could not load all hash sets"); } updateEnabledHashSets(hashDbManager.getKnownBadFileHashSets(), knownBadHashSets); updateEnabledHashSets(hashDbManager.getKnownFileHashSets(), knownHashSets); @@ -136,7 +136,7 @@ public class HashDbIngestModule implements FileIngestModule { enabledHashSets.add(db); } } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Error getting index status for " + db.getDisplayName()+ " hash database", ex); //NON-NLS + logger.log(Level.WARNING, "Error getting index status for " + db.getDisplayName()+ " hash set", ex); //NON-NLS } } } diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java index c032bdc83e..78981d4787 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java @@ -276,14 +276,14 @@ public class HashDbManager implements PropertyChangeListener { boolean readOnly) throws TskCoreException{ if(! EamDb.isEnabled()){ - throw new TskCoreException("Could not load central repository database " + hashSetName + " - central repository is not enabled"); + throw new TskCoreException("Could not load central repository hash set " + hashSetName + " - central repository is not enabled"); } CentralRepoHashSet db = new CentralRepoHashSet(hashSetName, version, referenceSetID, searchDuringIngest, sendIngestMessages, knownFilesType, readOnly); if(! 
db.isValid()){ - throw new TskCoreException("Error finding database " + hashSetName + " in central repository"); + throw new TskCoreException("Error finding hash set " + hashSetName + " in central repository"); } // Add the hash database to the collection @@ -320,7 +320,7 @@ public class HashDbManager implements PropertyChangeListener { hashSetPaths.add(indexPath); } } catch (TskCoreException ex) { - Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error getting index path of " + hashDb.getHashSetName() + " hash database after indexing", ex); //NON-NLS + Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error getting index path of " + hashDb.getHashSetName() + " hash set after indexing", ex); //NON-NLS } } } @@ -363,7 +363,7 @@ public class HashDbManager implements PropertyChangeListener { hashSetPaths.remove(hashDatabase.getIndexPath()); } } catch (TskCoreException ex) { - Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error getting index path of " + hashDatabase.getHashSetName() + " hash database when removing the database", ex); //NON-NLS + Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error getting index path of " + hashDatabase.getHashSetName() + " hash set when removing the hash set", ex); //NON-NLS } try { @@ -371,13 +371,13 @@ public class HashDbManager implements PropertyChangeListener { hashSetPaths.remove(hashDatabase.getDatabasePath()); } } catch (TskCoreException ex) { - Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error getting database path of " + hashDatabase.getHashSetName() + " hash database when removing the database", ex); //NON-NLS + Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error getting hash set path of " + hashDatabase.getHashSetName() + " hash set when removing the hash set", ex); //NON-NLS } try { hashDatabase.close(); } catch (TskCoreException ex) { - Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error closing " + hashDb.getHashSetName() + " hash database when removing the database", ex); //NON-NLS + Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error closing " + hashDb.getHashSetName() + " hash set when removing the hash set", ex); //NON-NLS } } @@ -480,7 +480,7 @@ public class HashDbManager implements PropertyChangeListener { updateableDbs.add(db); } } catch (TskCoreException ex) { - Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error checking updateable status of " + db.getHashSetName() + " hash database", ex); //NON-NLS + Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error checking updateable status of " + db.getHashSetName() + " hash set", ex); //NON-NLS } } return updateableDbs; @@ -532,7 +532,7 @@ public class HashDbManager implements PropertyChangeListener { try { ((SleuthkitHashSet)database).close(); } catch (TskCoreException ex) { - Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error closing " + database.getHashSetName() + " hash database", ex); //NON-NLS + Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error closing " + database.getHashSetName() + " hash set", ex); //NON-NLS } } } @@ -554,7 +554,7 @@ public class HashDbManager implements PropertyChangeListener { * * @param settings The settings to configure. 
*/ - @Messages({"# {0} - database name", "HashDbManager.noDbPath.message=Couldn't get valid database path for: {0}", + @Messages({"# {0} - hash set name", "HashDbManager.noDbPath.message=Couldn't get valid hash set path for: {0}", "HashDbManager.centralRepoLoadError.message=Error loading central repository hash sets"}) private void configureSettings(HashLookupSettings settings) { allDatabasesLoadedCorrectly = true; @@ -578,7 +578,7 @@ public class HashDbManager implements PropertyChangeListener { } } } catch (TskCoreException ex) { - Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error opening hash database", ex); //NON-NLS + Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error opening hash set", ex); //NON-NLS JOptionPane.showMessageDialog(null, NbBundle.getMessage(this.getClass(), "HashDbManager.unableToOpenHashDbMsg", hashDbInfo.getHashSetName()), @@ -592,7 +592,7 @@ public class HashDbManager implements PropertyChangeListener { try{ updateHashSetsFromCentralRepository(); } catch (TskCoreException ex){ - Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error opening hash database", ex); //NON-NLS + Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error opening hash set", ex); //NON-NLS JOptionPane.showMessageDialog(null, Bundle.HashDbManager_centralRepoLoadError_message(), @@ -615,7 +615,7 @@ public class HashDbManager implements PropertyChangeListener { allDatabasesLoadedCorrectly = true; } catch (HashLookupSettings.HashLookupSettingsException ex) { allDatabasesLoadedCorrectly = false; - logger.log(Level.SEVERE, "Could not overwrite hash database settings.", ex); + logger.log(Level.SEVERE, "Could not overwrite hash set settings.", ex); } } } @@ -1232,9 +1232,10 @@ public class HashDbManager implements PropertyChangeListener { } else { type = TskData.FileKnown.KNOWN; } - EamGlobalFileInstance fileInstance = new EamGlobalFileInstance(referenceSetID, file.getMd5Hash(), - type, comment); + try{ + EamGlobalFileInstance fileInstance = new EamGlobalFileInstance(referenceSetID, file.getMd5Hash(), + type, comment); EamDb.getInstance().addReferenceInstance(fileInstance,EamDb.getInstance().getCorrelationTypeById(CorrelationAttribute.FILES_TYPE_ID)); } catch (EamDbException ex){ throw new TskCoreException("Error adding hashes to " + getDisplayName(), ex); @@ -1259,8 +1260,12 @@ public class HashDbManager implements PropertyChangeListener { type = TskData.FileKnown.BAD; } else { type = TskData.FileKnown.KNOWN; - } - globalFileInstances.add(new EamGlobalFileInstance(referenceSetID, hashEntry.getMd5Hash(), type, hashEntry.getComment())); + } + try { + globalFileInstances.add(new EamGlobalFileInstance(referenceSetID, hashEntry.getMd5Hash(), type, hashEntry.getComment())); + } catch (EamDbException ex){ + throw new TskCoreException("Error adding hashes to " + getDisplayName(), ex); + } } try{ @@ -1344,7 +1349,7 @@ public class HashDbManager implements PropertyChangeListener { try{ return EamDb.getInstance().referenceSetIsValid(this.referenceSetID, this.hashSetName, this.version); } catch (EamDbException ex){ - Logger.getLogger(CentralRepoHashSet.class.getName()).log(Level.SEVERE, "Error validating hash database " + hashSetName, ex); //NON-NLS + Logger.getLogger(CentralRepoHashSet.class.getName()).log(Level.SEVERE, "Error validating hash set " + hashSetName, ex); //NON-NLS return false; } } diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupModuleSettings.java 
b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupModuleSettings.java index 99dd50d291..99204ab1ca 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupModuleSettings.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupModuleSettings.java @@ -47,7 +47,7 @@ final class HashLookupModuleSettings implements IngestModuleIngestJobSettings { try{ databaseInfoList = HashLookupSettings.convertHashSetList(hashDbList); } catch (HashLookupSettings.HashLookupSettingsException ex){ - Logger.getLogger(HashLookupModuleSettings.class.getName()).log(Level.SEVERE, "Error creating hash database settings.", ex); //NON-NLS + Logger.getLogger(HashLookupModuleSettings.class.getName()).log(Level.SEVERE, "Error creating hash set settings.", ex); //NON-NLS databaseInfoList = new ArrayList<>(); } } @@ -87,7 +87,7 @@ final class HashLookupModuleSettings implements IngestModuleIngestJobSettings { dbInfo.setSearchDuringIngest(true); databaseInfoList.add(dbInfo); } catch (TskCoreException ex){ - Logger.getLogger(HashLookupModuleSettings.class.getName()).log(Level.SEVERE, "Error creating hash database settings for " + db.getHashSetName(), ex); //NON-NLS + Logger.getLogger(HashLookupModuleSettings.class.getName()).log(Level.SEVERE, "Error creating hash set settings for " + db.getHashSetName(), ex); //NON-NLS } } for(HashDb db:disabledHashSets){ @@ -96,7 +96,7 @@ final class HashLookupModuleSettings implements IngestModuleIngestJobSettings { dbInfo.setSearchDuringIngest(false); databaseInfoList.add(dbInfo); } catch (TskCoreException ex){ - Logger.getLogger(HashLookupModuleSettings.class.getName()).log(Level.SEVERE, "Error creating hash database settings for " + db.getHashSetName(), ex); //NON-NLS + Logger.getLogger(HashLookupModuleSettings.class.getName()).log(Level.SEVERE, "Error creating hash set settings for " + db.getHashSetName(), ex); //NON-NLS } } @@ -152,7 +152,7 @@ final class HashLookupModuleSettings implements IngestModuleIngestJobSettings { try{ databaseInfoList = HashLookupSettings.convertHashSetList(HashDbManager.getInstance().getAllHashSets()); } catch (HashLookupSettings.HashLookupSettingsException ex){ - Logger.getLogger(HashLookupModuleSettings.class.getName()).log(Level.SEVERE, "Error updating hash database settings.", ex); //NON-NLS + Logger.getLogger(HashLookupModuleSettings.class.getName()).log(Level.SEVERE, "Error updating hash set settings.", ex); //NON-NLS return; } diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettings.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettings.java index 948f18451d..68bf4a32b2 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettings.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettings.java @@ -80,7 +80,7 @@ final class HashLookupSettings implements Serializable { try{ dbInfoList.add(new HashDbInfo(db)); } catch (TskCoreException ex){ - logger.log(Level.SEVERE, "Could not load database settings for {0}", db.getHashSetName()); + logger.log(Level.SEVERE, "Could not load hash set settings for {0}", db.getHashSetName()); } } return dbInfoList; @@ -128,7 +128,7 @@ final class HashLookupSettings implements Serializable { return filesSetsSettings; } } catch (IOException | ClassNotFoundException ex) { - throw new HashLookupSettingsException("Could not read hash database settings.", ex); + throw new HashLookupSettingsException("Could not read hash set settings.", ex); } } @@ -285,7 +285,7 @@ final class 
HashLookupSettings implements Serializable { out.writeObject(settings); return true; } catch (Exception ex) { - logger.log(Level.SEVERE, "Could not write hash database settings."); + logger.log(Level.SEVERE, "Could not write hash set settings."); return false; } } diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java index d0530bb2f9..6e972437f0 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java @@ -175,7 +175,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan try { addHashesToDatabaseButton.setEnabled(!ingestIsRunning && db.isUpdateable()); } catch (TskCoreException ex) { - Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error identifying if the database is updateable.", ex); //NON-NLS + Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error identifying if the hash set is updateable.", ex); //NON-NLS addHashesToDatabaseButton.setEnabled(false); } @@ -192,14 +192,14 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan try { hashDbLocationLabel.setText(shortenPath(db.getDatabasePath())); } catch (TskCoreException ex) { - Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error getting database path of " + db.getHashSetName() + " hash database", ex); //NON-NLS + Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error getting hash set path of " + db.getHashSetName() + " hash set", ex); //NON-NLS hashDbLocationLabel.setText(ERROR_GETTING_PATH_TEXT); } try { indexPathLabel.setText(shortenPath(hashDb.getIndexPath())); } catch (TskCoreException ex) { - Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error getting index path of " + db.getHashSetName() + " hash database", ex); //NON-NLS + Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error getting index path of " + db.getHashSetName() + " hash set", ex); //NON-NLS indexPathLabel.setText(ERROR_GETTING_PATH_TEXT); } @@ -237,7 +237,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan indexButton.setEnabled(true); } } catch (TskCoreException ex) { - Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error getting index state of hash database", ex); //NON-NLS + Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error getting index state of hash set", ex); //NON-NLS hashDbIndexStatusLabel.setText(ERROR_GETTING_INDEX_STATUS_TEXT); hashDbIndexStatusLabel.setForeground(Color.red); indexButton.setText(NbBundle.getMessage(this.getClass(), "HashDbConfigPanel.indexButtonText.index")); @@ -299,7 +299,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan } @Override - @Messages({"HashLookupSettingsPanel.saveFail.message=Couldn't save hash db settings.", + @Messages({"HashLookupSettingsPanel.saveFail.message=Couldn't save hash set settings.", "HashLookupSettingsPanel.saveFail.title=Save Fail"}) public void saveSettings() { // Clear out the list of new central repo hash sets. 
They don't need to be @@ -316,7 +316,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan unindexed.add(hashDatabase); } } catch (TskCoreException ex) { - Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error getting index info for hash database", ex); //NON-NLS + Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error getting index info for hash set", ex); //NON-NLS } } } @@ -515,7 +515,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan try { return hashSets.get(rowIndex).isValid(); } catch (TskCoreException ex) { - Logger.getLogger(HashSetTableModel.class.getName()).log(Level.SEVERE, "Error getting index info for hash database", ex); //NON-NLS + Logger.getLogger(HashSetTableModel.class.getName()).log(Level.SEVERE, "Error getting index info for hash set", ex); //NON-NLS return false; } } diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java index 5935784087..94d2724995 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java @@ -64,7 +64,7 @@ public class KdbHashSetParser implements HashSetParser { totalHashes = resultSet.getLong("count"); } else { close(); - throw new TskCoreException("Error getting hash count from database " + filename); + throw new TskCoreException("Error getting hash count from hash set " + filename); } // Get the hashes @@ -72,7 +72,7 @@ public class KdbHashSetParser implements HashSetParser { // At this point, getNextHash can read each hash from the result set } catch (ClassNotFoundException | SQLException ex) { - throw new TskCoreException("Error opening/reading database " + filename, ex); + throw new TskCoreException("Error opening/reading hash set " + filename, ex); } } @@ -101,10 +101,10 @@ public class KdbHashSetParser implements HashSetParser { totalHashesRead++; return sb.toString(); } else { - throw new TskCoreException("Could not read expected number of hashes from database " + filename); + throw new TskCoreException("Could not read expected number of hashes from hash set " + filename); } } catch (SQLException ex) { - throw new TskCoreException("Error reading hash from result set for database " + filename, ex); + throw new TskCoreException("Error reading hash from result set for hash set " + filename, ex); } } diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java b/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java index 0c8aa263c3..ef20238c48 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java @@ -63,7 +63,7 @@ class ReportGenerator { */ private ReportProgressPanel progressPanel; - private String reportPathFormatString; + private static final String REPORT_PATH_FMT_STR = "%s" + File.separator + "%s %s %s" + File.separator; private final ReportGenerationPanel reportGenerationPanel = new ReportGenerationPanel(); static final String REPORTS_DIR = "Reports"; //NON-NLS @@ -89,12 +89,6 @@ class ReportGenerator { * Creates a report generator. 
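The KdbHashSetParser changes above keep its count-then-iterate flow: open the SQLite-backed hash set file, read the total hash count, then stream rows, translating SQL errors into a single domain exception. A rough, self-contained sketch of that flow in plain JDBC; it assumes a SQLite JDBC driver on the classpath, the table and column names are placeholders rather than the real kdb schema, and a plain Exception stands in for TskCoreException:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

class SqliteCountThenIterateSketch {
    // Read the count first, then callers can iterate rows from the same connection.
    static long countRows(String filename) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:sqlite:" + filename);
             Statement stmt = conn.createStatement();
             ResultSet resultSet = stmt.executeQuery("SELECT count(*) AS count FROM hashes")) {
            if (resultSet.next()) {
                return resultSet.getLong("count");
            }
            throw new Exception("Error getting hash count from hash set " + filename);
        } catch (SQLException ex) {
            throw new Exception("Error opening/reading hash set " + filename, ex);
        }
    }
}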
*/ ReportGenerator() { - // Create the root reports directory path of the form: /Reports/ / - DateFormat dateFormat = new SimpleDateFormat("MM-dd-yyyy-HH-mm-ss"); - Date date = new Date(); - String dateNoTime = dateFormat.format(date); - this.reportPathFormatString = currentCase.getReportDirectory() + File.separator + currentCase.getDisplayName() + " %s " + dateNoTime + File.separator; - this.errorList = new ArrayList<>(); } @@ -136,19 +130,12 @@ class ReportGenerator { /** * Run the GeneralReportModules using a SwingWorker. */ - void generateGeneralReport(GeneralReportModule generalReportModule) { + void generateGeneralReport(GeneralReportModule generalReportModule) throws IOException { if (generalReportModule != null) { - reportPathFormatString = String.format(reportPathFormatString, generalReportModule.getName()); - // Create the root reports directory. - try { - FileUtil.createFolder(new File(reportPathFormatString)); - } catch (IOException ex) { - errorList.add(NbBundle.getMessage(this.getClass(), "ReportGenerator.errList.failedMakeRptFolder")); - logger.log(Level.SEVERE, "Failed to make report folder, may be unable to generate reports.", ex); //NON-NLS - } - setupProgressPanel(generalReportModule); + String reportDir = createReportDirectory(generalReportModule); + setupProgressPanel(generalReportModule, reportDir); ReportWorker worker = new ReportWorker(() -> { - generalReportModule.generateReport(reportPathFormatString, progressPanel); + generalReportModule.generateReport(reportDir, progressPanel); }); worker.execute(); displayProgressPanel(); @@ -163,19 +150,12 @@ class ReportGenerator { * @param tagSelections the enabled/disabled state of the tag names * to be included in the report */ - void generateTableReport(TableReportModule tableReport, Map artifactTypeSelections, Map tagNameSelections) { + void generateTableReport(TableReportModule tableReport, Map artifactTypeSelections, Map tagNameSelections) throws IOException { if (tableReport != null && null != artifactTypeSelections) { - reportPathFormatString = String.format(reportPathFormatString, tableReport.getName()); - // Create the root reports directory. - try { - FileUtil.createFolder(new File(reportPathFormatString)); - } catch (IOException ex) { - errorList.add(NbBundle.getMessage(this.getClass(), "ReportGenerator.errList.failedMakeRptFolder")); - logger.log(Level.SEVERE, "Failed to make report folder, may be unable to generate reports.", ex); //NON-NLS - } - setupProgressPanel(tableReport); + String reportDir = createReportDirectory(tableReport); + setupProgressPanel(tableReport, reportDir); ReportWorker worker = new ReportWorker(() -> { - tableReport.startReport(reportPathFormatString); + tableReport.startReport(reportDir); TableReportGenerator generator = new TableReportGenerator(artifactTypeSelections, tagNameSelections, progressPanel, tableReport); generator.execute(); tableReport.endReport(); @@ -194,23 +174,16 @@ class ReportGenerator { * @param enabledInfo the Information that should be included about each * file in the report. */ - void generateFileListReport(FileReportModule fileReportModule, Map enabledInfo) { + void generateFileListReport(FileReportModule fileReportModule, Map enabledInfo) throws IOException { if (fileReportModule != null && null != enabledInfo) { - reportPathFormatString = String.format(reportPathFormatString, fileReportModule.getName()); - // Create the root reports directory. 
- try { - FileUtil.createFolder(new File(reportPathFormatString)); - } catch (IOException ex) { - errorList.add(NbBundle.getMessage(this.getClass(), "ReportGenerator.errList.failedMakeRptFolder")); - logger.log(Level.SEVERE, "Failed to make report folder, may be unable to generate reports.", ex); //NON-NLS - } + String reportDir = createReportDirectory(fileReportModule); List enabled = new ArrayList<>(); for (Entry e : enabledInfo.entrySet()) { if (e.getValue()) { enabled.add(e.getKey()); } } - setupProgressPanel(fileReportModule); + setupProgressPanel(fileReportModule, reportDir); ReportWorker worker = new ReportWorker(() -> { if (progressPanel.getStatus() != ReportStatus.CANCELED) { progressPanel.start(); @@ -221,7 +194,7 @@ class ReportGenerator { List files = getFiles(); int numFiles = files.size(); if (progressPanel.getStatus() != ReportStatus.CANCELED) { - fileReportModule.startReport(reportPathFormatString); + fileReportModule.startReport(reportDir); fileReportModule.startTable(enabled); } progressPanel.setIndeterminate(false); @@ -276,15 +249,31 @@ class ReportGenerator { } } - private void setupProgressPanel(ReportModule module) { + private void setupProgressPanel(ReportModule module, String reportDir) { String reportFilePath = module.getRelativeFilePath(); if (!reportFilePath.isEmpty()) { - this.progressPanel = reportGenerationPanel.addReport(module.getName(), String.format(reportPathFormatString, module.getName()) + reportFilePath); + this.progressPanel = reportGenerationPanel.addReport(module.getName(), reportDir + reportFilePath); } else { this.progressPanel = reportGenerationPanel.addReport(module.getName(), null); } } + private static String createReportDirectory(ReportModule module) throws IOException { + Case currentCase = Case.getCurrentCase(); + // Create the root reports directory path of the form: <case directory>/Reports/<case name> <module name> <timestamp>/ + DateFormat dateFormat = new SimpleDateFormat("MM-dd-yyyy-HH-mm-ss"); + Date date = new Date(); + String dateNoTime = dateFormat.format(date); + String reportPath = String.format(REPORT_PATH_FMT_STR, currentCase.getReportDirectory(), currentCase.getDisplayName(), module.getName(), dateNoTime); + // Create the root reports directory. + try { + FileUtil.createFolder(new File(reportPath)); + } catch (IOException ex) { + throw new IOException("Failed to make report folder, unable to generate reports.", ex); + } + return reportPath; + } + private class ReportWorker extends SwingWorker { private final Runnable doInBackground; diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportWizardAction.java b/Core/src/org/sleuthkit/autopsy/report/ReportWizardAction.java index 49f37008be..618cba2a38 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportWizardAction.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportWizardAction.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2013-2015 Basis Technology Corp. + * Copyright 2013-2018 Basis Technology Corp. * * Copyright 2012 42six Solutions.
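To make the ReportGenerator refactor above concrete, here is a small, runnable illustration of how the new REPORT_PATH_FMT_STR is expanded inside createReportDirectory(); the case report directory, case name, and module name below are made-up sample values:

import java.io.File;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;

public class ReportPathSketch {
    // Mirrors the format string added above: <report dir>/<case name> <module name> <timestamp>/
    private static final String REPORT_PATH_FMT_STR = "%s" + File.separator + "%s %s %s" + File.separator;

    public static void main(String[] args) {
        DateFormat dateFormat = new SimpleDateFormat("MM-dd-yyyy-HH-mm-ss");
        String dateNoTime = dateFormat.format(new Date());
        // Prints something like /cases/MyCase/Reports/MyCase HTML Report 01-31-2018-10-15-42/
        String reportPath = String.format(REPORT_PATH_FMT_STR,
                "/cases/MyCase/Reports", "MyCase", "HTML Report", dateNoTime);
        System.out.println(reportPath);
    }
}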
* Contact: aebadirad 42six com @@ -26,12 +26,14 @@ import java.awt.Component; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.beans.PropertyChangeEvent; +import java.io.IOException; import java.text.MessageFormat; import java.util.EnumSet; import java.util.Map; import javax.swing.ImageIcon; import javax.swing.JButton; import org.openide.DialogDisplayer; +import org.openide.NotifyDescriptor; import org.openide.WizardDescriptor; import org.openide.awt.ActionID; import org.openide.awt.ActionReference; @@ -70,12 +72,17 @@ public final class ReportWizardAction extends CallableSystemAction implements Pr TableReportModule tableReport = (TableReportModule) wiz.getProperty("tableModule"); GeneralReportModule generalReport = (GeneralReportModule) wiz.getProperty("generalModule"); FileReportModule fileReport = (FileReportModule) wiz.getProperty("fileModule"); - if (tableReport != null) { - generator.generateTableReport(tableReport, (Map) wiz.getProperty("artifactStates"), (Map) wiz.getProperty("tagStates")); //NON-NLS - } else if (generalReport != null) { - generator.generateGeneralReport(generalReport); - } else if (fileReport != null) { - generator.generateFileListReport(fileReport, (Map) wiz.getProperty("fileReportOptions")); //NON-NLS + try { + if (tableReport != null) { + generator.generateTableReport(tableReport, (Map) wiz.getProperty("artifactStates"), (Map) wiz.getProperty("tagStates")); //NON-NLS + } else if (generalReport != null) { + generator.generateGeneralReport(generalReport); + } else if (fileReport != null) { + generator.generateFileListReport(fileReport, (Map) wiz.getProperty("fileReportOptions")); //NON-NLS + } + } catch (IOException e) { + NotifyDescriptor descriptor = new NotifyDescriptor.Message(e.getMessage(), NotifyDescriptor.ERROR_MESSAGE); + DialogDisplayer.getDefault().notify(descriptor); } } } diff --git a/Core/src/org/sleuthkit/autopsy/report/taggedhashes/AddTaggedHashesToHashDb.java b/Core/src/org/sleuthkit/autopsy/report/taggedhashes/AddTaggedHashesToHashDb.java index 1de1db7eaa..d3560c514c 100644 --- a/Core/src/org/sleuthkit/autopsy/report/taggedhashes/AddTaggedHashesToHashDb.java +++ b/Core/src/org/sleuthkit/autopsy/report/taggedhashes/AddTaggedHashesToHashDb.java @@ -55,7 +55,7 @@ public class AddTaggedHashesToHashDb implements GeneralReportModule { @Override public String getDescription() { - return "Adds hashes of tagged files to a hash database."; + return "Adds hashes of tagged files to a hash set."; } @Override @@ -92,17 +92,17 @@ public class AddTaggedHashesToHashDb implements GeneralReportModule { try { hashSet.addHashes(tag.getContent(), Case.getCurrentCase().getDisplayName()); } catch (TskCoreException ex) { - Logger.getLogger(AddTaggedHashesToHashDb.class.getName()).log(Level.SEVERE, "Error adding hash for obj_id = " + tag.getContent().getId() + " to hash database " + hashSet.getHashSetName(), ex); + Logger.getLogger(AddTaggedHashesToHashDb.class.getName()).log(Level.SEVERE, "Error adding hash for obj_id = " + tag.getContent().getId() + " to hash set " + hashSet.getHashSetName(), ex); failedExports.add(tag.getContent().getName()); } } else { - JOptionPane.showMessageDialog(null, "Unable to add the " + (tags.size() > 1 ? "files" : "file") + " to the hash database. Hashes have not been calculated. Please configure and run an appropriate ingest module.", "Add to Hash Database Error", JOptionPane.ERROR_MESSAGE); + JOptionPane.showMessageDialog(null, "Unable to add the " + (tags.size() > 1 ? 
"files" : "file") + " to the hash set. Hashes have not been calculated. Please configure and run an appropriate ingest module.", "Add to Hash Set Error", JOptionPane.ERROR_MESSAGE); break; } } } } catch (TskCoreException ex) { - Logger.getLogger(AddTaggedHashesToHashDb.class.getName()).log(Level.SEVERE, "Error adding to hash database", ex); + Logger.getLogger(AddTaggedHashesToHashDb.class.getName()).log(Level.SEVERE, "Error adding to hash set", ex); JOptionPane.showMessageDialog(null, "Error getting selected tags for case.", "Hash Export Error", JOptionPane.ERROR_MESSAGE); } } diff --git a/Core/test/qa-functional/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoDatamodelTest.java b/Core/test/qa-functional/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoDatamodelTest.java new file mode 100644 index 0000000000..cce1e807e1 --- /dev/null +++ b/Core/test/qa-functional/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoDatamodelTest.java @@ -0,0 +1,2620 @@ +/* + * Central Repository + * + * Copyright 2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.centralrepository.datamodel; + +import java.io.IOException; +import java.util.Map; +import java.util.List; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Set; +import java.util.HashSet; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.stream.Collectors; +import junit.framework.Test; +import junit.framework.TestCase; +import org.apache.commons.io.FileUtils; +import org.netbeans.junit.NbModuleSuite; +import org.openide.util.Exceptions; +import org.python.icu.impl.Assert; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.CaseActionException; +import org.sleuthkit.autopsy.casemodule.CaseDetails; +import org.sleuthkit.autopsy.coreutils.ModuleSettings; +import org.sleuthkit.datamodel.TskData; +import static junit.framework.Assert.assertFalse; +import static junit.framework.Assert.assertTrue; + +/** + * + */ +public class CentralRepoDatamodelTest extends TestCase { + + private static final String PROPERTIES_FILE = "CentralRepository"; + private static final String CR_DB_NAME = "testcentralrepo.db"; + private static final Path testDirectory = Paths.get(System.getProperty("java.io.tmpdir"), "CentralRepoDatamodelTest"); + SqliteEamDbSettings dbSettingsSqlite; + + private CorrelationCase case1; + private CorrelationCase case2; + private CorrelationDataSource dataSource1fromCase1; + private CorrelationDataSource dataSource2fromCase1; + private CorrelationDataSource dataSource1fromCase2; + private EamOrganization org1; + private EamOrganization org2; + CorrelationAttribute.Type fileType; + + private Map propertiesMap = null; + + public static Test suite() { + NbModuleSuite.Configuration conf = NbModuleSuite.createConfiguration(CentralRepoDatamodelTest.class). + clusters(".*"). 
+ enableModules(".*"); + return conf.suite(); + } + + @Override + public void setUp() { + dbSettingsSqlite = new SqliteEamDbSettings(); + + // Delete the test directory, if it exists + if (testDirectory.toFile().exists()) { + try { + FileUtils.deleteDirectory(testDirectory.toFile()); + } catch (IOException ex) { + Assert.fail(ex); + } + } + assertFalse("Unable to delete existing test directory", testDirectory.toFile().exists()); + + // Create the test directory + testDirectory.toFile().mkdirs(); + assertTrue("Unable to create test directory", testDirectory.toFile().exists()); + + // Save the current central repo settings + propertiesMap = ModuleSettings.getConfigSettings(PROPERTIES_FILE); + + try { + dbSettingsSqlite.setDbName(CR_DB_NAME); + dbSettingsSqlite.setDbDirectory(testDirectory.toString()); + if (!dbSettingsSqlite.dbDirectoryExists()) { + dbSettingsSqlite.createDbDirectory(); + } + + assertTrue("Failed to created central repo directory " + dbSettingsSqlite.getDbDirectory(), dbSettingsSqlite.dbDirectoryExists()); + + boolean result = dbSettingsSqlite.initializeDatabaseSchema() + && dbSettingsSqlite.insertDefaultDatabaseContent(); + + assertTrue("Failed to initialize central repo database", result); + + dbSettingsSqlite.saveSettings(); + EamDbUtil.setUseCentralRepo(true); + EamDbPlatformEnum.setSelectedPlatform(EamDbPlatformEnum.SQLITE.name()); + EamDbPlatformEnum.saveSelectedPlatform(); + } catch (Exception ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + Path crDbFilePath = Paths.get(testDirectory.toString(), CR_DB_NAME); + assertTrue("Failed to create central repo database at " + crDbFilePath, crDbFilePath.toFile().exists()); + + // Set up some default objects to be used by the tests + try { + case1 = new CorrelationCase("case1_uuid", "case1"); + case1 = EamDb.getInstance().newCase(case1); + assertTrue("Failed to create test object case1", case1 != null); + + case2 = new CorrelationCase("case2_uuid", "case2"); + case2 = EamDb.getInstance().newCase(case2); + assertTrue("Failed to create test object case2", case2 != null); + + dataSource1fromCase1 = new CorrelationDataSource(case1.getID(), "dataSource1_deviceID", "dataSource1"); + EamDb.getInstance().newDataSource(dataSource1fromCase1); + dataSource1fromCase1 = EamDb.getInstance().getDataSource(case1, dataSource1fromCase1.getDeviceID()); + assertTrue("Failed to create test object dataSource1fromCase1", dataSource1fromCase1 != null); + + dataSource2fromCase1 = new CorrelationDataSource(case1.getID(), "dataSource2_deviceID", "dataSource2"); + EamDb.getInstance().newDataSource(dataSource2fromCase1); + dataSource2fromCase1 = EamDb.getInstance().getDataSource(case1, dataSource2fromCase1.getDeviceID()); + assertTrue("Failed to create test object dataSource2fromCase1", dataSource2fromCase1 != null); + + dataSource1fromCase2 = new CorrelationDataSource(case2.getID(), "dataSource3_deviceID", "dataSource3"); + EamDb.getInstance().newDataSource(dataSource1fromCase2); + dataSource1fromCase2 = EamDb.getInstance().getDataSource(case2, dataSource1fromCase2.getDeviceID()); + assertTrue("Failed to create test object dataSource1fromCase2", dataSource1fromCase2 != null); + + org1 = new EamOrganization("org1"); + org1.setOrgID((int) EamDb.getInstance().newOrganization(org1)); + + org2 = new EamOrganization("org2"); + org2.setOrgID((int) EamDb.getInstance().newOrganization(org2)); + + // Store the file type object for later use + fileType = EamDb.getInstance().getCorrelationTypeById(CorrelationAttribute.FILES_TYPE_ID); + 
assertTrue("getCorrelationTypeById(FILES_TYPE_ID) returned null", fileType != null); + + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + } + + @Override + public void tearDown() { + + // Restore the original properties + ModuleSettings.setConfigSettings(PROPERTIES_FILE, propertiesMap); + + // Close and delete the test case and central repo db + try { + EamDb.getInstance().shutdownConnections(); + FileUtils.deleteDirectory(testDirectory.toFile()); + } catch (EamDbException | IOException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + assertFalse("Error deleting test directory " + testDirectory.toString(), testDirectory.toFile().exists()); + } + + /** + * Test the notable status of artifacts + * addArtifact(CorrelationAttribute eamArtifact) tests: + * - Test that two artifacts created with BAD status still have it when fetched from the database + * - Test that two artifacts created with BAD and KNOWN status still have the correct status when fetched from the database + * setArtifactInstanceKnownStatus(CorrelationAttribute eamArtifact, TskData.FileKnown knownStatus) tests: + * - Test updating status + * - Test updating artifact with two instances + * - Test updating null artifact + * - Test updating artifact with null known status + * - Test updating artifact with null case + * - Test updating artifact with null data source + * getArtifactInstancesKnownBad(CorrelationAttribute.Type aType, String value) tests: + * - Test getting two notable instances + * - Test getting notable instances where one instance is notable and the other is known + * - Test getting notable instances with null type + * - Test getting notable instances with null value + * getCountArtifactInstancesKnownBad(CorrelationAttribute.Type aType, String value) tests: + * - Test getting count of two notable instances + * - Test getting notable instance count where one instance is notable and the other is known + * - Test getting notable instance count with null type + * - Test getting notable instance count with null value + * getListCasesHavingArtifactInstancesKnownBad(CorrelationAttribute.Type aType, String value) tests: + * - Test getting cases with notable instances (all instances are notable) + * - Test getting cases with notable instances (only one instance is notable) + * - Test getting cases with null type + * - Test getting cases with null value + */ + public void testNotableArtifactStatus() { + + String notableHashInBothCases = "e34a8899ef6468b74f8a1048419ccc8b"; + String notableHashInOneCaseKnownOther = "d293f2f5cebcb427cde3bb95db5e1797"; + String hashToChangeToNotable = "23bd4ea37ec6304e75ac723527472a0f"; + + // Add two instances with notable status + try { + CorrelationAttribute attr = new CorrelationAttribute(fileType, notableHashInBothCases); + attr.addInstance(new CorrelationAttributeInstance(case1, dataSource1fromCase1, "path1", + "", TskData.FileKnown.BAD)); + attr.addInstance(new CorrelationAttributeInstance(case2, dataSource1fromCase2, "path2", + "", TskData.FileKnown.BAD)); + EamDb.getInstance().addArtifact(attr); + + List attrs = EamDb.getInstance().getArtifactInstancesByTypeValue(fileType, notableHashInBothCases); + assertTrue("getArtifactInstancesByTypeValue returned " + attrs.size() + " values - expected 2", attrs.size() == 2); + for (CorrelationAttributeInstance a : attrs) { + assertTrue("Artifact did not have expected BAD status", a.getKnownStatus().equals(TskData.FileKnown.BAD)); + } + } catch (EamDbException ex) { + 
Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Add two instances with one notable, one known + try { + CorrelationAttribute attr = new CorrelationAttribute(fileType, notableHashInOneCaseKnownOther); + attr.addInstance(new CorrelationAttributeInstance(case1, dataSource1fromCase1, "path3", + "", TskData.FileKnown.BAD)); + attr.addInstance(new CorrelationAttributeInstance(case2, dataSource1fromCase2, "path4", + "", TskData.FileKnown.KNOWN)); + EamDb.getInstance().addArtifact(attr); + + List attrs = EamDb.getInstance().getArtifactInstancesByTypeValue(fileType, notableHashInOneCaseKnownOther); + assertTrue("getArtifactInstancesByTypeValue returned " + attrs.size() + " values - expected 2", attrs.size() == 2); + for (CorrelationAttributeInstance a : attrs) { + if (case1.getCaseUUID().equals(a.getCorrelationCase().getCaseUUID())) { + assertTrue("Artifact did not have expected BAD status", a.getKnownStatus().equals(TskData.FileKnown.BAD)); + } else if (case2.getCaseUUID().equals(a.getCorrelationCase().getCaseUUID())) { + assertTrue("Artifact did not have expected KNOWN status", a.getKnownStatus().equals(TskData.FileKnown.KNOWN)); + } else { + Assert.fail("getArtifactInstancesByTypeValue returned unexpected case"); + } + } + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Add an artifact and then update its status + try { + CorrelationAttribute attr = new CorrelationAttribute(fileType, hashToChangeToNotable); + attr.addInstance(new CorrelationAttributeInstance(case1, dataSource1fromCase2, "path5", + "", TskData.FileKnown.KNOWN)); + EamDb.getInstance().addArtifact(attr); + + EamDb.getInstance().setArtifactInstanceKnownStatus(attr, TskData.FileKnown.BAD); + + List attrs = EamDb.getInstance().getArtifactInstancesByTypeValue(fileType, hashToChangeToNotable); + assertTrue("getArtifactInstancesByTypeValue returned " + attrs.size() + " values - expected 1", attrs.size() == 1); + assertTrue("Artifact status did not change to BAD", attrs.get(0).getKnownStatus().equals(TskData.FileKnown.BAD)); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Try to update artifact with two CorrelationAttributeInstance instances + try { + CorrelationAttribute attr = new CorrelationAttribute(fileType, "badHash"); + attr.addInstance(new CorrelationAttributeInstance(case1, dataSource1fromCase1, "badPath", + "", TskData.FileKnown.KNOWN)); + attr.addInstance(new CorrelationAttributeInstance(case1, dataSource1fromCase2, "badPath", + "", TskData.FileKnown.KNOWN)); + + EamDb.getInstance().setArtifactInstanceKnownStatus(attr, TskData.FileKnown.BAD); + Assert.fail("setArtifactInstanceKnownStatus failed to throw exception for multiple Correlation Attribute Instances"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Try to update null artifact + try { + EamDb.getInstance().setArtifactInstanceKnownStatus(null, TskData.FileKnown.BAD); + Assert.fail("setArtifactInstanceKnownStatus failed to throw exception for null correlation attribute"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Try to update artifact with null known status + try { + CorrelationAttribute attr = new CorrelationAttribute(fileType, "badHash"); + attr.addInstance(new CorrelationAttributeInstance(case1, dataSource1fromCase1, "badPath", + "", TskData.FileKnown.KNOWN)); + + EamDb.getInstance().setArtifactInstanceKnownStatus(attr, null); + Assert.fail("setArtifactInstanceKnownStatus failed to throw exception 
for null known status"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Try to update artifact with null case + try { + CorrelationAttribute attr = new CorrelationAttribute(fileType, "badHash"); + attr.addInstance(new CorrelationAttributeInstance(null, dataSource1fromCase1, "badPath", + "", TskData.FileKnown.KNOWN)); + + EamDb.getInstance().setArtifactInstanceKnownStatus(attr, TskData.FileKnown.BAD); + Assert.fail("setArtifactInstanceKnownStatus failed to throw exception for null case"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Try to update artifact with null data source + try { + CorrelationAttribute attr = new CorrelationAttribute(fileType, "badHash"); + attr.addInstance(new CorrelationAttributeInstance(case1, null, "badPath", + "", TskData.FileKnown.KNOWN)); + + EamDb.getInstance().setArtifactInstanceKnownStatus(attr, TskData.FileKnown.BAD); + Assert.fail("setArtifactInstanceKnownStatus failed to throw exception for null case"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting two notable instances + try { + List attrs = EamDb.getInstance().getArtifactInstancesKnownBad(fileType, notableHashInBothCases); + assertTrue("getArtifactInstancesKnownBad returned " + attrs.size() + " values - expected 2", attrs.size() == 2); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting notable instances where one instance is notable and the other is known + try { + List attrs = EamDb.getInstance().getArtifactInstancesKnownBad(fileType, notableHashInOneCaseKnownOther); + assertTrue("getArtifactInstancesKnownBad returned " + attrs.size() + " values - expected 1", attrs.size() == 1); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting notable instances with null type + try { + EamDb.getInstance().getArtifactInstancesKnownBad(null, notableHashInOneCaseKnownOther); + Assert.fail("getArtifactInstancesKnownBad failed to throw exception for null type"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting notable instances with null value (should work fine) + try { + List attrs = EamDb.getInstance().getArtifactInstancesKnownBad(fileType, null); + assertTrue("getArtifactInstancesKnownBad returned " + attrs.size() + " values - expected ", attrs.isEmpty()); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting count of two notable instances + try { + long count = EamDb.getInstance().getCountArtifactInstancesKnownBad(fileType, notableHashInBothCases); + assertTrue("getCountArtifactInstancesKnownBad returned " + count + " values - expected 2", count == 2); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting notable instance count where one instance is notable and the other is known + try { + long count = EamDb.getInstance().getCountArtifactInstancesKnownBad(fileType, notableHashInOneCaseKnownOther); + assertTrue("getCountArtifactInstancesKnownBad returned " + count + " values - expected 1", count == 1); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting notable instance count with null type + try { + EamDb.getInstance().getCountArtifactInstancesKnownBad(null, notableHashInOneCaseKnownOther); + Assert.fail("getCountArtifactInstancesKnownBad failed to throw exception for null type"); + } catch 
(EamDbException ex) { + // This is the expected behavior + } + + // Test getting notable instance count with null value (should work fine) + try { + long count = EamDb.getInstance().getCountArtifactInstancesKnownBad(fileType, null); + assertTrue("getCountArtifactInstancesKnownBad returned " + count + " values - expected ", count == 0); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting cases with notable instances (all instances are notable) + try { + List cases = EamDb.getInstance().getListCasesHavingArtifactInstancesKnownBad(fileType, notableHashInBothCases); + assertTrue("getListCasesHavingArtifactInstancesKnownBad returned " + cases.size() + " values - expected 2", cases.size() == 2); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting cases with notable instances (only one instance is notable) + try { + List cases = EamDb.getInstance().getListCasesHavingArtifactInstancesKnownBad(fileType, notableHashInOneCaseKnownOther); + assertTrue("getListCasesHavingArtifactInstancesKnownBad returned " + cases.size() + " values - expected 1", cases.size() == 1); + assertTrue("getListCasesHavingArtifactInstancesKnownBad returned unexpected case " + cases.get(0), case1.getDisplayName().equals(cases.get(0))); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting cases with null type + try { + EamDb.getInstance().getListCasesHavingArtifactInstancesKnownBad(null, notableHashInOneCaseKnownOther); + Assert.fail("getListCasesHavingArtifactInstancesKnownBad failed to throw exception for null type"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting cases with null value (should work fine) + try { + List cases = EamDb.getInstance().getListCasesHavingArtifactInstancesKnownBad(fileType, null); + assertTrue("getListCasesHavingArtifactInstancesKnownBad returned " + cases.size() + " values - expected ", cases.isEmpty()); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + } + + /** + * Test the methods associated with bulk artifacts (prepareBulkArtifact and + * bulkInsertArtifacts). + * First test the normal use case of a large number of valid artifacts getting added. 
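Before the error-condition checks listed below, it helps to see the batching contract this test exercises in isolation: prepareBulkArtifact() queues attributes and only writes once the bulk threshold is reached, and bulkInsertArtifacts() flushes whatever is still queued. A minimal sketch under those assumptions; the storeAll helper and its argument are hypothetical, while both EamDb calls appear in the test itself:

// Hypothetical helper illustrating the queue-then-flush contract tested here.
void storeAll(java.util.List<CorrelationAttribute> attrs) throws EamDbException {
    for (CorrelationAttribute attr : attrs) {
        // Queues the attribute; once the configured bulk threshold is hit,
        // the queued batch is written to the central repository in one shot.
        EamDb.getInstance().prepareBulkArtifact(attr);
    }
    // Write any remainder that never reached the threshold.
    EamDb.getInstance().bulkInsertArtifacts();
}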
+ * Next test the error conditions: + * - Test preparing artifact with null type + * - Test preparing artifact with null case + * - Test preparing artifact with null data source + * - Test preparing artifact with null path + * - Test preparing artifact with null known status + */ + public void testBulkArtifacts() { + + // Test normal addition of bulk artifacts + // Steps: + // - Make a list of artifacts roughly half the threshold size + // - Call prepareBulkArtifact on all of them + // - Verify that nothing has been written to the database + // - Make a list of artifacts equal to the threshold size + // - Call prepareBulkArtifact on all of them + // - Verify that the bulk threshold number of them were written to the database + // - Call bulkInsertArtifacts to insert the remainder + // - Verify that the database now has all the artifacts + try { + // Make sure there are no artifacts in the database to start + long originalArtifactCount = EamDb.getInstance().getCountArtifactInstancesByCaseDataSource(case1.getCaseUUID(), dataSource1fromCase1.getDeviceID()); + assertTrue("getCountArtifactInstancesByCaseDataSource returned non-zero count", originalArtifactCount == 0); + + // Create the first list, which will have (bulkThreshold / 2) entries + List list1 = new ArrayList<>(); + for (int i = 0; i < dbSettingsSqlite.getBulkThreshold() / 2; i++) { + String value = "bulkInsertValue1_" + String.valueOf(i); + String path = "C:\\bulkInsertPath1\\file" + String.valueOf(i); + + CorrelationAttribute attr = new CorrelationAttribute(fileType, value); + attr.addInstance(new CorrelationAttributeInstance(case1, dataSource1fromCase1, path)); + list1.add(attr); + } + + // Queue up the current list. There should not be enough to trigger the insert + for (CorrelationAttribute attr : list1) { + EamDb.getInstance().prepareBulkArtifact(attr); + } + + // Check that nothing has been written yet + assertTrue("Artifacts written to database before threshold was reached", + originalArtifactCount == EamDb.getInstance().getCountArtifactInstancesByCaseDataSource(case1.getCaseUUID(), dataSource1fromCase1.getDeviceID())); + + // Make a second list with length equal to bulkThreshold + List list2 = new ArrayList<>(); + for (int i = 0; i < dbSettingsSqlite.getBulkThreshold(); i++) { + String value = "bulkInsertValue2_" + String.valueOf(i); + String path = "C:\\bulkInsertPath2\\file" + String.valueOf(i); + + CorrelationAttribute attr = new CorrelationAttribute(fileType, value); + attr.addInstance(new CorrelationAttributeInstance(case1, dataSource1fromCase1, path)); + list2.add(attr); + } + + // Queue up the current list. 
This will trigger an insert partway through + for (CorrelationAttribute attr : list2) { + EamDb.getInstance().prepareBulkArtifact(attr); + } + + // There should now be bulkThreshold artifacts in the database + long count = EamDb.getInstance().getCountArtifactInstancesByCaseDataSource(case1.getCaseUUID(), dataSource1fromCase1.getDeviceID()); + assertTrue("Artifact count " + count + " does not match bulkThreshold " + dbSettingsSqlite.getBulkThreshold(), count == dbSettingsSqlite.getBulkThreshold()); + + // Now call bulkInsertArtifacts() to insert the rest of queue + EamDb.getInstance().bulkInsertArtifacts(); + count = EamDb.getInstance().getCountArtifactInstancesByCaseDataSource(case1.getCaseUUID(), dataSource1fromCase1.getDeviceID()); + int expectedCount = list1.size() + list2.size(); + assertTrue("Artifact count " + count + " does not match expected count " + expectedCount, count == expectedCount); + + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test preparing artifact with null type + try { + CorrelationAttribute attr = new CorrelationAttribute(null, "value"); + EamDb.getInstance().prepareBulkArtifact(attr); + Assert.fail("prepareBulkArtifact failed to throw exception for null type"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test preparing artifact with null case + try { + CorrelationAttribute attr = new CorrelationAttribute(fileType, "value"); + attr.addInstance(new CorrelationAttributeInstance(null, dataSource1fromCase1, "path")); + EamDb.getInstance().prepareBulkArtifact(attr); + EamDb.getInstance().bulkInsertArtifacts(); + Assert.fail("bulkInsertArtifacts failed to throw exception for null case"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test preparing artifact with null data source + try { + CorrelationAttribute attr = new CorrelationAttribute(fileType, "value"); + attr.addInstance(new CorrelationAttributeInstance(case1, null, "path")); + EamDb.getInstance().prepareBulkArtifact(attr); + EamDb.getInstance().bulkInsertArtifacts(); + Assert.fail("prepareBulkArtifact failed to throw exception for null data source"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test preparing artifact with null path + // CorrelationAttributeInstance will throw an exception + try { + CorrelationAttribute attr = new CorrelationAttribute(fileType, "value"); + attr.addInstance(new CorrelationAttributeInstance(case1, dataSource1fromCase1, null)); + Assert.fail("CorrelationAttributeInstance failed to throw exception for null path"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test preparing artifact with null known status + try { + CorrelationAttribute attr = new CorrelationAttribute(fileType, "value"); + attr.addInstance(new CorrelationAttributeInstance(case1, dataSource1fromCase1, "path", "comment", null)); + EamDb.getInstance().prepareBulkArtifact(attr); + EamDb.getInstance().bulkInsertArtifacts(); + Assert.fail("prepareBulkArtifact failed to throw exception for null known status"); + } catch (EamDbException ex) { + // This is the expected behavior + } + } + + /** + * Test most methods related to artifacts + * addArtifact(CorrelationAttribute eamArtifact) tests: + * - Test adding artifact with one instance + * - Test adding artifact with one instance in each data source + * - Test adding artifact with two instances in the same data source + * - Test adding email artifact + * - Test adding phone artifact + * - Test adding 
domain artifact + * - Test adding device artifact + * - Test adding artifact with null case + * - Test adding artifact with invalid case ID + * - Test adding artifact with null data source + * - Test adding artifact with invalid data source ID + * - Test adding artifact with null path + * - Test adding artifact with null known status + * - Test adding artifact with null correlation type + * - Test adding artifact with null value + * getArtifactInstancesByTypeValue(CorrelationAttribute.Type aType, String value) tests: + * - Test getting three expected instances + * - Test getting no expected instances + * - Test with null type + * - Test with null value + * getArtifactInstancesByPath(CorrelationAttribute.Type aType, String filePath) tests: + * - Test with existing path + * - Test with non-existent path + * - Test with null type + * - Test with null path + * getCountArtifactInstancesByTypeValue(CorrelationAttribute.Type aType, String value) tests: + * - Test getting three expected instances + * - Test getting no expected instances + * - Test with null type + * - Test with null value + * getFrequencyPercentage(CorrelationAttribute corAttr) tests: + * - Test value in every data source + * - Test value in one data source twice + * - Test email + * - Test value in no data sources + * - Test with null type + * - Test with null attribute + * getCountArtifactInstancesByCaseDataSource(String caseUUID, String dataSourceID) tests: + * - Test data source with seven instances + * - Test with null case UUID + * - Test with null device ID + * getCountUniqueCaseDataSourceTuplesHavingTypeValue(CorrelationAttribute.Type aType, String value) tests: + * - Test value in every data source + * - Test value in one data source twice + * - Test value in no data sources + * - Test with null type + * - Test with null value + */ + public void testArtifacts() { + + String inAllDataSourcesHash = "6cddb0e31787b79cfdcc0676b98a71ce"; + String inAllDataSourcesPath = "C:\\files\\path0.txt"; + String inDataSource1twiceHash = "b2f5ff47436671b6e533d8dc3614845d"; + String inDataSource1twicePath1 = "C:\\files\\path1.txt"; + String inDataSource1twicePath2 = "C:\\files\\path2.txt"; + String onlyInDataSource3Hash = "2af54305f183778d87de0c70c591fae4"; + String onlyInDataSource3Path = "C:\\files\\path3.txt"; + + // These will all go in dataSource1fromCase1 + String emailValue = "test@gmail.com"; + String emailPath = "C:\\files\\emailPath.txt"; + String phoneValue = "202-555-1234"; + String phonePath = "C:\\files\\phonePath.txt"; + String domainValue = "www.mozilla.com"; + String domainPath = "C:\\files\\domainPath.txt"; + String devIdValue = "94B21234"; + String devIdPath = "C:\\files\\devIdPath.txt"; + + // Store the email type + CorrelationAttribute.Type emailType; + try { + emailType = EamDb.getInstance().getCorrelationTypeById(CorrelationAttribute.EMAIL_TYPE_ID); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + return; + } + + // Test adding attribute with one instance + try { + CorrelationAttribute attr = new CorrelationAttribute(fileType, onlyInDataSource3Hash); + CorrelationAttributeInstance inst = new CorrelationAttributeInstance(case2, dataSource1fromCase2, onlyInDataSource3Path); + attr.addInstance(inst); + EamDb.getInstance().addArtifact(attr); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test adding attribute with an instance in each data source + try { + CorrelationAttribute attr = new CorrelationAttribute(fileType, 
inAllDataSourcesHash); + CorrelationAttributeInstance inst1 = new CorrelationAttributeInstance(case1, dataSource1fromCase1, inAllDataSourcesPath); + attr.addInstance(inst1); + CorrelationAttributeInstance inst2 = new CorrelationAttributeInstance(case1, dataSource2fromCase1, inAllDataSourcesPath); + attr.addInstance(inst2); + CorrelationAttributeInstance inst3 = new CorrelationAttributeInstance(case2, dataSource1fromCase2, inAllDataSourcesPath); + attr.addInstance(inst3); + EamDb.getInstance().addArtifact(attr); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test adding attribute with two instances in one data source + try { + CorrelationAttribute attr = new CorrelationAttribute(fileType, inDataSource1twiceHash); + CorrelationAttributeInstance inst1 = new CorrelationAttributeInstance(case1, dataSource1fromCase1, inDataSource1twicePath1); + attr.addInstance(inst1); + CorrelationAttributeInstance inst2 = new CorrelationAttributeInstance(case1, dataSource1fromCase1, inDataSource1twicePath2); + attr.addInstance(inst2); + EamDb.getInstance().addArtifact(attr); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test adding the other types + // Test adding an email artifact + try { + CorrelationAttribute attr = new CorrelationAttribute(emailType, emailValue); + CorrelationAttributeInstance inst = new CorrelationAttributeInstance(case1, dataSource1fromCase1, emailPath); + attr.addInstance(inst); + EamDb.getInstance().addArtifact(attr); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test adding a phone artifact + try { + CorrelationAttribute attr = new CorrelationAttribute(EamDb.getInstance().getCorrelationTypeById(CorrelationAttribute.PHONE_TYPE_ID), + phoneValue); + CorrelationAttributeInstance inst = new CorrelationAttributeInstance(case1, dataSource1fromCase1, phonePath); + attr.addInstance(inst); + EamDb.getInstance().addArtifact(attr); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test adding a domain artifact + try { + CorrelationAttribute attr = new CorrelationAttribute(EamDb.getInstance().getCorrelationTypeById(CorrelationAttribute.DOMAIN_TYPE_ID), + domainValue); + CorrelationAttributeInstance inst = new CorrelationAttributeInstance(case1, dataSource1fromCase1, domainPath); + attr.addInstance(inst); + EamDb.getInstance().addArtifact(attr); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test adding a device ID artifact + try { + CorrelationAttribute attr = new CorrelationAttribute(EamDb.getInstance().getCorrelationTypeById(CorrelationAttribute.USBID_TYPE_ID), + devIdValue); + CorrelationAttributeInstance inst = new CorrelationAttributeInstance(case1, dataSource1fromCase1, devIdPath); + attr.addInstance(inst); + EamDb.getInstance().addArtifact(attr); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test CorrelationAttributeInstance failure cases + // Create an attribute to use in the next few tests + CorrelationAttribute failAttr; + try { + failAttr = new CorrelationAttribute(fileType, "badInstances"); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + return; + } + + // Test adding instance with null case + try { + CorrelationAttributeInstance inst = new CorrelationAttributeInstance(null, dataSource1fromCase2, "badPath"); + failAttr.addInstance(inst); + 
EamDb.getInstance().addArtifact(failAttr); + Assert.fail("addArtifact failed to throw exception for null case"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test adding instance with invalid case ID + try { + CorrelationCase badCase = new CorrelationCase("badCaseUuid", "badCaseName"); + CorrelationAttributeInstance inst = new CorrelationAttributeInstance(badCase, dataSource1fromCase2, "badPath"); + failAttr.addInstance(inst); + EamDb.getInstance().addArtifact(failAttr); + Assert.fail("addArtifact failed to throw exception for invalid case"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test adding instance with null data source + try { + CorrelationAttributeInstance inst = new CorrelationAttributeInstance(case1, null, "badPath"); + failAttr.addInstance(inst); + EamDb.getInstance().addArtifact(failAttr); + Assert.fail("addArtifact failed to throw exception for null data source"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test adding instance with invalid data source ID + try { + CorrelationDataSource badDS = new CorrelationDataSource(case1.getID(), "badDSUuid", "badDSName"); + CorrelationAttributeInstance inst = new CorrelationAttributeInstance(case1, badDS, "badPath"); + failAttr.addInstance(inst); + EamDb.getInstance().addArtifact(failAttr); + Assert.fail("addArtifact failed to throw exception for invalid data source"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test adding instance with null path + // This will fail in the CorrelationAttributeInstance constructor + try { + new CorrelationAttributeInstance(case1, dataSource1fromCase1, null); + Assert.fail("CorrelationAttributeInstance failed to throw exception for null path"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test adding instance with null known status + try { + CorrelationAttributeInstance inst = new CorrelationAttributeInstance(case1, dataSource1fromCase1, null, "comment", null); + failAttr.addInstance(inst); + EamDb.getInstance().addArtifact(failAttr); + Assert.fail("addArtifact failed to throw exception for null known status"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test CorrelationAttribute failure cases + // Test null type + try { + CorrelationAttribute attr = new CorrelationAttribute(null, "badInstances"); + EamDb.getInstance().addArtifact(attr); + Assert.fail("addArtifact failed to throw exception for null type"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test null value + // This will fail in the CorrelationAttribute constructor + try { + new CorrelationAttribute(fileType, null); + Assert.fail("CorrelationAttribute constructor failed to throw exception for null value"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting instances with expected results + try { + List<CorrelationAttributeInstance> instances = EamDb.getInstance().getArtifactInstancesByTypeValue(fileType, inAllDataSourcesHash); + assertTrue("getArtifactInstancesByTypeValue returned " + instances.size() + " results - expected 3", instances.size() == 3); + + // This test works because all the instances of this hash were set to the same path + for (CorrelationAttributeInstance inst : instances) { + assertTrue("getArtifactInstancesByTypeValue returned instance with unexpected path " + inst.getFilePath(), + inAllDataSourcesPath.equalsIgnoreCase(inst.getFilePath())); + } + } catch (EamDbException ex) { + 
Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting instances expecting no results + try { + List instances = EamDb.getInstance().getArtifactInstancesByTypeValue( + emailType, inAllDataSourcesHash); + assertTrue("getArtifactInstancesByTypeValue returned " + instances.size() + " results - expected 0", instances.isEmpty()); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting instances with null type + try { + EamDb.getInstance().getArtifactInstancesByTypeValue(null, inAllDataSourcesHash); + Assert.fail("getArtifactInstancesByTypeValue failed to throw exception for null type"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting instances with null value + // Should just return nothing + try { + List instances = EamDb.getInstance().getArtifactInstancesByTypeValue(fileType, null); + assertTrue("getArtifactInstancesByTypeValue returned non-empty list for null value", instances.isEmpty()); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting instances with path that should produce results + try { + List instances = EamDb.getInstance().getArtifactInstancesByPath(fileType, inAllDataSourcesPath); + assertTrue("getArtifactInstancesByPath returned " + instances.size() + " objects - expected 3", instances.size() == 3); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting instances with path that should not produce results + try { + List instances = EamDb.getInstance().getArtifactInstancesByPath(fileType, "xyz"); + assertTrue("getArtifactInstancesByPath returned " + instances.size() + " objects - expected 0", instances.isEmpty()); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting instances with null type + try { + EamDb.getInstance().getArtifactInstancesByPath(null, inAllDataSourcesPath); + Assert.fail("getArtifactInstancesByPath failed to throw exception for null type"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting instances with null path + try { + EamDb.getInstance().getArtifactInstancesByPath(fileType, null); + Assert.fail("getArtifactInstancesByPath failed to throw exception for null path"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting instance count with path that should produce results + try { + long count = EamDb.getInstance().getCountArtifactInstancesByTypeValue(fileType, inAllDataSourcesHash); + assertTrue("getCountArtifactInstancesByTypeValue returned " + count + " - expected 3", count == 3); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting instance count with path that should not produce results + try { + long count = EamDb.getInstance().getCountArtifactInstancesByTypeValue(fileType, "xyz"); + assertTrue("getCountArtifactInstancesByTypeValue returned " + count + " - expected 0", count == 0); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting instance count with null type + try { + EamDb.getInstance().getCountArtifactInstancesByTypeValue(null, inAllDataSourcesHash); + Assert.fail("getCountArtifactInstancesByTypeValue failed to throw exception for null type"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting instance count with null value + try { + 
EamDb.getInstance().getCountArtifactInstancesByTypeValue(fileType, null); + Assert.fail("getCountArtifactInstancesByTypeValue failed to throw exception for null value"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting frequency of value that is in all three data sources + try { + CorrelationAttribute attr = new CorrelationAttribute(fileType, inAllDataSourcesHash); + int freq = EamDb.getInstance().getFrequencyPercentage(attr); + assertTrue("getFrequencyPercentage returned " + freq + " - expected 100", freq == 100); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting frequency of value that appears twice in a single data source + try { + CorrelationAttribute attr = new CorrelationAttribute(fileType, inDataSource1twiceHash); + int freq = EamDb.getInstance().getFrequencyPercentage(attr); + assertTrue("getFrequencyPercentage returned " + freq + " - expected 33", freq == 33); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting frequency of non-file type + try { + CorrelationAttribute attr = new CorrelationAttribute(emailType, emailValue); + int freq = EamDb.getInstance().getFrequencyPercentage(attr); + assertTrue("getFrequencyPercentage returned " + freq + " - expected 33", freq == 33); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting frequency of non-existent value + try { + CorrelationAttribute attr = new CorrelationAttribute(fileType, "randomValue"); + int freq = EamDb.getInstance().getFrequencyPercentage(attr); + assertTrue("getFrequencyPercentage returned " + freq + " - expected 0", freq == 0); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting frequency with null type + try { + CorrelationAttribute attr = new CorrelationAttribute(null, "randomValue"); + EamDb.getInstance().getFrequencyPercentage(attr); + Assert.fail("getFrequencyPercentage failed to throw exception for null type"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting frequency with null attribute + try { + EamDb.getInstance().getFrequencyPercentage(null); + Assert.fail("getFrequencyPercentage failed to throw exception for null attribute"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting count for dataSource1fromCase1 (includes all types) + try { + long count = EamDb.getInstance().getCountArtifactInstancesByCaseDataSource(case1.getCaseUUID(), dataSource1fromCase1.getDeviceID()); + assertTrue("getCountArtifactInstancesByCaseDataSource returned " + count + " - expected 7", count == 7); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting count with null case UUID + try { + long count = EamDb.getInstance().getCountArtifactInstancesByCaseDataSource(null, dataSource1fromCase1.getDeviceID()); + assertTrue("getCountArtifactInstancesByCaseDataSource returned " + count + " - expected 0", count == 0); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting count with null device ID + try { + long count = EamDb.getInstance().getCountArtifactInstancesByCaseDataSource(case1.getCaseUUID(), null); + assertTrue("getCountArtifactInstancesByCaseDataSource returned " + count + " - expected 0", count == 0); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + 
Assert.fail(ex); + } + + // Test getting data source count for entry that is in all three + try { + long count = EamDb.getInstance().getCountUniqueCaseDataSourceTuplesHavingTypeValue(fileType, inAllDataSourcesHash); + assertTrue("getCountUniqueCaseDataSourceTuplesHavingTypeValue returned " + count + " - expected 3", count == 3); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting data source count for entry that is in one data source twice + try { + long count = EamDb.getInstance().getCountUniqueCaseDataSourceTuplesHavingTypeValue(fileType, inDataSource1twiceHash); + assertTrue("getCountUniqueCaseDataSourceTuplesHavingTypeValue returned " + count + " - expected 1", count == 1); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting data source count for entry that is not in any data sources + try { + long count = EamDb.getInstance().getCountUniqueCaseDataSourceTuplesHavingTypeValue(fileType, "abcdef"); + assertTrue("getCountUniqueCaseDataSourceTuplesHavingTypeValue returned " + count + " - expected 0", count == 0); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting data source count for null type + try { + EamDb.getInstance().getCountUniqueCaseDataSourceTuplesHavingTypeValue(null, "abcdef"); + Assert.fail("getCountUniqueCaseDataSourceTuplesHavingTypeValue failed to throw exception for null type"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting data source count for null value + try { + long count = EamDb.getInstance().getCountUniqueCaseDataSourceTuplesHavingTypeValue(fileType, null); + assertTrue("getCountUniqueCaseDataSourceTuplesHavingTypeValue returned " + count + " - expected 0", count == 0); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + } + + /** + * Test methods related to correlation types + * newCorrelationType(CorrelationAttribute.Type newType) tests: + * - Test with valid data + * - Test with duplicate data + * - Test with null name + * - Test with null db name + * - Test with null type + * getDefinedCorrelationTypes() tests: + * - Test that the expected number are returned + * getEnabledCorrelationTypes() tests: + * - Test that the expected number are returned + * getSupportedCorrelationTypes() tests: + * - Test that the expected number are returned + * getCorrelationTypeById(int typeId) tests: + * - Test with valid ID + * - Test with invalid ID + * updateCorrelationType(CorrelationAttribute.Type aType) tests: + * - Test with existing type + * - Test with non-existent type + * - Test updating to null name + * - Test with null type + */ + public void testCorrelationTypes() { + + CorrelationAttribute.Type customType; + String customTypeName = "customType"; + String customTypeDb = "custom_type"; + + // Test new type with valid data + try { + customType = new CorrelationAttribute.Type(customTypeName, customTypeDb, false, false); + customType.setId(EamDb.getInstance().newCorrelationType(customType)); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + return; + } + + // Test new type with duplicate data + try { + CorrelationAttribute.Type temp = new CorrelationAttribute.Type(customTypeName, customTypeDb, false, false); + EamDb.getInstance().newCorrelationType(temp); + Assert.fail("newCorrelationType failed to throw exception for duplicate name/db table"); + } catch (EamDbException ex) { + // This 
is the expected behavior + } + + // Test new type with null name + try { + CorrelationAttribute.Type temp = new CorrelationAttribute.Type(null, "temp_type", false, false); + EamDb.getInstance().newCorrelationType(temp); + Assert.fail("newCorrelationType failed to throw exception for null name"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test new type with null db name + // The constructor should fail in this case + try { + new CorrelationAttribute.Type("temp", null, false, false); + Assert.fail("CorrelationAttribute.Type failed to throw exception for null db table name"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test new type with null type + try { + EamDb.getInstance().newCorrelationType(null); + Assert.fail("newCorrelationType failed to throw exception for null type"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting all correlation types + try { + List<CorrelationAttribute.Type> types = EamDb.getInstance().getDefinedCorrelationTypes(); + + // We expect 6 total - 5 default and the custom one made earlier + assertTrue("getDefinedCorrelationTypes returned " + types.size() + " entries - expected 6", types.size() == 6); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting enabled correlation types + try { + List<CorrelationAttribute.Type> types = EamDb.getInstance().getEnabledCorrelationTypes(); + + // We expect 5 - the custom type is disabled + assertTrue("getEnabledCorrelationTypes returned " + types.size() + " enabled entries - expected 5", types.size() == 5); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting supported correlation types + try { + List<CorrelationAttribute.Type> types = EamDb.getInstance().getSupportedCorrelationTypes(); + + // We expect 5 - the custom type is not supported + assertTrue("getSupportedCorrelationTypes returned " + types.size() + " supported entries - expected 5", types.size() == 5); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting the type with a valid ID + try { + CorrelationAttribute.Type temp = EamDb.getInstance().getCorrelationTypeById(customType.getId()); + assertTrue("getCorrelationTypeById returned type with unexpected name " + temp.getDisplayName(), customTypeName.equals(temp.getDisplayName())); + assertTrue("getCorrelationTypeById returned type with unexpected db table name " + temp.getDbTableName(), customTypeDb.equals(temp.getDbTableName())); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting the type with an invalid ID + try { + EamDb.getInstance().getCorrelationTypeById(5555); + Assert.fail("getCorrelationTypeById failed to throw exception for invalid ID"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test updating a valid type + try { + String newName = "newName"; + String newDbTable = "new_db_table"; + customType.setDisplayName(newName); + customType.setDbTableName(newDbTable); + customType.setEnabled(true); // These were originally false + customType.setSupported(true); + + EamDb.getInstance().updateCorrelationType(customType); + + // Get a fresh copy from the database + CorrelationAttribute.Type temp = EamDb.getInstance().getCorrelationTypeById(customType.getId()); + + assertTrue("updateCorrelationType failed to update name", newName.equals(temp.getDisplayName())); + assertTrue("updateCorrelationType failed to update db table name", 
newDbTable.equals(temp.getDbTableName())); + assertTrue("updateCorrelationType failed to update enabled status", temp.isEnabled()); + assertTrue("updateCorrelationType failed to update supported status", temp.isSupported()); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test updating a type with an invalid ID + // Nothing should happen + try { + CorrelationAttribute.Type temp = new CorrelationAttribute.Type(customTypeName, customTypeDb, false, false); + temp.setId(12345); + EamDb.getInstance().updateCorrelationType(temp); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test updating a type to a null name + try { + customType.setDisplayName(null); + EamDb.getInstance().updateCorrelationType(customType); + Assert.fail("updateCorrelationType failed to throw exception for null name"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test updating a null type + try { + EamDb.getInstance().updateCorrelationType(null); + Assert.fail("updateCorrelationType failed to throw exception for null type"); + } catch (EamDbException ex) { + // This is the expected behavior + } + } + + /** + * Test the methods related to organizations + * newOrganization(EamOrganization eamOrg) tests: + * - Test with just org name + * - Test with org name and poc info + * - Test adding duplicate org + * - Test adding null org + * - Test adding org with null name + * getOrganizations() tests: + * - Test getting the list of orgs + * getOrganizationByID(int orgID) tests: + * - Test with valid ID + * - Test with invalid ID + * updateOrganization(EamOrganization updatedOrganization) tests: + * - Test updating valid org + * - Test updating invalid org + * - Test updating null org + * - Test updating org to null name + * deleteOrganization(EamOrganization organizationToDelete) tests: + * - Test deleting org that isn't in use + * - Test deleting org that is in use + * - Test deleting invalid org + * - Test deleting null org + */ + public void testOrganizations() { + + EamOrganization orgA; + String orgAname = "orgA"; + EamOrganization orgB; + String orgBname = "orgB"; + String orgBpocName = "pocName"; + String orgBpocEmail = "pocEmail"; + String orgBpocPhone = "pocPhone"; + + // Test adding a basic organization + try { + orgA = new EamOrganization(orgAname); + orgA.setOrgID((int) EamDb.getInstance().newOrganization(orgA)); + assertTrue("Organization ID is still -1 after adding to db", orgA.getOrgID() != -1); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + return; + } + + // Test adding an organization with additional fields + try { + orgB = new EamOrganization(orgBname, orgBpocName, orgBpocEmail, orgBpocPhone); + orgB.setOrgID((int) EamDb.getInstance().newOrganization(orgB)); + assertTrue("Organization ID is still -1 after adding to db", orgB.getOrgID() != -1); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + return; + } + + // Test adding a duplicate organization + try { + EamOrganization temp = new EamOrganization(orgAname); + EamDb.getInstance().newOrganization(temp); + Assert.fail("newOrganization failed to throw exception for duplicate org name"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test adding null organization + try { + EamDb.getInstance().newOrganization(null); + Assert.fail("newOrganization failed to throw exception for null org"); + } catch 
(EamDbException ex) { + // This is the expected behavior + } + + // Test adding organization with null name + try { + EamOrganization temp = new EamOrganization(null); + EamDb.getInstance().newOrganization(temp); + Assert.fail("newOrganization failed to throw exception for null name"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting organizations + // We expect five - the default org, two from setUp, and two from this method + try { + List orgs = EamDb.getInstance().getOrganizations(); + assertTrue("getOrganizations returned null list", orgs != null); + assertTrue("getOrganizations returned " + orgs.size() + " orgs - expected 5", orgs.size() == 5); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting org with valid ID + try { + EamOrganization temp = EamDb.getInstance().getOrganizationByID(orgB.getOrgID()); + assertTrue("getOrganizationByID returned null for valid ID", temp != null); + assertTrue("getOrganizationByID returned unexpected name for organization", orgBname.equals(temp.getName())); + assertTrue("getOrganizationByID returned unexpected poc name for organization", orgBpocName.equals(temp.getPocName())); + assertTrue("getOrganizationByID returned unexpected poc email for organization", orgBpocEmail.equals(temp.getPocEmail())); + assertTrue("getOrganizationByID returned unexpected poc phone for organization", orgBpocPhone.equals(temp.getPocPhone())); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting org with invalid ID + try { + EamDb.getInstance().getOrganizationByID(12345); + Assert.fail("getOrganizationByID failed to throw exception for invalid ID"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test updating valid org + try { + String newName = "newOrgName"; + String newPocName = "newPocName"; + String newPocEmail = "newPocEmail"; + String newPocPhone = "newPocPhone"; + orgA.setName(newName); + orgA.setPocName(newPocName); + orgA.setPocEmail(newPocEmail); + orgA.setPocPhone(newPocPhone); + + EamDb.getInstance().updateOrganization(orgA); + + EamOrganization copyOfA = EamDb.getInstance().getOrganizationByID(orgA.getOrgID()); + + assertTrue("getOrganizationByID returned null for valid ID", copyOfA != null); + assertTrue("updateOrganization failed to update org name", newName.equals(copyOfA.getName())); + assertTrue("updateOrganization failed to update poc name", newPocName.equals(copyOfA.getPocName())); + assertTrue("updateOrganization failed to update poc email", newPocEmail.equals(copyOfA.getPocEmail())); + assertTrue("updateOrganization failed to update poc phone", newPocPhone.equals(copyOfA.getPocPhone())); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test updating invalid org + // Shouldn't do anything + try { + EamOrganization temp = new EamOrganization("invalidOrg"); + temp.setOrgID(3434); + EamDb.getInstance().updateOrganization(temp); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test updating null org + try { + EamDb.getInstance().updateOrganization(null); + Assert.fail("updateOrganization failed to throw exception for null org"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test updating org to null name + try { + EamOrganization copyOfA = EamDb.getInstance().getOrganizationByID(orgA.getOrgID()); + copyOfA.setName(null); + 
EamDb.getInstance().updateOrganization(copyOfA); + Assert.fail("updateOrganization failed to throw exception for null name"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test deleting existing org that isn't in use + try { + EamOrganization orgToDelete = new EamOrganization("deleteThis"); + orgToDelete.setOrgID((int) EamDb.getInstance().newOrganization(orgToDelete)); + int orgCount = EamDb.getInstance().getOrganizations().size(); + + EamDb.getInstance().deleteOrganization(orgToDelete); + assertTrue("getOrganizations returned unexpected count after deletion", orgCount - 1 == EamDb.getInstance().getOrganizations().size()); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test deleting existing org that is in use + try { + // Make a new org + EamOrganization inUseOrg = new EamOrganization("inUseOrg"); + inUseOrg.setOrgID((int) EamDb.getInstance().newOrganization(inUseOrg)); + + // Make a reference set that uses it + EamGlobalSet tempSet = new EamGlobalSet(inUseOrg.getOrgID(), "inUseOrgTest", "1.0", TskData.FileKnown.BAD, false, fileType); + EamDb.getInstance().newReferenceSet(tempSet); + + // It should now throw an exception if we try to delete it + EamDb.getInstance().deleteOrganization(inUseOrg); + Assert.fail("deleteOrganization failed to throw exception for in use organization"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test deleting non-existent org + // Should do nothing + try { + EamOrganization temp = new EamOrganization("temp"); + temp.setOrgID(9876); + EamDb.getInstance().deleteOrganization(temp); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test deleting null org + try { + EamDb.getInstance().deleteOrganization(null); + Assert.fail("deleteOrganization failed to throw exception for null organization"); + } catch (EamDbException ex) { + // This is the expected behavior + } + } + + /** + * Tests for adding / retrieving reference instances + * Only the files type is currently implemented + * addReferenceInstance(EamGlobalFileInstance eamGlobalFileInstance, CorrelationAttribute.Type correlationType) tests: + * - Test adding multiple valid entries + * - Test invalid reference set ID + * - Test null hash (EamGlobalFileInstance constructor) + * - Test null known status (EamGlobalFileInstance constructor) + * - Test null correlation type + * bulkInsertReferenceTypeEntries(Set globalInstances, CorrelationAttribute.Type contentType) tests: + * - Test with large valid list + * - Test with null list + * - Test with invalid reference set ID + * - Test with null correlation type + * getReferenceInstancesByTypeValue(CorrelationAttribute.Type aType, String aValue) tests: + * - Test with valid entries + * - Test with non-existent value + * - Test with invalid type + * - Test with null type + * - Test with null value + * isFileHashInReferenceSet(String hash, int referenceSetID)tests: + * - Test existing hash/ID + * - Test non-existent (but valid) hash/ID + * - Test invalid ID + * - Test null hash + * isValueInReferenceSet(String value, int referenceSetID, int correlationTypeID) tests: + * - Test existing value/ID + * - Test non-existent (but valid) value/ID + * - Test invalid ID + * - Test null value + * - Test invalid type ID + * isArtifactKnownBadByReference(CorrelationAttribute.Type aType, String value) tests: + * - Test notable value + * - Test known value + * - Test non-existent value + * - Test null value + * - Test null type + 
* - Test invalid type + */ + public void testReferenceSetInstances() { + + // After the two initial testing blocks, the reference sets should contain: + // notableSet1 - notableHash1, inAllSetsHash + // notableSet2 - inAllSetsHash + // knownSet1 - knownHash1, inAllSetsHash + EamGlobalSet notableSet1; + int notableSet1id; + EamGlobalSet notableSet2; + int notableSet2id; + EamGlobalSet knownSet1; + int knownSet1id; + + String notableHash1 = "d46feecd663c41648dbf690d9343cf4b"; + String knownHash1 = "39c844daee70485143da4ff926601b5b"; + String inAllSetsHash = "6449b39bb23c42879fa0c243726e27f7"; + + CorrelationAttribute.Type emailType; + + // Store the email type object for later use + try { + emailType = EamDb.getInstance().getCorrelationTypeById(CorrelationAttribute.EMAIL_TYPE_ID); + assertTrue("getCorrelationTypeById(EMAIL_TYPE_ID) returned null", emailType != null); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + return; + } + + // Set up a few reference sets + try { + notableSet1 = new EamGlobalSet(org1.getOrgID(), "notable set 1", "1.0", TskData.FileKnown.BAD, false, fileType); + notableSet1id = EamDb.getInstance().newReferenceSet(notableSet1); + notableSet2 = new EamGlobalSet(org1.getOrgID(), "notable set 2", "2.4", TskData.FileKnown.BAD, false, fileType); + notableSet2id = EamDb.getInstance().newReferenceSet(notableSet2); + knownSet1 = new EamGlobalSet(org1.getOrgID(), "known set 1", "5.5.4", TskData.FileKnown.KNOWN, false, fileType); + knownSet1id = EamDb.getInstance().newReferenceSet(knownSet1); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + return; + } + + // Test adding file instances with valid data + try { + EamGlobalFileInstance temp = new EamGlobalFileInstance(notableSet1id, inAllSetsHash, TskData.FileKnown.BAD, "comment1"); + EamDb.getInstance().addReferenceInstance(temp, fileType); + + temp = new EamGlobalFileInstance(notableSet2id, inAllSetsHash, TskData.FileKnown.BAD, "comment2"); + EamDb.getInstance().addReferenceInstance(temp, fileType); + + temp = new EamGlobalFileInstance(knownSet1id, inAllSetsHash, TskData.FileKnown.KNOWN, "comment3"); + EamDb.getInstance().addReferenceInstance(temp, fileType); + + temp = new EamGlobalFileInstance(notableSet1id, notableHash1, TskData.FileKnown.BAD, "comment4"); + EamDb.getInstance().addReferenceInstance(temp, fileType); + + temp = new EamGlobalFileInstance(knownSet1id, knownHash1, TskData.FileKnown.KNOWN, "comment5"); + EamDb.getInstance().addReferenceInstance(temp, fileType); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test adding file instance with invalid reference set ID + try { + EamGlobalFileInstance temp = new EamGlobalFileInstance(2345, inAllSetsHash, TskData.FileKnown.BAD, "comment"); + EamDb.getInstance().addReferenceInstance(temp, fileType); + Assert.fail("addReferenceInstance failed to throw exception for invalid ID"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test creating file instance with null hash + // Since it isn't possible to get a null hash into the EamGlobalFileInstance, skip trying to + // call addReferenceInstance and just test the EamGlobalFileInstance constructor + try { + new EamGlobalFileInstance(notableSet1id, null, TskData.FileKnown.BAD, "comment"); + Assert.fail("EamGlobalFileInstance failed to throw exception for null hash"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test adding file instance with null 
known status + // Since it isn't possible to get a null known status into the EamGlobalFileInstance, skip trying to + // call addReferenceInstance and just test the EamGlobalFileInstance constructor + try { + new EamGlobalFileInstance(notableSet1id, inAllSetsHash, null, "comment"); + Assert.fail("EamGlobalFileInstance failed to throw exception for null type"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test adding file instance with null correlation type + try { + EamGlobalFileInstance temp = new EamGlobalFileInstance(notableSet1id, inAllSetsHash, TskData.FileKnown.BAD, "comment"); + EamDb.getInstance().addReferenceInstance(temp, null); + Assert.fail("addReferenceInstance failed to throw exception for null type"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test bulk insert with large valid set + try { + // Create a list of global file instances. Make enough that the bulk threshold should be hit once. + Set instances = new HashSet<>(); + String bulkTestHash = "bulktesthash_"; + for (int i = 0; i < dbSettingsSqlite.getBulkThreshold() * 1.5; i++) { + String hash = bulkTestHash + String.valueOf(i); + instances.add(new EamGlobalFileInstance(notableSet2id, hash, TskData.FileKnown.BAD, null)); + } + + // Insert the list + EamDb.getInstance().bulkInsertReferenceTypeEntries(instances, fileType); + + // There's no way to get a count of the number of entries in the database, so just do a spot check + if (dbSettingsSqlite.getBulkThreshold() > 10) { + String hash = bulkTestHash + "10"; + assertTrue("Sample bulk insert instance not found", EamDb.getInstance().isFileHashInReferenceSet(hash, notableSet2id)); + } + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test bulk add file instance with null list + try { + EamDb.getInstance().bulkInsertReferenceTypeEntries(null, fileType); + Assert.fail("bulkInsertReferenceTypeEntries failed to throw exception for null list"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test bulk add file instance with invalid reference set ID + try { + Set tempSet = new HashSet<>(Arrays.asList(new EamGlobalFileInstance(2345, inAllSetsHash, TskData.FileKnown.BAD, "comment"))); + EamDb.getInstance().bulkInsertReferenceTypeEntries(tempSet, fileType); + Assert.fail("bulkInsertReferenceTypeEntries failed to throw exception for invalid ID"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test bulk add file instance with null correlation type + try { + Set tempSet = new HashSet<>(Arrays.asList(new EamGlobalFileInstance(notableSet1id, inAllSetsHash, TskData.FileKnown.BAD, "comment"))); + EamDb.getInstance().bulkInsertReferenceTypeEntries(tempSet, null); + Assert.fail("bulkInsertReferenceTypeEntries failed to throw exception for null type"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting reference instances with valid data + try { + List temp = EamDb.getInstance().getReferenceInstancesByTypeValue(fileType, inAllSetsHash); + assertTrue("getReferenceInstancesByTypeValue returned " + temp.size() + " instances - expected 3", temp.size() == 3); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting reference instances with non-existent data + try { + List temp = EamDb.getInstance().getReferenceInstancesByTypeValue(fileType, "testHash"); + assertTrue("getReferenceInstancesByTypeValue returned " + temp.size() + " instances 
for non-existent value - expected 0", temp.isEmpty()); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting reference instances an invalid type (the email table is not yet implemented) + try { + EamDb.getInstance().getReferenceInstancesByTypeValue(emailType, inAllSetsHash); + Assert.fail("getReferenceInstancesByTypeValue failed to throw exception for invalid table"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting reference instances with null type + try { + EamDb.getInstance().getReferenceInstancesByTypeValue(null, inAllSetsHash); + Assert.fail("getReferenceInstancesByTypeValue failed to throw exception for null type"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting reference instances with null value + try { + List temp = EamDb.getInstance().getReferenceInstancesByTypeValue(fileType, null); + assertTrue("getReferenceInstancesByTypeValue returned non-empty list given null value", temp.isEmpty()); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test checking existing hash/ID + try { + assertTrue("isFileHashInReferenceSet returned false for valid data", EamDb.getInstance().isFileHashInReferenceSet(knownHash1, knownSet1id)); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test checking non-existent (but valid) hash/ID + try { + assertFalse("isFileHashInReferenceSet returned true for non-existent data", EamDb.getInstance().isFileHashInReferenceSet(knownHash1, notableSet1id)); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test checking invalid reference set ID + try { + assertFalse("isFileHashInReferenceSet returned true for invalid data", EamDb.getInstance().isFileHashInReferenceSet(knownHash1, 5678)); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test checking null hash + try { + assertFalse("isFileHashInReferenceSet returned true for null hash", EamDb.getInstance().isFileHashInReferenceSet(null, knownSet1id)); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test checking existing hash/ID + try { + assertTrue("isValueInReferenceSet returned false for valid data", + EamDb.getInstance().isValueInReferenceSet(knownHash1, knownSet1id, fileType.getId())); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test checking non-existent (but valid) hash/ID + try { + assertFalse("isValueInReferenceSet returned true for non-existent data", + EamDb.getInstance().isValueInReferenceSet(knownHash1, notableSet1id, fileType.getId())); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test checking invalid reference set ID + try { + assertFalse("isValueInReferenceSet returned true for invalid data", + EamDb.getInstance().isValueInReferenceSet(knownHash1, 5678, fileType.getId())); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test checking null hash + try { + assertFalse("isValueInReferenceSet returned true for null value", + EamDb.getInstance().isValueInReferenceSet(null, knownSet1id, fileType.getId())); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test checking invalid type + try { + EamDb.getInstance().isValueInReferenceSet(knownHash1, 
knownSet1id, emailType.getId()); + Assert.fail("isValueInReferenceSet failed to throw exception for invalid type"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test known bad with notable data + try { + assertTrue("isArtifactKnownBadByReference returned false for notable value", + EamDb.getInstance().isArtifactKnownBadByReference(fileType, notableHash1)); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test known bad with known data + try { + assertFalse("isArtifactKnownBadByReference returned true for known value", + EamDb.getInstance().isArtifactKnownBadByReference(fileType, knownHash1)); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test known bad with non-existent data + try { + assertFalse("isArtifactKnownBadByReference returned true for non-existent value", + EamDb.getInstance().isArtifactKnownBadByReference(fileType, "abcdef")); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test known bad with null hash + try { + assertFalse("isArtifactKnownBadByReference returned true for null value", + EamDb.getInstance().isArtifactKnownBadByReference(fileType, null)); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test known bad with null type + try { + EamDb.getInstance().isArtifactKnownBadByReference(null, knownHash1); + Assert.fail("isArtifactKnownBadByReference failed to throw exception from null type"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test known bad with invalid type + try { + assertFalse("isArtifactKnownBadByReference returned true for invalid type", EamDb.getInstance().isArtifactKnownBadByReference(emailType, null)); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + } + + /** + * Test method for the methods related to reference sets (does not include + * instance testing) Only the files type is currently implemented + * newReferenceSet(EamGlobalSet eamGlobalSet) tests: + * - Test creating notable reference set + * - Test creating known reference set + * - Test creating duplicate reference set + * - Test creating almost duplicate reference set + * - Test with invalid org ID + * - Test with null name + * - Test with null version + * - Test with null known status + * - Test with null file type + * referenceSetIsValid(int referenceSetID, String referenceSetName, String version) tests: + * - Test on existing reference set + * - Test on invalid reference set + * - Test with null name + * - Test with null version + * referenceSetExists(String referenceSetName, String version) tests: + * - Test on existing reference set + * - Test on invalid reference set + * - Test with null name + * - Test with null version + * getReferenceSetByID(int globalSetID) tests: + * - Test with valid ID + * - Test with invalid ID + * getAllReferenceSets(CorrelationAttribute.Type correlationType) tests: + * - Test getting all file sets + * - Test getting all email sets + * - Test with null type parameter + * deleteReferenceSet(int referenceSetID) tests: + * - Test on valid reference set ID + * - Test on invalid reference set ID + * getReferenceSetOrganization(int referenceSetID) tests: + * - Test on valid reference set ID + * - Test on invalid reference set ID + */ + public void testReferenceSets() { + String set1name = "referenceSet1"; + String set1version = "1.0"; + EamGlobalSet set1; + int set1id; + String 
set2name = "referenceSet2"; + EamGlobalSet set2; + EamGlobalSet set3; + + // Test creating a notable reference set + try { + set1 = new EamGlobalSet(org1.getOrgID(), set1name, set1version, TskData.FileKnown.BAD, false, fileType); + set1id = EamDb.getInstance().newReferenceSet(set1); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + return; + } + + // Test creating a known reference set + try { + set2 = new EamGlobalSet(org2.getOrgID(), set2name, "", TskData.FileKnown.KNOWN, false, fileType); + EamDb.getInstance().newReferenceSet(set2); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + return; + } + + // Test creating a reference set with the same name and version + try { + EamGlobalSet temp = new EamGlobalSet(org1.getOrgID(), set1name, "1.0", TskData.FileKnown.BAD, false, fileType); + EamDb.getInstance().newReferenceSet(temp); + Assert.fail("newReferenceSet failed to throw exception from duplicate name/version pair"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test creating a reference set with the same name but different version + try { + set3 = new EamGlobalSet(org1.getOrgID(), set1name, "2.0", TskData.FileKnown.BAD, false, fileType); + EamDb.getInstance().newReferenceSet(set3); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + return; + } + + // Test creating a reference set with invalid org ID + try { + EamGlobalSet temp = new EamGlobalSet(5000, "tempName", "", TskData.FileKnown.BAD, false, fileType); + EamDb.getInstance().newReferenceSet(temp); + Assert.fail("newReferenceSet failed to throw exception from invalid org ID"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test creating a reference set with null name + try { + EamGlobalSet temp = new EamGlobalSet(org2.getOrgID(), null, "", TskData.FileKnown.BAD, false, fileType); + EamDb.getInstance().newReferenceSet(temp); + Assert.fail("newReferenceSet failed to throw exception from null name"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test creating a reference set with null version + try { + EamGlobalSet temp = new EamGlobalSet(org2.getOrgID(), "tempName", null, TskData.FileKnown.BAD, false, fileType); + EamDb.getInstance().newReferenceSet(temp); + Assert.fail("newReferenceSet failed to throw exception from null version"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test creating a reference set with null file known status + try { + EamGlobalSet temp = new EamGlobalSet(org2.getOrgID(), "tempName", "", null, false, fileType); + EamDb.getInstance().newReferenceSet(temp); + Assert.fail("newReferenceSet failed to throw exception from null file known status"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test creating a reference set with null file type + try { + EamGlobalSet temp = new EamGlobalSet(org2.getOrgID(), "tempName", "", TskData.FileKnown.BAD, false, null); + EamDb.getInstance().newReferenceSet(temp); + Assert.fail("newReferenceSet failed to throw exception from null file type"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test validation with a valid reference set + try { + assertTrue("referenceSetIsValid returned false for valid reference set", EamDb.getInstance().referenceSetIsValid(set1id, set1name, set1version)); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test 
validation with an invalid reference set + try { + assertFalse("referenceSetIsValid returned true for invalid reference set", EamDb.getInstance().referenceSetIsValid(5000, set1name, set1version)); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test validation with a null name + try { + assertFalse("referenceSetIsValid returned true with null name", EamDb.getInstance().referenceSetIsValid(set1id, null, set1version)); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test validation with a null version + try { + assertFalse("referenceSetIsValid returned true with null version", EamDb.getInstance().referenceSetIsValid(set1id, set1name, null)); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test existence with a valid reference set + try { + assertTrue("referenceSetExists returned false for valid reference set", EamDb.getInstance().referenceSetExists(set1name, set1version)); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test existence with an invalid reference set + try { + assertFalse("referenceSetExists returned true for invalid reference set", EamDb.getInstance().referenceSetExists(set1name, "5.5")); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test existence with null name + try { + assertFalse("referenceSetExists returned true for null name", EamDb.getInstance().referenceSetExists(null, "1.0")); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test existence with null version + try { + assertFalse("referenceSetExists returned true for null version", EamDb.getInstance().referenceSetExists(set1name, null)); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting global set with valid ID + try { + EamGlobalSet temp = EamDb.getInstance().getReferenceSetByID(set1id); + assertTrue("getReferenceSetByID returned null for valid ID", temp != null); + assertTrue("getReferenceSetByID returned set with incorrect name and/or version", + set1name.equals(temp.getSetName()) && set1version.equals(temp.getVersion())); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting global set with invalid ID + try { + EamGlobalSet temp = EamDb.getInstance().getReferenceSetByID(1234); + assertTrue("getReferenceSetByID returned non-null result for invalid ID", temp == null); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting all file reference sets + try { + List referenceSets = EamDb.getInstance().getAllReferenceSets(fileType); + assertTrue("getAllReferenceSets(FILES) returned unexpected number", referenceSets.size() == 3); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting all email reference sets + try { + List referenceSets = EamDb.getInstance().getAllReferenceSets(EamDb.getInstance().getCorrelationTypeById(CorrelationAttribute.EMAIL_TYPE_ID)); + assertTrue("getAllReferenceSets(EMAIL) returned unexpected number", referenceSets.isEmpty()); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test null argument to getAllReferenceSets + try { + EamDb.getInstance().getAllReferenceSets(null); + Assert.fail("getAllReferenceSets failed to throw exception from null type 
argument"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test deleting an existing reference set + // First: create a new reference set, check that it's in the database, and get the number of reference sets + // Second: delete the reference set, check that it is no longer in the database, and the total number of sets decreased by one + try { + EamGlobalSet setToDelete = new EamGlobalSet(org1.getOrgID(), "deleteThis", "deleteThisVersion", TskData.FileKnown.BAD, false, fileType); + int setToDeleteID = EamDb.getInstance().newReferenceSet(setToDelete); + assertTrue("setToDelete wasn't found in database", EamDb.getInstance().referenceSetIsValid(setToDeleteID, setToDelete.getSetName(), setToDelete.getVersion())); + int currentCount = EamDb.getInstance().getAllReferenceSets(fileType).size(); + + EamDb.getInstance().deleteReferenceSet(setToDeleteID); + assertFalse("Deleted reference set was found in database", EamDb.getInstance().referenceSetIsValid(setToDeleteID, setToDelete.getSetName(), setToDelete.getVersion())); + assertTrue("Unexpected number of reference sets in database after deletion", currentCount - 1 == EamDb.getInstance().getAllReferenceSets(fileType).size()); + + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test deleting a non-existent reference set + // The expectation is that nothing will happen + try { + int currentCount = EamDb.getInstance().getAllReferenceSets(fileType).size(); + EamDb.getInstance().deleteReferenceSet(1234); + assertTrue("Number of reference sets changed after deleting non-existent set", currentCount == EamDb.getInstance().getAllReferenceSets(fileType).size()); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting reference set organization for valid ID with org set + try { + EamOrganization org = EamDb.getInstance().getReferenceSetOrganization(set1id); + assertTrue("getReferenceSetOrganization returned null for valid set", org != null); + assertTrue("getReferenceSetOrganization returned the incorrect organization", org.getOrgID() == org1.getOrgID()); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting reference set organization for non-existent reference set + try { + EamDb.getInstance().getReferenceSetOrganization(4567); + Assert.fail("getReferenceSetOrganization failed to throw exception for invalid reference set ID"); + } catch (EamDbException ex) { + // This is the expected behavior + } + } + + /** + * Test method for the methods related to the data sources table + * newDataSource(CorrelationDataSource eamDataSource) tests: + * - Test with valid data + * - Test with duplicate data + * - Test with duplicate device ID and name but different case + * - Test with invalid case ID + * - Test with null device ID + * - Test with null name + * getDataSource(CorrelationCase correlationCase, String dataSourceDeviceId) tests: + * - Test with valid data + * - Test with non-existent data + * - Test with null correlationCase + * - Test with null device ID + * getDataSources()tests: + * - Test that the count and device IDs are as expected + * getCountUniqueDataSources() tests: + * - Test that the result is as expected + */ + public void testDataSources() { + final String dataSourceAname = "dataSourceA"; + final String dataSourceAid = "dataSourceA_deviceID"; + CorrelationDataSource dataSourceA; + CorrelationDataSource dataSourceB; + + // Test creating a data source with valid case, 
name, and ID + try { + dataSourceA = new CorrelationDataSource(case2.getID(), dataSourceAid, dataSourceAname); + EamDb.getInstance().newDataSource(dataSourceA); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + return; + } + + // Test creating a data source with the same case, name, and ID + try { + CorrelationDataSource temp = new CorrelationDataSource(case2.getID(), dataSourceAid, dataSourceAname); + EamDb.getInstance().newDataSource(temp); + Assert.fail("newDataSource did not throw exception from duplicate data source"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test creating a data source with the same name and ID but different case + try { + dataSourceB = new CorrelationDataSource(case1.getID(), dataSourceAid, dataSourceAname); + EamDb.getInstance().newDataSource(dataSourceB); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + return; + } + + // Test creating a data source with an invalid case ID + try { + CorrelationDataSource temp = new CorrelationDataSource(5000, "tempID", "tempName"); + EamDb.getInstance().newDataSource(temp); + Assert.fail("newDataSource did not throw exception from invalid case ID"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test creating a data source with null device ID + try { + CorrelationDataSource temp = new CorrelationDataSource(case2.getID(), null, "tempName"); + EamDb.getInstance().newDataSource(temp); + Assert.fail("newDataSource did not throw exception from null device ID"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test creating a data source with null name + try { + CorrelationDataSource temp = new CorrelationDataSource(case2.getID(), "tempID", null); + EamDb.getInstance().newDataSource(temp); + Assert.fail("newDataSource did not throw exception from null name"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting a data source with valid case and ID + try { + CorrelationDataSource temp = EamDb.getInstance().getDataSource(case2, dataSourceAid); + assertTrue("Failed to get data source", temp != null); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting a data source with non-existent ID + try { + CorrelationDataSource temp = EamDb.getInstance().getDataSource(case2, "badID"); + assertTrue("getDataSource returned non-null value for non-existent data source", temp == null); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting a data source with a null case + try { + EamDb.getInstance().getDataSource(null, dataSourceAid); + Assert.fail("getDataSource did not throw exception from null case"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting a data source with null ID + try { + CorrelationDataSource temp = EamDb.getInstance().getDataSource(case2, null); + assertTrue("getDataSource returned non-null value for null data source", temp == null); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting the list of data sources + // There should be five data sources, and we'll check for the expected device IDs + try { + List dataSources = EamDb.getInstance().getDataSources(); + List devIdList + = dataSources.stream().map(c -> c.getDeviceID()).collect(Collectors.toList()); + assertTrue("getDataSources returned unexpected number of data 
sources", dataSources.size() == 5); + assertTrue("getDataSources is missing expected data sources", + devIdList.contains(dataSourceAid) + && devIdList.contains(dataSource1fromCase1.getDeviceID()) + && devIdList.contains(dataSource2fromCase1.getDeviceID()) + && devIdList.contains(dataSource1fromCase2.getDeviceID())); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test the data source count + try { + assertTrue("getCountUniqueDataSources returned unexpected number of data sources", + EamDb.getInstance().getCountUniqueDataSources() == 5); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + } + + /** + * Test method for the methods related to the cases table + * newCase(CorrelationCase eamCase) tests: + * - Test valid data + * - Test null UUID + * - Test null case name + * - Test repeated UUID + * newCase(Case autopsyCase) tests: + * - Test valid data + * - Test null autopsyCase + * updateCase(CorrelationCase eamCase) tests: + * - Test with valid data, checking all fields + * - Test null eamCase + * getCase(Case autopsyCase) tests: + * - Test with current Autopsy case + * getCaseByUUID(String caseUUID) + * - Test with UUID that is in the database + * - Test with UUID that is not in the database + * - Test with null UUID + * getCases() tests: + * - Test getting all cases, checking the count and fields + * bulkInsertCases(List cases) + * - Test on a list of cases larger than the bulk insert threshold. + * - Test on a null list + */ + public void testCases() { + final String caseAname = "caseA"; + final String caseAuuid = "caseA_uuid"; + CorrelationCase caseA; + CorrelationCase caseB; + + try { + // Set up an Autopsy case for testing + try { + Case.createAsCurrentCase(Case.CaseType.SINGLE_USER_CASE, testDirectory.toString(), new CaseDetails("CentralRepoDatamodelTestCase")); + } catch (CaseActionException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + assertTrue("Failed to create test case", testDirectory.toFile().exists()); + + // Test creating a case with valid name and uuid + try { + caseA = new CorrelationCase(caseAuuid, caseAname); + caseA = EamDb.getInstance().newCase(caseA); + assertTrue("Failed to create case", caseA != null); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + return; + } + + // Test null uuid + try { + CorrelationCase tempCase = new CorrelationCase(null, "nullUuidCase"); + EamDb.getInstance().newCase(tempCase); + Assert.fail("newCase did not throw expected exception from null uuid"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test null name + try { + CorrelationCase tempCase = new CorrelationCase("nullCaseUuid", null); + EamDb.getInstance().newCase(tempCase); + Assert.fail("newCase did not throw expected exception from null name"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test creating a case with an already used UUID + // This should just return the existing case object. Check that the total + // number of cases does not increase. 
+ try { + int nCases = EamDb.getInstance().getCases().size(); + CorrelationCase tempCase = new CorrelationCase(caseAuuid, "newCaseWithSameUUID"); + tempCase = EamDb.getInstance().newCase(tempCase); + assertTrue("newCase returned null for existing UUID", tempCase != null); + assertTrue("newCase created a new case for an already existing UUID", nCases == EamDb.getInstance().getCases().size()); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test creating a case from an Autopsy case + // The case may already be in the database - the result is the same either way + try { + caseB = EamDb.getInstance().newCase(Case.getCurrentCase()); + assertTrue("Failed to create correlation case from Autopsy case", caseB != null); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + return; + } + + // Test null Autopsy case + try { + Case nullCase = null; + EamDb.getInstance().newCase(nullCase); + Assert.fail("newCase did not throw expected exception from null case"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test update case + // Will update the fields of an existing case object, save it, and then + // pull a new copy out of the database + try { + assertTrue(caseA != null); + String caseNumber = "12-34-56"; + String creationDate = "01/12/2018"; + String displayName = "Test Case"; + String examinerEmail = "john@sample.com"; + String examinerName = "John Doe"; + String examinerPhone = "123-555-4567"; + String notes = "Notes"; + + caseA.setCaseNumber(caseNumber); + caseA.setCreationDate(creationDate); + caseA.setDisplayName(displayName); + caseA.setExaminerEmail(examinerEmail); + caseA.setExaminerName(examinerName); + caseA.setExaminerPhone(examinerPhone); + caseA.setNotes(notes); + caseA.setOrg(org1); + + EamDb.getInstance().updateCase(caseA); + + // Retrieve a new copy of the case from the database to check that the + // fields were properly updated + CorrelationCase updatedCase = EamDb.getInstance().getCaseByUUID(caseA.getCaseUUID()); + + assertTrue("updateCase failed to update case number", caseNumber.equals(updatedCase.getCaseNumber())); + assertTrue("updateCase failed to update creation date", creationDate.equals(updatedCase.getCreationDate())); + assertTrue("updateCase failed to update display name", displayName.equals(updatedCase.getDisplayName())); + assertTrue("updateCase failed to update examiner email", examinerEmail.equals(updatedCase.getExaminerEmail())); + assertTrue("updateCase failed to update examiner name", examinerName.equals(updatedCase.getExaminerName())); + assertTrue("updateCase failed to update examiner phone number", examinerPhone.equals(updatedCase.getExaminerPhone())); + assertTrue("updateCase failed to update notes", notes.equals(updatedCase.getNotes())); + assertTrue("updateCase failed to update org (org is null)", updatedCase.getOrg() != null); + assertTrue("updateCase failed to update org (org ID is wrong)", org1.getOrgID() == updatedCase.getOrg().getOrgID()); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test update case with null case + try { + EamDb.getInstance().updateCase(null); + Assert.fail("updateCase did not throw expected exception from null case"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test getting a case from an Autopsy case + try { + CorrelationCase tempCase = EamDb.getInstance().getCase(Case.getCurrentCase()); + assertTrue("getCase returned null for current
Autopsy case", tempCase != null); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting a case by UUID + try { + CorrelationCase tempCase = EamDb.getInstance().getCaseByUUID(caseAuuid); + assertTrue("Failed to get case by UUID", tempCase != null); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting a case with a non-existent UUID + try { + CorrelationCase tempCase = EamDb.getInstance().getCaseByUUID("badUUID"); + assertTrue("getCaseByUUID returned non-null case for non-existent UUID", tempCase == null); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting a case with null UUID + try { + CorrelationCase tempCase = EamDb.getInstance().getCaseByUUID(null); + assertTrue("getCaseByUUID returned non-null case for null UUID", tempCase == null); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test getting the list of cases + // The test is to make sure the three cases we know are in the database are in the list + try { + List caseList = EamDb.getInstance().getCases(); + List uuidList + = caseList.stream().map(c -> c.getCaseUUID()).collect(Collectors.toList()); + assertTrue("getCases is missing data for existing cases", uuidList.contains(case1.getCaseUUID()) + && uuidList.contains(case2.getCaseUUID()) && (uuidList.contains(caseA.getCaseUUID())) + && uuidList.contains(caseB.getCaseUUID())); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test bulk case insert + try { + // Create a list of correlation cases. Make enough that the bulk threshold should be hit once. + List cases = new ArrayList<>(); + String bulkTestUuid = "bulkTestUUID_"; + String bulkTestName = "bulkTestName_"; + for (int i = 0; i < dbSettingsSqlite.getBulkThreshold() * 1.5; i++) { + String name = bulkTestUuid + String.valueOf(i); + String uuid = bulkTestName + String.valueOf(i); + cases.add(new CorrelationCase(uuid, name)); + } + + // Get the current case count + int nCases = EamDb.getInstance().getCases().size(); + + // Insert the big list of cases + EamDb.getInstance().bulkInsertCases(cases); + + // Check that the case count is what is expected + assertTrue("bulkInsertCases did not insert the expected number of cases", nCases + cases.size() == EamDb.getInstance().getCases().size()); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test bulk case insert with null list + try { + EamDb.getInstance().bulkInsertCases(null); + Assert.fail("bulkInsertCases did not throw expected exception from null list"); + } catch (EamDbException ex) { + // This is the expected behavior + } + } finally { + try { + Case.closeCurrentCase(); + // This seems to help in allowing the Autopsy case to be deleted + try { + Thread.sleep(2000); + } catch (Exception ex) { + + } + } catch (CaseActionException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + } + } + + /** + * Test method for the three methods related to the db_info table + * newDbInfo(String name, String value) tests: + * - Test valid data + * - Test null name + * - Test null value + * getDbInfo(String name) + * - Test getting value for existing name + * - Test getting value for non-existing name + * - Test getting value for null name + * updateDbInfo(String name, String value) + * - Test updating existing name to valid new value + * - Test updating existing name to null value + 
* - Test updating null name + * - Test updating non-existing name to new value + */ + public void testDbInfo() { + final String name1 = "testName1"; + final String name2 = "testName2"; + final String name3 = "testName3"; + final String value1 = "testValue1"; + final String value2 = "testValue2"; + + // Test setting a valid value in DbInfo + try { + EamDb.getInstance().newDbInfo(name1, value1); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Test null name + try { + EamDb.getInstance().newDbInfo(null, value1); + Assert.fail("newDbInfo did not throw expected exception from null name"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Test null value + try { + EamDb.getInstance().newDbInfo(name2, null); + Assert.fail("newDbInfo did not throw expected exception from null value"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Try getting the dbInfo entry that should exist + try { + String tempVal = EamDb.getInstance().getDbInfo(name1); + assertTrue("dbInfo value for name1 does not match", value1.equals(tempVal)); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Try getting the dbInfo entry that should not exist + try { + String tempVal = EamDb.getInstance().getDbInfo(name3); + assertTrue("dbInfo value is unexpectedly non-null given non-existent name", tempVal == null); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Try getting dbInfo for a null value + try { + String tempVal = EamDb.getInstance().getDbInfo(null); + assertTrue("dbInfo value is unexpectedly non-null given null name", tempVal == null); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Try updating an existing value to a valid new value + try { + EamDb.getInstance().updateDbInfo(name1, value2); + assertTrue("dbInfo value failed to update to expected value", value2.equals(EamDb.getInstance().getDbInfo(name1))); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Try updating an existing value to null + try { + EamDb.getInstance().updateDbInfo(name1, null); + Assert.fail("updateDbInfo did not throw expected exception from null value"); + } catch (EamDbException ex) { + // This is the expected behavior + } + + // Try updating a null name + // It seems like SQLite would throw an exception here, but it does not + try { + EamDb.getInstance().updateDbInfo(null, value1); + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex); + } + + // Try updating the value for a non-existent name + try { + EamDb.getInstance().updateDbInfo(name1, null); + Assert.fail("updateDbInfo did not throw expected exception from non-existent name"); + } catch (EamDbException ex) { + // This is the expected behavior + } + } + +} diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/SharedConfiguration.java b/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/SharedConfiguration.java index 1720b4b647..860019238b 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/SharedConfiguration.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/SharedConfiguration.java @@ -876,7 +876,7 @@ public class SharedConfiguration { if (!sharedDbPath.exists()) { if (!sharedDbPath.mkdirs()) { - throw new SharedConfigurationException("Error creating shared hash database directory " +
sharedDbPath.getAbsolutePath()); + throw new SharedConfigurationException("Error creating shared hash set directory " + sharedDbPath.getAbsolutePath()); } } @@ -1010,7 +1010,7 @@ public class SharedConfiguration { if (!localDb.getParentFile().exists()) { if (!localDb.getParentFile().mkdirs()) { - throw new SharedConfigurationException("Error creating hash database directory " + localDb.getParentFile().getAbsolutePath()); + throw new SharedConfigurationException("Error creating hash set directory " + localDb.getParentFile().getAbsolutePath()); } } @@ -1025,7 +1025,7 @@ public class SharedConfiguration { break; } } catch (TskCoreException ex) { - throw new SharedConfigurationException(String.format("Error getting hash database path info for %s", localDb.getParentFile().getAbsolutePath()), ex); + throw new SharedConfigurationException(String.format("Error getting hash set path info for %s", localDb.getParentFile().getAbsolutePath()), ex); } } @@ -1033,7 +1033,7 @@ public class SharedConfiguration { try { HashDbManager.getInstance().removeHashDatabase(matchingDb); } catch (HashDbManager.HashDbManagerException ex) { - throw new SharedConfigurationException(String.format("Error updating hash database info for %s", localDb.getAbsolutePath()), ex); + throw new SharedConfigurationException(String.format("Error updating hash set info for %s", localDb.getAbsolutePath()), ex); } } @@ -1130,7 +1130,7 @@ public class SharedConfiguration { } } } catch (TskCoreException ex) { - throw new SharedConfigurationException("Unable to read hash databases", ex); + throw new SharedConfigurationException("Unable to read hash sets", ex); } return results; } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/DeleteTagAction.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/DeleteTagAction.java index a21d163fdc..40451e8443 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/DeleteTagAction.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/DeleteTagAction.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2017 Basis Technology Corp. + * Copyright 2017-2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -51,7 +51,7 @@ import org.sleuthkit.datamodel.TskData; */ public class DeleteTagAction extends Action { - private static final Logger LOGGER = Logger.getLogger(DeleteTagAction.class.getName()); + private static final Logger logger = Logger.getLogger(DeleteTagAction.class.getName()); private final ImageGalleryController controller; private final long fileId; @@ -83,17 +83,11 @@ public class DeleteTagAction extends Action { protected Void doInBackground() throws Exception { DrawableTagsManager tagsManager = controller.getTagsManager(); - // Pull the from the global context to avoid unnecessary calls - // to the database. 
- final Collection selectedFilesList - = new HashSet<>(Utilities.actionsGlobalContext().lookupAll(AbstractFile.class)); - AbstractFile file = selectedFilesList.iterator().next(); - try { - LOGGER.log(Level.INFO, "Removing tag {0} from {1}", new Object[]{tagName.getDisplayName(), file.getName()}); //NON-NLS + logger.log(Level.INFO, "Removing tag {0} from {1}", new Object[]{tagName.getDisplayName(), contentTag.getContent().getName()}); //NON-NLS tagsManager.deleteContentTag(contentTag); } catch (TskCoreException tskCoreException) { - LOGGER.log(Level.SEVERE, "Error untagging file", tskCoreException); //NON-NLS + logger.log(Level.SEVERE, "Error untagging file", tskCoreException); //NON-NLS Platform.runLater(() -> new Alert(Alert.AlertType.ERROR, Bundle.DeleteDrawableTagAction_deleteTag_alert(fileId)).show() ); @@ -107,7 +101,7 @@ public class DeleteTagAction extends Action { try { get(); } catch (InterruptedException | ExecutionException ex) { - LOGGER.log(Level.SEVERE, "Unexpected exception while untagging file", ex); //NON-NLS + logger.log(Level.SEVERE, "Unexpected exception while untagging file", ex); //NON-NLS } } }.execute(); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AccountsText.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AccountsText.java index e5f17cef12..00d5670151 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AccountsText.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AccountsText.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2017 Basis Technology Corp. + * Copyright 2011-2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -284,15 +284,6 @@ class AccountsText implements IndexedText { } @Override - @NbBundle.Messages({"AccountsText.getMarkup.noMatchMsg=" - + "
There were no keyword hits on this page. 
" - + "The keyword could have been in the file name." - + "
Advance to another page if present, or to view the original text, choose File Text" - + "
in the drop down menu to the right...
", - "AccountsText.getMarkup.queryFailedMsg=" - + "
Failed to retrieve keyword hit results."
-        + " 
Confirm that Autopsy can connect to the Solr server. " - + "
"}) public String getText() { try { loadPageInfo(); //inits once @@ -321,7 +312,7 @@ class AccountsText implements IndexedText { return "
" + highlightedText + "
"; //NON-NLS } catch (Exception ex) { logger.log(Level.SEVERE, "Error getting highlighted text for Solr doc id " + this.solrObjectId + ", chunkID " + this.currentPage, ex); //NON-NLS - return Bundle.AccountsText_getMarkup_queryFailedMsg(); + return Bundle.IndexedText_errorMessage_errorGettingText(); } } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties index 8be786efda..38cad58ce5 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties @@ -54,11 +54,6 @@ AbstractKeywordSearchPerformer.search.noFilesInIdxMsg=No files are in inde AbstractKeywordSearchPerformer.search.noFilesIdxdMsg=No files were indexed.
Re-ingest the image with the Keyword Search Module enabled. ExtractedContentViewer.toolTip=Displays extracted text from files and keyword-search results. Requires Keyword Search ingest to be run on a file to activate this viewer. ExtractedContentViewer.getTitle=Indexed Text -ExtractedContentViewer.getSolrContent.knownFileMsg=

{0} is a known file (based on MD5 hash) and does not have text in the index.

-ExtractedContentViewer.getSolrContent.noTxtYetMsg=

{0} does not have text in the index.
It may have no text, not been analyzed yet, or keyword search was not enabled during ingest.

-ExtractedContentViewer.getSolrContent.txtBodyItal={0} -HighlightedMatchesSource.getMarkup.noMatchMsg=Failed to retrieve indexed text for keyword hit. Advance to another page if present, or to view the original text, choose File Text in the drop down menu to the right. Alternatively, you may choose to extract file content and search for the hit using an external application (e.g. a text editor). -HighlightedMatchesSource.getMarkup.queryFailedMsg=
Failed to retrieve keyword hit results. 
Confirm that Autopsy can connect to the Solr server.
HighlightedMatchesSource.toString=Search Results Installer.reportPortError=Indexing server port {0} is not available. Check if your security software does not block {1} and consider changing {2} in {3} property file in the application user folder. Then try rebooting your system if another process was causing the conflict. Installer.reportStopPortError=Indexing server stop port {0} is not available. Consider changing {1} in {2} property file in the application user folder. @@ -229,7 +224,7 @@ KeywordSearchGlobalSearchSettingsPanel.timeRadioButton1.text=20 minutes (slowest KeywordSearchGlobalSearchSettingsPanel.timeRadioButton2.toolTipText=10 minutes (faster overall ingest time than default) KeywordSearchGlobalSearchSettingsPanel.timeRadioButton2.text=10 minutes (slower feedback, faster ingest) KeywordSearchGlobalSearchSettingsPanel.frequencyLabel.text=Results update frequency during ingest: -KeywordSearchGlobalSearchSettingsPanel.skipNSRLCheckBox.toolTipText=Requires Hash DB service to had run previously, or be selected for next ingest. +KeywordSearchGlobalSearchSettingsPanel.skipNSRLCheckBox.toolTipText=Requires Hash Set service to had run previously, or be selected for next ingest. KeywordSearchGlobalSearchSettingsPanel.skipNSRLCheckBox.text=Do not add files in NSRL (known files) to keyword index during ingest KeywordSearchGlobalSearchSettingsPanel.informationLabel.text=Information KeywordSearchGlobalSearchSettingsPanel.settingsLabel.text=Settings @@ -272,7 +267,6 @@ KeywordSearchGlobalSearchSettingsPanel.timeRadioButton5.text=No periodic searche SolrConnectionCheck.HostnameOrPort=Invalid hostname and/or port number. SolrConnectionCheck.Hostname=Invalid hostname. SolrConnectionCheck.MissingHostname=Missing hostname. -RawText.getText.error.msg=Error getting text GlobalListsManagementPanel.newListButton.text=New List GlobalListsManagementPanel.importButton.text=Import List GlobalListsManagementPanel.keywordListsLabel.text=Keyword Lists: diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle_ja.properties b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle_ja.properties index bdcc19f6f6..2442e5bbc4 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle_ja.properties +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle_ja.properties @@ -46,7 +46,6 @@ AbstractKeywordSearchPerformer.search.noFilesIdxdMsg=\u30a4\u30f3\u30c7\u3 ExtractedContentPanel.setMarkup.panelTxt=\u30c6\u30ad\u30b9\u30c8\u30ed\u30fc\u30c9\u4e2d...\u3057\u3070\u3089\u304f\u304a\u5f85\u3061\u304f\u3060\u3055\u3044\u3002 ExtractedContentViewer.toolTip=\u30d5\u30a1\u30a4\u30eb\u3084\u30ad\u30fc\u30ef\u30fc\u30c9\u691c\u7d22\u7d50\u679c\u304b\u3089\u62bd\u51fa\u3055\u308c\u305f\u30c6\u30ad\u30b9\u30c8\u3092\u8868\u793a\u3002\u3053\u306e\u30d3\u30e5\u30fc\u30a2\u3092\u6709\u52b9\u5316\u3059\u308b\u306b\u306f\u3001\u30d5\u30a1\u30a4\u30eb\u306b\u5bfe\u3057\u3066\u30ad\u30fc\u30ef\u30fc\u30c9\u691c\u7d22\u30a4\u30f3\u30b8\u30a7\u30b9\u30c8\u3092\u5b9f\u884c\u3059\u308b\u5fc5\u8981\u304c\u3042\u308a\u307e\u3059\u3002 ExtractedContentViewer.getTitle=\u30a4\u30f3\u30c7\u30c3\u30af\u30b9\u5316\u3055\u308c\u305f\u30c6\u30ad\u30b9\u30c8 -ExtractedContentViewer.getSolrContent.knownFileMsg=

{0}\u306f\u65e2\u77e5\u30d5\u30a1\u30a4\u30eb\u3067\u3059\uff08MDS\u30cf\u30c3\u30b7\u30e5\u306b\u57fa\u3065\u304f\u3068\uff09\u3002\u30a4\u30f3\u30c7\u30c3\u30af\u30b9\u306b\u30c6\u30ad\u30b9\u30c8\u304c\u3042\u308a\u307e\u305b\u3093\u3002

ExtractedContentViewer.getSolrContent.noTxtYetMsg=

{0}\u306e\u30a4\u30f3\u30c7\u30c3\u30af\u30b9\u306b\u30c6\u30ad\u30b9\u30c8\u304c\u3042\u308a\u307e\u305b\u3093\u3002
\u30c6\u30ad\u30b9\u30c8\u304c\u7121\u3044\u304b\u3001\u307e\u3060\u89e3\u6790\u3055\u308c\u3066\u3044\u306a\u3044\u304b\u3001\u30ad\u30fc\u30ef\u30fc\u30c9\u691c\u7d22\u304c\u30a4\u30f3\u30b8\u30a7\u30b9\u30c8\u4e2d\u306b\u6709\u52b9\u5316\u3055\u308c\u3066\u3044\u306a\u304b\u3063\u305f\u304b\u3082\u3057\u308c\u307e\u305b\u3093\u3002

HighlightedMatchesSource.toString=\u691c\u7d22\u7d50\u679c Installer.reportPortError=\u30a4\u30f3\u30c7\u30c3\u30af\u30b9\u30b5\u30fc\u30d0\u30fc\u30dd\u30fc\u30c8 {0} \u306f\u5229\u7528\u3067\u304d\u307e\u305b\u3093\u3002\u4f7f\u7528\u3057\u3066\u3044\u308b\u30bb\u30ad\u30e5\u30ea\u30c6\u30a3\u30bd\u30d5\u30c8\u30a6\u30a7\u30a2\u304c {1} \u3092\u30d6\u30ed\u30c3\u30af\u3057\u3066\u3044\u306a\u3044\u304b\u78ba\u8a8d\u3057\u3001\u30a2\u30d7\u30ea\u30b1\u30fc\u30b7\u30e7\u30f3\u30e6\u30fc\u30b6\u30fc\u30d5\u30a9\u30eb\u30c0\u30fc\u5185\u306e{3}\u30d7\u30ed\u30d1\u30c6\u30a3\u30d5\u30a1\u30a4\u30eb\u306e{2}\u3092\u5909\u66f4\u3059\u308b\u691c\u8a0e\u3092\u3057\u3066\u304f\u3060\u3055\u3044\u3002\u3082\u3057\u4ed6\u306e\u51e6\u7406\u304c\u554f\u984c\u306e\u539f\u56e0\u3067\u3042\u308c\u3070\u3001\u30b7\u30b9\u30c6\u30e0\u3092\u518d\u8d77\u52d5\u3057\u3066\u4e0b\u3055\u3044\u3002 @@ -200,7 +199,6 @@ KeywordSearchIngestModule.doInBackGround.finalizeMsg=- \u6700\u7d42\u51e6\u7406\ KeywordSearchIngestModule.doInBackGround.pendingMsg=\uff08\u30da\u30f3\u30c7\u30a3\u30f3\u30b0\uff09 SearchRunner.doInBackGround.cancelMsg=\uff08\u30ad\u30e3\u30f3\u30bb\u30eb\u4e2d\u2026\uff09 Server.addDoc.exception.msg2=\u30a2\u30c3\u30d7\u30c7\u30fc\u30c8\u30cf\u30f3\u30c9\u30e9\u30fc\u3092\u4f7f\u7528\u3057\u307e\u3057\u305f\u304c\u3001\u30a4\u30f3\u30c7\u30c3\u30af\u30b9\u306b\u6b21\u306e\u30c9\u30ad\u30e5\u30e1\u30f3\u30c8\u3092\u8ffd\u52a0\u3067\u304d\u307e\u305b\u3093\u3067\u3057\u305f\uff1a{0} -ExtractedContentViewer.getSolrContent.txtBodyItal={0} KeywordSearchJobSettingsPanel.keywordSearchEncodings.text=- KeywordSearchJobSettingsPanel.languagesValLabel.text=- KeywordSearchJobSettingsPanel.encodingsLabel.text=\u30a8\u30f3\u30b3\u30fc\u30c7\u30a3\u30f3\u30b0\uff1a @@ -261,7 +259,6 @@ KeywordSearchModuleFactory.getIngestJobSettingsPanel.exception.msg=\u8a2d\u5b9a\ SearchRunner.Searcher.done.err.msg=\u30ad\u30fc\u30ef\u30fc\u30c9\u691c\u7d22\u3092\u5b9f\u884c\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f KeywordSearchGlobalSearchSettingsPanel.timeRadioButton5.text=\u5b9a\u671f\u7684\u691c\u7d22\u7121\u3057 KeywordSearchGlobalSearchSettingsPanel.timeRadioButton5.toolTipText=\u5168\u4f53\u7684\u306b\u4e00\u756a\u901f\u3044\u3067\u3059\u304c\u3001\u51e6\u7406\u304c\u5b8c\u4e86\u3059\u308b\u307e\u3067\u7d50\u679c\u306f\u8868\u793a\u3055\u308c\u307e\u305b\u3093 -HighlightedMatchesSource.getMarkup.queryFailedMsg=
\u30ad\u30fc\u30ef\u30fc\u30c9\u306b\u30d2\u30c3\u30c8\u3057\u305f\u7d50\u679c\u3092\u53d6\u5f97\u3067\u304d\u307e\u305b\u3093\u3067\u3057\u305f\u3002
Autopsy\u304cSolr\u30b5\u30fc\u30d0\u30fc\u306b\u63a5\u7d9a\u3067\u304d\u308b\u3053\u3068\u3092\u78ba\u8a8d\u3057\u3066\u304f\u3060\u3055\u3044\u3002
\ KeywordSearch.openCore.notification.msg=\u30ad\u30fc\u30ef\u30fc\u30c9\u691c\u7d22\u30a4\u30f3\u30c7\u30c3\u30af\u30b9\u3092\u958b\u3051\u307e\u305b\u3093\u3067\u3057\u305f KeywordSearch.closeCore.notification.msg=\u30ad\u30fc\u30ef\u30fc\u30c9\u691c\u7d22\u30a4\u30f3\u30c7\u30c3\u30af\u30b9\u3092\u9589\u3058\u308b\u969b\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f KeywordSearchListsManagementPanel.fileExtensionFilterLb2=\u30ad\u30fc\u30ef\u30fc\u30c9\u30ea\u30b9\u30c8\u30d5\u30a1\u30a4\u30eb\u3092\u30a8\u30f3\u30b1\u30fc\u30b9\u3059\u308b(txt) @@ -272,7 +269,7 @@ SolrConnectionCheck.HostnameOrPort=hostname\u3084\u30dd\u30fc\u30c8\u756a\u53f7\ SolrConnectionCheck.Hostname=hostname\u304c\u7121\u52b9\u3067\u3059\u3002 SolrConnectionCheck.Port=\u30dd\u30fc\u30c8\u756a\u53f7\u304c\u7121\u52b9\u3067\u3059\u3002 SolrConnectionCheck.MissingHostname=hostname\u304c\u6b20\u3051\u3066\u307e\u3059\u3002 -RawText.getText.error.msg=\u30c6\u30ad\u30b9\u30c8\u3092\u53d6\u5f97\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f +ExtractedContentViewer.getText.error.msg=\u30c6\u30ad\u30b9\u30c8\u3092\u53d6\u5f97\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f GlobalListsManagementPanel.exportButton.text=\u30ea\u30b9\u30c8\u3092\u30a8\u30af\u30b9\u30dd\u30fc\u30c8 GlobalListsManagementPanel.deleteListButton.text=\u30ea\u30b9\u30c8\u3092\u524a\u9664 GlobalListsManagementPanel.copyListButton.text=\u30ea\u30b9\u30c8\u3092\u30b3\u30d4\u30fc diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ExtractedContentPanel.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ExtractedContentPanel.java index 01647f2a46..bc68962693 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ExtractedContentPanel.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ExtractedContentPanel.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-17 Basis Technology Corp. + * Copyright 2011-2018 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -24,6 +24,7 @@ import java.awt.event.ActionListener; import java.awt.event.ItemEvent; import java.util.ArrayList; import java.util.List; +import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; import java.util.logging.Level; import javax.swing.JTextPane; @@ -647,8 +648,6 @@ class ExtractedContentPanel extends javax.swing.JPanel { return source.getText(); } - @NbBundle.Messages({ - "ExtractedContentPanel.SetMarkup.error=There was an error getting the text for the selected source."}) @Override protected void done() { super.done(); @@ -663,11 +662,9 @@ class ExtractedContentPanel extends javax.swing.JPanel { setPanelText("", false); } - } catch (InterruptedException | ExecutionException ex) { + } catch (InterruptedException | CancellationException | ExecutionException ex) { logger.log(Level.SEVERE, "Error getting marked up text", ex); //NON-NLS - setPanelText(Bundle.ExtractedContentPanel_SetMarkup_error(), true); - } // catch and ignore if we were cancelled - catch (java.util.concurrent.CancellationException ex) { + setPanelText(Bundle.IndexedText_errorMessage_errorGettingText(), true); } updateControls(source); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ExtractedContentViewer.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ExtractedContentViewer.java index dbed68e660..b13e1fbca4 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ExtractedContentViewer.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ExtractedContentViewer.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2017 Basis Technology Corp. + * Copyright 2011-2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/HighlightedText.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/HighlightedText.java index 70d2e69194..14298a044a 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/HighlightedText.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/HighlightedText.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2017 Basis Technology Corp. + * Copyright 2011-2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -20,7 +20,6 @@ package org.sleuthkit.autopsy.keywordsearch; import com.google.common.collect.Iterators; import com.google.common.collect.Range; -import com.google.common.collect.RangeSet; import com.google.common.collect.TreeRangeSet; import java.util.Arrays; import java.util.Collection; @@ -41,7 +40,6 @@ import org.apache.solr.client.solrj.SolrRequest.METHOD; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrDocumentList; import org.openide.util.NbBundle; -import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Version; import org.sleuthkit.autopsy.keywordsearch.KeywordQueryFilter.FilterType; @@ -104,13 +102,13 @@ class HighlightedText implements IndexedText { /** * This constructor is used when keyword hits are accessed from the ad-hoc * search results. In that case we have the entire QueryResults object and - need to arrange the paging. + * need to arrange the paging. 
* * @param objectId The objectID of the content whose text will be * highlighted. * @param QueryResults The QueryResults for the ad-hoc search from whose - results a selection was made leading to this - HighlightedText. + * results a selection was made leading to this + * HighlightedText. */ HighlightedText(long objectId, QueryResults hits) { this.objectId = objectId; @@ -140,7 +138,6 @@ class HighlightedText implements IndexedText { * This method figures out which pages / chunks have hits. Invoking it a * second time has no effect. */ - @Messages({"HighlightedText.query.exception.msg=Could not perform the query to get chunk info and get highlights:"}) synchronized private void loadPageInfo() throws TskCoreException, KeywordSearchModuleException, NoOpenCoreException { if (isPageInfoLoaded) { return; @@ -158,7 +155,6 @@ class HighlightedText implements IndexedText { this.numberPages = 1; this.currentPage = 1; numberOfHitsPerPage.put(1, 0); - pages.add(1); currentHitPerPage.put(1, 0); isPageInfoLoaded = true; } @@ -207,10 +203,10 @@ class HighlightedText implements IndexedText { isLiteral = hits.getQuery().isLiteral(); /** - * Organize the hits by page, filter as needed. - * We process *every* keyword here because in the case of a regular - * expression search there may be multiple different keyword - * hits located in different chunks for the same file/artifact. + * Organize the hits by page, filter as needed. We process *every* + * keyword here because in the case of a regular expression search there + * may be multiple different keyword hits located in different chunks + * for the same file/artifact. */ for (Keyword k : hits.getKeywords()) { for (KeywordHit hit : hits.getResults(k)) { @@ -428,7 +424,7 @@ class HighlightedText implements IndexedText { return "
" + highlightedContent + "
"; //NON-NLS } catch (TskCoreException | KeywordSearchModuleException | NoOpenCoreException ex) { logger.log(Level.SEVERE, "Error getting highlighted text for Solr doc id " + objectId + ", chunkID " + chunkID + ", highlight query: " + highlightField, ex); //NON-NLS - return NbBundle.getMessage(this.getClass(), "HighlightedMatchesSource.getMarkup.queryFailedMsg"); + return Bundle.IndexedText_errorMessage_errorGettingText(); } } @@ -466,12 +462,13 @@ class HighlightedText implements IndexedText { * to a Solr query. We expect there to only ever be * a single document. * - * @return Either a string with the keyword highlighted via HTML span tags or a string - * indicating that we did not find a hit in the document. + * @return Either a string with the keyword highlighted via HTML span tags + * or a string indicating that we did not find a hit in the + * document. */ static String attemptManualHighlighting(SolrDocumentList solrDocumentList, String highlightField, Collection keywords) { if (solrDocumentList.isEmpty()) { - return NbBundle.getMessage(HighlightedText.class, "HighlightedMatchesSource.getMarkup.noMatchMsg"); + return Bundle.IndexedText_errorMessage_errorGettingText(); } // It doesn't make sense for there to be more than a single document in diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IndexedText.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IndexedText.java index 6bf6a4f3ef..17366483e3 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IndexedText.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IndexedText.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2017 Basis Technology Corp. + * Copyright 2011-2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,12 +18,19 @@ */ package org.sleuthkit.autopsy.keywordsearch; +import org.openide.util.NbBundle; + /** * Interface to provide HTML text to display in ExtractedContentViewer. There is * a SOLR implementation of this that interfaces with SOLR to highlight the * keyword hits and a version that does not do markup so that you can simply * view the stored text. */ +@NbBundle.Messages({ + "IndexedText.errorMessage.errorGettingText=Error retrieving indexed text.", + "IndexedText.warningMessage.knownFile=This file is a known file (based on MD5 hash) and does not have indexed text.", + "IndexedText.warningMessage.noTextAvailable=No indexed text for this file." +}) interface IndexedText { /** diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RawText.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RawText.java index 043738ae30..789de3fd50 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RawText.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RawText.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2017 Basis Technology Corp. + * Copyright 2011-2018 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,7 +18,6 @@ */ package org.sleuthkit.autopsy.keywordsearch; -import java.util.LinkedHashMap; import java.util.logging.Level; import org.apache.solr.client.solrj.SolrServerException; import org.openide.util.NbBundle; @@ -153,10 +152,10 @@ class RawText implements IndexedText { } else if (this.blackboardArtifact != null) { return getArtifactText(); } - } catch (SolrServerException ex) { - logger.log(Level.SEVERE, "Couldn't get extracted content", ex); //NON-NLS + } catch (SolrServerException | NoOpenCoreException ex) { + logger.log(Level.SEVERE, "Couldn't get extracted text", ex); //NON-NLS } - return NbBundle.getMessage(this.getClass(), "RawText.getText.error.msg"); + return Bundle.IndexedText_errorMessage_errorGettingText(); } @NbBundle.Messages({ @@ -186,7 +185,6 @@ class RawText implements IndexedText { return 0; } - @Override public int getNumberPages() { return numPages; @@ -207,11 +205,8 @@ class RawText implements IndexedText { } else { hasChunks = true; } - } catch (KeywordSearchModuleException ex) { - logger.log(Level.WARNING, "Could not get number of chunks: ", ex); //NON-NLS - - } catch (NoOpenCoreException ex) { - logger.log(Level.WARNING, "Could not get number of chunks: ", ex); //NON-NLS + } catch (KeywordSearchModuleException | NoOpenCoreException ex) { + logger.log(Level.SEVERE, "Could not get number of chunks: ", ex); //NON-NLS } } @@ -225,31 +220,32 @@ class RawText implements IndexedText { * chunks. This means we need to address the content * pages specially. * - * @return the extracted content + * @return the extracted text * - * @throws SolrServerException if something goes wrong + * @throws NoOpenCoreException If no Solr core is available. + * @throws SolrServerException If there's a Solr communication or parsing + * issue. */ - private String getContentText(int currentPage, boolean hasChunks) throws SolrServerException { + private String getContentText(int currentPage, boolean hasChunks) throws NoOpenCoreException, SolrServerException { final Server solrServer = KeywordSearch.getServer(); if (hasChunks == false) { //if no chunks, it is safe to assume there is no text content //because we are storing extracted text in chunks only //and the non-chunk stores meta-data only - String name = content.getName(); String msg = null; + if (content instanceof AbstractFile) { //we know it's AbstractFile, but do quick check to make sure if we index other objects in future boolean isKnown = TskData.FileKnown.KNOWN.equals(((AbstractFile) content).getKnown()); if (isKnown && KeywordSearchSettings.getSkipKnown()) { - msg = NbBundle.getMessage(this.getClass(), "ExtractedContentViewer.getSolrContent.knownFileMsg", name); + msg = Bundle.IndexedText_warningMessage_knownFile(); } } if (msg == null) { - msg = NbBundle.getMessage(this.getClass(), "ExtractedContentViewer.getSolrContent.noTxtYetMsg", name); + msg = Bundle.IndexedText_warningMessage_noTextAvailable(); } - String htmlMsg = NbBundle.getMessage(this.getClass(), "ExtractedContentViewer.getSolrContent.txtBodyItal", msg); - return htmlMsg; + return msg; } int chunkId = currentPage; @@ -262,33 +258,46 @@ class RawText implements IndexedText { } //not cached - try { - String indexedText = solrServer.getSolrContent(this.objectId, chunkId); - if (indexedText == null) indexedText = ""; - cachedString = EscapeUtil.escapeHtml(indexedText).trim(); - StringBuilder sb = new StringBuilder(cachedString.length() + 20); - sb.append("
").append(cachedString).append("
"); //NON-NLS - cachedString = sb.toString(); - cachedChunk = chunkId; - } catch (NoOpenCoreException ex) { - logger.log(Level.SEVERE, "No open core", ex); //NON-NLS - return ""; + String indexedText = solrServer.getSolrContent(this.objectId, chunkId); + if (indexedText == null) { + if (content instanceof AbstractFile) { + return Bundle.IndexedText_errorMessage_errorGettingText(); + } else { + return Bundle.IndexedText_warningMessage_noTextAvailable(); + } + } else if (indexedText.isEmpty()) { + return Bundle.IndexedText_warningMessage_noTextAvailable(); } + + cachedString = EscapeUtil.escapeHtml(indexedText).trim(); + StringBuilder sb = new StringBuilder(cachedString.length() + 20); + sb.append("
").append(cachedString).append("
"); //NON-NLS + cachedString = sb.toString(); + cachedChunk = chunkId; + return cachedString; } - - private String getArtifactText() throws SolrServerException{ - try { - String indexedText = KeywordSearch.getServer().getSolrContent(this.objectId, 1); - if (indexedText == null) indexedText = ""; - indexedText = EscapeUtil.escapeHtml(indexedText).trim(); - StringBuilder sb = new StringBuilder(indexedText.length() + 20); - sb.append("
").append(indexedText).append("
"); //NON-NLS - return sb.toString(); - } catch (NoOpenCoreException ex) { - logger.log(Level.SEVERE, "No open core", ex); //NON-NLS - return ""; + + /** + * Get extracted artifact for a node from Solr + * + * @return the extracted text + * + * @throws NoOpenCoreException If no Solr core is available. + * @throws SolrServerException If there's a Solr communication or parsing + * issue. + */ + private String getArtifactText() throws NoOpenCoreException, SolrServerException { + String indexedText = KeywordSearch.getServer().getSolrContent(this.objectId, 1); + if (indexedText == null || indexedText.isEmpty()) { + return Bundle.IndexedText_errorMessage_errorGettingText(); } + + indexedText = EscapeUtil.escapeHtml(indexedText).trim(); + StringBuilder sb = new StringBuilder(indexedText.length() + 20); + sb.append("
").append(indexedText).append("
"); //NON-NLS + + return sb.toString(); } - + } diff --git a/Testing/src/org/sleuthkit/autopsy/testing/AutopsyTestCases.java b/Testing/src/org/sleuthkit/autopsy/testing/AutopsyTestCases.java index c716081672..d1860120a6 100644 --- a/Testing/src/org/sleuthkit/autopsy/testing/AutopsyTestCases.java +++ b/Testing/src/org/sleuthkit/autopsy/testing/AutopsyTestCases.java @@ -208,7 +208,7 @@ public class AutopsyTestCases { databases.stream().map((database) -> { JButtonOperator importButtonOperator = new JButtonOperator(hashMainDialogOperator, "Import"); importButtonOperator.pushNoBlock(); - JDialog addDatabaseDialog = JDialogOperator.waitJDialog("Import Hash Database", false, false); + JDialog addDatabaseDialog = JDialogOperator.waitJDialog("Import Hash Set", false, false); JDialogOperator addDatabaseDialogOperator = new JDialogOperator(addDatabaseDialog); JButtonOperator browseButtonOperator = new JButtonOperator(addDatabaseDialogOperator, "Open...", 0); browseButtonOperator.pushNoBlock(); diff --git a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties index c890aff42c..8da9daefe4 100644 --- a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties +++ b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties @@ -1,5 +1,5 @@ #Updated by build script -#Tue, 23 Jan 2018 11:13:26 -0500 +#Tue, 23 Jan 2018 11:28:07 -0500 LBL_splash_window_title=Starting Autopsy SPLASH_HEIGHT=314 SPLASH_WIDTH=538 diff --git a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties index 5178eab0a3..5daf2c9d7e 100644 --- a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties +++ b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties @@ -1,4 +1,4 @@ #Updated by build script -#Tue, 23 Jan 2018 11:13:26 -0500 +#Tue, 23 Jan 2018 11:28:07 -0500 CTL_MainWindow_Title=Autopsy 4.5.0 CTL_MainWindow_Title_No_Project=Autopsy 4.5.0 diff --git a/build.xml b/build.xml index 4eaa7df1c8..d044f68f06 100644 --- a/build.xml +++ b/build.xml @@ -32,7 +32,10 @@ - + + + + @@ -82,7 +85,13 @@ - + + + + + + + @@ -91,8 +100,17 @@ + - + + + + + + + + + diff --git a/ruleset.xml b/ruleset.xml index d4f2f075fc..9c8f7e34c2 100644 --- a/ruleset.xml +++ b/ruleset.xml @@ -1,13 +1,280 @@ - + xsi:schemaLocation="http://pmd.sourceforge.net/ruleset/2.0.0 http://pmd.sourceforge.net/ruleset_2_0_0.xsd"> - Ruleset used by Autopsy + Ruleset used by Autopsy - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/test/script/regression.py b/test/script/regression.py index afe1ea4976..3e9ee2e585 100644 --- a/test/script/regression.py +++ b/test/script/regression.py @@ -810,7 +810,7 @@ class TestConfiguration(object): if parsed_config.getElementsByTagName("singleUser_golddir"): self.singleUser_gold = 
parsed_config.getElementsByTagName("singleUser_golddir")[0].getAttribute("value").encode().decode("utf_8") if parsed_config.getElementsByTagName("timing"): - self.timing = parsed_config.getElementsByTagName("timing")[0].getAttribute("value").encode().decode("utf_8") + self.timing = ("True" == parsed_config.getElementsByTagName("timing")[0].getAttribute("value").encode().decode("utf_8")) if parsed_config.getElementsByTagName("autopsyPlatform"): self.autopsyPlatform = parsed_config.getElementsByTagName("autopsyPlatform")[0].getAttribute("value").encode().decode("utf_8") # Multi-user settings @@ -1392,7 +1392,7 @@ class Logs(object): try: Logs._fill_ingest_data(test_data) except Exception as e: - Errors.print_error("Error: Unknown fatal error when filling test_config data.") + Errors.print_error("Error when filling test_config data.") Errors.print_error(str(e) + "\n") logging.critical(traceback.format_exc()) # If running in verbose mode (-v) @@ -1454,10 +1454,10 @@ class Logs(object): Errors.print_error("Error: Unable to open autopsy.log.0.") Errors.print_error(str(e) + "\n") logging.warning(traceback.format_exc()) - # Start date must look like: "Fri Mar 27 13:27:34 EDT 2015" + # Start date must look like: "" # End date must look like: "Mon Jul 16 13:02:42 2012" # *** If logging time format ever changes this will break *** - start = datetime.datetime.strptime(test_data.start_date, "%a %b %d %H:%M:%S %Z %Y") + start = datetime.datetime.strptime(test_data.start_date, "%Y-%m-%d %H:%M:%S.%f") end = datetime.datetime.strptime(test_data.end_date, "%a %b %d %H:%M:%S %Y") test_data.total_test_time = str(end - start) @@ -1469,7 +1469,6 @@ class Logs(object): test_data.heap_space = search_logs("Heap memory usage:", test_data)[0].rstrip().split(": ")[1] ingest_line = search_logs("Ingest (including enqueue)", test_data)[0] test_data.total_ingest_time = get_word_at(ingest_line, 6).rstrip() - message_line_count = find_msg_in_log_set("Ingest messages count:", test_data) test_data.indexed_files = message_line_count
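The new CentralRepoDatamodelTest methods added earlier in this patch all follow the same hand-rolled expected-exception pattern: call an EamDb method, then fail the test if an exception was thrown on valid input, or fail it if no exception was thrown on invalid input. The following self-contained sketch illustrates that pattern outside of Autopsy; newDbInfo and StubException here are hypothetical stand-ins, not Autopsy APIs, whereas the real tests call EamDb.getInstance() and catch EamDbException.

public class ExpectedExceptionPatternDemo {

    static class StubException extends Exception {
        StubException(String message) {
            super(message);
        }
    }

    // Hypothetical stand-in for an EamDb-style method that rejects null arguments.
    static void newDbInfo(String name, String value) throws StubException {
        if (name == null || value == null) {
            throw new StubException("name and value must be non-null");
        }
        // A real implementation would write the name/value pair to the db_info table here.
    }

    public static void main(String[] args) {
        // Valid input: the call is expected to succeed, so any exception is a test failure.
        try {
            newDbInfo("testName1", "testValue1");
            System.out.println("valid input accepted, as expected");
        } catch (StubException ex) {
            System.out.println("FAIL: unexpected exception: " + ex.getMessage());
        }

        // Invalid input: the call is expected to throw, so reaching the next line is the failure.
        try {
            newDbInfo(null, "testValue1");
            System.out.println("FAIL: null name did not throw");
        } catch (StubException ex) {
            // This is the expected behavior.
            System.out.println("null name rejected, as expected");
        }
    }
}

In a NetBeans/JUnit-style test like the ones in this patch, the two failure branches above would be Assert.fail(...) calls rather than prints.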