diff --git a/Core/build.xml b/Core/build.xml index df747ff257..a1290ef2e8 100644 --- a/Core/build.xml +++ b/Core/build.xml @@ -94,11 +94,11 @@ - + - diff --git a/Core/nbproject/project.properties b/Core/nbproject/project.properties index 87aa0fb282..623ef11bd2 100644 --- a/Core/nbproject/project.properties +++ b/Core/nbproject/project.properties @@ -93,7 +93,7 @@ file.reference.javax.ws.rs-api-2.0.1.jar=release/modules/ext/javax.ws.rs-api-2.0 file.reference.cxf-core-3.0.16.jar=release/modules/ext/cxf-core-3.0.16.jar file.reference.cxf-rt-frontend-jaxrs-3.0.16.jar=release/modules/ext/cxf-rt-frontend-jaxrs-3.0.16.jar file.reference.cxf-rt-transports-http-3.0.16.jar=release/modules/ext/cxf-rt-transports-http-3.0.16.jar -file.reference.sleuthkit-postgresql-4.9.0.jar=release/modules/ext/sleuthkit-postgresql-4.9.0.jar +file.reference.sleuthkit-4.9.0.jar=release/modules/ext/sleuthkit-4.9.0.jar file.reference.curator-client-2.8.0.jar=release/modules/ext/curator-client-2.8.0.jar file.reference.curator-framework-2.8.0.jar=release/modules/ext/curator-framework-2.8.0.jar file.reference.curator-recipes-2.8.0.jar=release/modules/ext/curator-recipes-2.8.0.jar diff --git a/Core/nbproject/project.xml b/Core/nbproject/project.xml index a0ec3ce5ee..9a83cd4117 100644 --- a/Core/nbproject/project.xml +++ b/Core/nbproject/project.xml @@ -652,8 +652,8 @@ release/modules/ext/commons-validator-1.6.jar - ext/sleuthkit-postgresql-4.9.0.jar - release/modules/ext/sleuthkit-postgresql-4.9.0.jar + ext/sleuthkit-4.9.0.jar + release/modules/ext/sleuthkit-4.9.0.jar ext/decodetect-core-0.3.jar diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/Case.java b/Core/src/org/sleuthkit/autopsy/casemodule/Case.java index 6c6e102dac..47fff7320f 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/Case.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/Case.java @@ -1563,11 +1563,12 @@ public class Case { * * This should not be called from the event dispatch thread (EDT) * - * @param newTag new ContentTag added - * @param deletedTag Removed ContentTag + * @param newTag The added ContentTag. + * @param deletedTagList List of ContentTags that were removed as a result + * of the addition of newTag. */ - public void notifyContentTagAdded(ContentTag newTag, ContentTag deletedTag) { - eventPublisher.publish(new ContentTagAddedEvent(newTag, deletedTag)); + public void notifyContentTagAdded(ContentTag newTag, List deletedTagList) { + eventPublisher.publish(new ContentTagAddedEvent(newTag, deletedTagList)); } /** @@ -1627,11 +1628,12 @@ public class Case { * * This should not be called from the event dispatch thread (EDT) * - * @param newTag new BlackboardArtifactTag added - * @param removedTag The BlackboardArtifactTag that was removed. + * @param newTag The added ContentTag. + * @param removedTagList List of ContentTags that were removed as a result + * of the addition of newTag. 
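Illustrative sketch (not part of this patch): with notifyContentTagAdded now carrying the list of tags that were replaced, a hypothetical consumer could read both halves of the event as below. Case.addEventTypeSubscriber is assumed to be the usual Autopsy subscription entry point; the event getters are the ones introduced elsewhere in this diff.

import java.beans.PropertyChangeListener;
import java.util.EnumSet;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent;

// Hypothetical consumer of the widened CONTENT_TAG_ADDED payload.
final class TagEventLoggerExample {

    private TagEventLoggerExample() {
    }

    static void register() {
        PropertyChangeListener listener = evt -> {
            if (evt instanceof ContentTagAddedEvent) {
                ContentTagAddedEvent event = (ContentTagAddedEvent) evt;
                // The tag that was just added.
                System.out.println("Added tag id: " + event.getAddedTag().getId());
                // Tags the tagging layer removed as a side effect; may be null.
                if (event.getDeletedTags() != null) {
                    System.out.println(event.getDeletedTags().size() + " tag(s) were replaced");
                }
            }
        };
        Case.addEventTypeSubscriber(EnumSet.of(Case.Events.CONTENT_TAG_ADDED), listener);
    }
}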
*/ - public void notifyBlackBoardArtifactTagAdded(BlackboardArtifactTag newTag, BlackboardArtifactTag removedTag) { - eventPublisher.publish(new BlackBoardArtifactTagAddedEvent(newTag, removedTag)); + public void notifyBlackBoardArtifactTagAdded(BlackboardArtifactTag newTag, List removedTagList) { + eventPublisher.publish(new BlackBoardArtifactTagAddedEvent(newTag, removedTagList)); } /** diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/events/BlackBoardArtifactTagAddedEvent.java b/Core/src/org/sleuthkit/autopsy/casemodule/events/BlackBoardArtifactTagAddedEvent.java index 45068164c1..e9793a51b0 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/events/BlackBoardArtifactTagAddedEvent.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/events/BlackBoardArtifactTagAddedEvent.java @@ -19,6 +19,8 @@ package org.sleuthkit.autopsy.casemodule.events; import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; import javax.annotation.concurrent.Immutable; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; @@ -30,7 +32,7 @@ import org.sleuthkit.datamodel.TskCoreException; * Event sent when a black board artifact tag is added. */ @Immutable -public class BlackBoardArtifactTagAddedEvent extends TagAddedEvent implements Serializable { +public class BlackBoardArtifactTagAddedEvent extends TagAddedEvent implements Serializable { private static final long serialVersionUID = 1L; @@ -38,8 +40,8 @@ public class BlackBoardArtifactTagAddedEvent extends TagAddedEvent removedTagList) { + super(Case.Events.BLACKBOARD_ARTIFACT_TAG_ADDED.toString(), newTag, (removedTagList != null ? getDeletedInfo(removedTagList) : null)); } /** @@ -54,4 +56,24 @@ public class BlackBoardArtifactTagAddedEvent extends TagAddedEvent getDeletedInfo(List deletedTagList) { + List deletedInfoList = new ArrayList<>(); + if (deletedTagList != null) { + for (BlackboardArtifactTag tag : deletedTagList) { + deletedInfoList.add(new DeletedBlackboardArtifactTagInfo(tag)); + } + } + + return deletedInfoList; + } } diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/events/ContentTagAddedEvent.java b/Core/src/org/sleuthkit/autopsy/casemodule/events/ContentTagAddedEvent.java index b3e55d3eac..10a80c772e 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/events/ContentTagAddedEvent.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/events/ContentTagAddedEvent.java @@ -19,6 +19,8 @@ package org.sleuthkit.autopsy.casemodule.events; import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; import javax.annotation.concurrent.Immutable; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; @@ -30,7 +32,7 @@ import org.sleuthkit.datamodel.TskCoreException; * An event that is fired when a ContentTag is added. */ @Immutable -public class ContentTagAddedEvent extends TagAddedEvent implements Serializable { +public class ContentTagAddedEvent extends TagAddedEvent implements Serializable { private static final long serialVersionUID = 1L; @@ -38,8 +40,8 @@ public class ContentTagAddedEvent extends TagAddedEvent implements S super(Case.Events.CONTENT_TAG_ADDED.toString(), newTag); } - public ContentTagAddedEvent(ContentTag newTag, ContentTag deletedTag) { - super(Case.Events.CONTENT_TAG_ADDED.toString(), newTag, (deletedTag != null ? 
new DeletedContentTagInfo(deletedTag) : null)); + public ContentTagAddedEvent(ContentTag newTag, List deletedTagList) { + super(Case.Events.CONTENT_TAG_ADDED.toString(), newTag, getDeletedInfo(deletedTagList)); } /** @@ -50,7 +52,26 @@ public class ContentTagAddedEvent extends TagAddedEvent implements S * @throws NoCurrentCaseException * @throws TskCoreException */ + @Override ContentTag getTagByID() throws NoCurrentCaseException, TskCoreException { return Case.getCurrentCaseThrows().getServices().getTagsManager().getContentTagByTagID(getTagID()); } + + /** + * Create a list of DeletedContentTagInfo objects from a list of ContentTags. + * + * @param deletedTagList List of deleted ContentTags. + * + * @return List of DeletedContentTagInfo objects or empty list if deletedTagList was empty or null. + */ + private static List getDeletedInfo(List deletedTagList) { + List deletedInfoList = new ArrayList<>(); + if (deletedTagList != null) { + for (ContentTag tag : deletedTagList) { + deletedInfoList.add(new DeletedContentTagInfo(tag)); + } + } + + return deletedInfoList; + } } diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/events/TagAddedEvent.java b/Core/src/org/sleuthkit/autopsy/casemodule/events/TagAddedEvent.java index ab737d7004..25a35a6d6c 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/events/TagAddedEvent.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/events/TagAddedEvent.java @@ -19,6 +19,8 @@ package org.sleuthkit.autopsy.casemodule.events; import java.io.Serializable; +import java.util.Collections; +import java.util.List; import java.util.logging.Level; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.events.TagDeletedEvent.DeletedTagInfo; @@ -30,7 +32,7 @@ import org.sleuthkit.datamodel.TskCoreException; /** * Base Class for events that are fired when a Tag is added */ -abstract class TagAddedEvent extends AutopsyEvent implements Serializable { +abstract class TagAddedEvent> extends AutopsyEvent implements Serializable { private static final long serialVersionUID = 1L; @@ -39,6 +41,8 @@ abstract class TagAddedEvent extends AutopsyEvent implements Seri * re-loaded from the database in getNewValue() */ private transient T tag; + + private List deletedTagInfoList; /** * The id of the tag that was added. This will be used to re-load the @@ -50,10 +54,19 @@ abstract class TagAddedEvent extends AutopsyEvent implements Seri this(propertyName, addedTag, null); } - TagAddedEvent(String propertyName, T addedTag, DeletedTagInfo deletedTagInfo) { - super(propertyName, deletedTagInfo, null); + /** + * Construct a TagAddedEvent. + * + * @param propertyName Name of property changing + * @param addedTag Instance of added tag. + * @param deletedTagInfoList List of tags deleted as a result of the + * addition of addedTag. + */ + TagAddedEvent(String propertyName, T addedTag, List deletedTagInfoList) { + super(propertyName, deletedTagInfoList, null); tag = addedTag; tagID = addedTag.getId(); + this.deletedTagInfoList = deletedTagInfoList; } /** @@ -73,7 +86,7 @@ abstract class TagAddedEvent extends AutopsyEvent implements Seri public T getAddedTag() { return getNewValue(); } - + @Override public T getNewValue() { /** @@ -95,6 +108,21 @@ abstract class TagAddedEvent extends AutopsyEvent implements Seri return null; } } + + /** + * Returns the list of tags that were removed as a result of the addition + * of the T. + * + * @return A list of removed tags or null if no tags were removed. 
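Aside, purely for illustration: the getDeletedInfo() helpers added to both event classes follow the same null-tolerant list-mapping pattern. A generic, JDK-only sketch of that pattern (hypothetical, not part of the patch) would be:

import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;

final class ListMapper {

    private ListMapper() {
    }

    // Maps each element of source through mapper; a null or empty source
    // yields an empty list, matching the behavior of the getDeletedInfo() helpers.
    static <T, R> List<R> mapOrEmpty(List<T> source, Function<T, R> mapper) {
        List<R> result = new ArrayList<>();
        if (source != null) {
            for (T item : source) {
                result.add(mapper.apply(item));
            }
        }
        return result;
    }
}

A call such as mapOrEmpty(deletedTagList, DeletedContentTagInfo::new) would be roughly equivalent to the helper in ContentTagAddedEvent, assuming the info constructor is accessible from the call site.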
+ */ + public List getDeletedTags() { + return deletedTagInfoList != null ? Collections.unmodifiableList(deletedTagInfoList) : null; + } + + @Override + public Object getOldValue() { + return getDeletedTags(); + } /** * implementors should override this to lookup the appropriate kind of tag diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagNameDefinition.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagNameDefinition.java index 2e70d407a0..0c65f3c58d 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagNameDefinition.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagNameDefinition.java @@ -80,7 +80,7 @@ final class TagNameDefinition implements Comparable { PROJECT_VIC_TAG_DEFINITIONS.put(CATEGORY_TWO_NAME, new TagNameDefinition(CATEGORY_TWO_NAME, "", TagName.HTML_COLOR.LIME, TskData.FileKnown.BAD)); PROJECT_VIC_TAG_DEFINITIONS.put(CATEGORY_THREE_NAME, new TagNameDefinition(CATEGORY_THREE_NAME, "", TagName.HTML_COLOR.YELLOW, TskData.FileKnown.BAD)); PROJECT_VIC_TAG_DEFINITIONS.put(CATEGORY_FOUR_NAME, new TagNameDefinition(CATEGORY_FOUR_NAME, "", TagName.HTML_COLOR.PURPLE, TskData.FileKnown.UNKNOWN)); - PROJECT_VIC_TAG_DEFINITIONS.put(CATEGORY_FIVE_NAME, new TagNameDefinition(CATEGORY_FIVE_NAME, "", TagName.HTML_COLOR.SILVER, TskData.FileKnown.UNKNOWN)); + PROJECT_VIC_TAG_DEFINITIONS.put(CATEGORY_FIVE_NAME, new TagNameDefinition(CATEGORY_FIVE_NAME, "", TagName.HTML_COLOR.FUCHSIA, TskData.FileKnown.UNKNOWN)); } /** diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java index 5bdb2dc64e..8af7cd7efa 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/TagsManager.java @@ -146,6 +146,12 @@ public class TagsManager implements Closeable { return tagDisplayNames; } + /** + * Gets the set of display names of notable (TskData.FileKnown.BAD) tag types. + * If a case is not open the list will only include only the user defined + * custom tags. Otherwise the list will include all notable tags. + * @return + */ public static List getNotableTagDisplayNames() { List tagDisplayNames = new ArrayList<>(); for (TagNameDefinition tagDef : TagNameDefinition.getTagNameDefinitions()) { @@ -153,6 +159,22 @@ public class TagsManager implements Closeable { tagDisplayNames.add(tagDef.getDisplayName()); } } + + try { + TagsManager tagsManager = Case.getCurrentCaseThrows().getServices().getTagsManager(); + for (TagName tagName : tagsManager.getAllTagNames()) { + if(tagName.getKnownStatus() == TskData.FileKnown.BAD && + !tagDisplayNames.contains(tagName.getDisplayName())) { + tagDisplayNames.add(tagName.getDisplayName()); + } + } + } catch (NoCurrentCaseException ignored) { + /* + * No current case, nothing more to add to the set. + */ + } catch(TskCoreException ex) { + LOGGER.log(Level.SEVERE, "Failed to get list of TagNames from TagsManager.", ex); + } return tagDisplayNames; } @@ -481,7 +503,7 @@ public class TagsManager implements Closeable { try { Case currentCase = Case.getCurrentCaseThrows(); - currentCase.notifyContentTagAdded(tagChange.getAddedTag(), tagChange.getRemovedTags().isEmpty() ? null : tagChange.getRemovedTags().get(0)); + currentCase.notifyContentTagAdded(tagChange.getAddedTag(), tagChange.getRemovedTags().isEmpty() ? 
null : tagChange.getRemovedTags()); } catch (NoCurrentCaseException ex) { throw new TskCoreException("Added a tag to a closed case", ex); @@ -701,7 +723,7 @@ public class TagsManager implements Closeable { TaggingManager.BlackboardArtifactTagChange tagChange = caseDb.getTaggingManager().addArtifactTag(artifact, tagName, comment); try { Case currentCase = Case.getCurrentCaseThrows(); - currentCase.notifyBlackBoardArtifactTagAdded(tagChange.getAddedTag(), tagChange.getRemovedTags().isEmpty() ? null : tagChange.getRemovedTags().get(0)); + currentCase.notifyBlackBoardArtifactTagAdded(tagChange.getAddedTag(), tagChange.getRemovedTags().isEmpty() ? null : tagChange.getRemovedTags()); } catch (NoCurrentCaseException ex) { throw new TskCoreException("Added a tag to a closed case", ex); } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoExaminer.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoExaminer.java new file mode 100644 index 0000000000..75fcceb68b --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoExaminer.java @@ -0,0 +1,52 @@ +/* + * Central Repository + * + * Copyright 2020 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.centralrepository.datamodel; + +/** + * Encapsulates the concept of an examiner. + */ +final public class CentralRepoExaminer { + + private final long id; // Row id in the examiners table in central repo database. + private final String loginName; + + public CentralRepoExaminer(long id, String loginName) { + this.id = id; + this.loginName = loginName; + } + + /** + * Returns the id. + * + * @return id + */ + public long getId() { + return id; + } + + /** + * Returns the login name of examiner. 
+ * + * @return login name + */ + public String getLoginName() { + return this.loginName; + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java index 6a4a138cfb..a4ce71cb36 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java @@ -19,12 +19,14 @@ package org.sleuthkit.autopsy.centralrepository.datamodel; import java.sql.SQLException; +import java.util.Collection; import java.util.List; import java.util.Set; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoAccount.CentralRepoAccountType; import org.sleuthkit.autopsy.coordinationservice.CoordinationService; +import org.sleuthkit.datamodel.HashHitInfo; /** * Main interface for interacting with the database @@ -159,6 +161,17 @@ public interface CentralRepository { * @param eamCase The case to update */ void updateCase(CorrelationCase eamCase) throws CentralRepoException; + + /** + * Queries the examiner table for the given user name. + * Adds a row if the user is not found in the examiner table. + * + * @param examinerLoginName user name to look for. + * @return CentralRepoExaminer for the given user name. + * @throws CentralRepoException If there is an error in looking up or + * inserting the user in the examiners table. + */ + CentralRepoExaminer getOrInsertExaminer(String examinerLoginName) throws CentralRepoException; /** * Retrieves Central Repo case based on an Autopsy Case @@ -541,6 +554,22 @@ public interface CentralRepository { */ public boolean isFileHashInReferenceSet(String hash, int referenceSetID) throws CentralRepoException, CorrelationAttributeNormalizationException; + + /** + * Retrieves the given file HashHitInfo if the given file hash is in this + * reference set. Only searches the reference_files table. + * + * @param hash The hash to find in a search. + * @param referenceSetID The referenceSetID within which the file should exist. + * + * @return The HashHitInfo if found or null if not found. + * + * @throws CentralRepoException + * @throws CorrelationAttributeNormalizationException + */ + HashHitInfo lookupHash(String hash, int referenceSetID) throws CentralRepoException, CorrelationAttributeNormalizationException; + + /** * Check if the given value is in a specific reference set * @@ -806,6 +835,24 @@ public interface CentralRepository { public void processSelectClause(String selectClause, InstanceTableCallback instanceTableCallback) throws CentralRepoException; + /** + * Executes an INSERT sql statement on the central repository database. + * @param sql INSERT sql to execute. + * + * @throws CentralRepoException If there is an error. + */ + void executeInsertSQL(String sql) throws CentralRepoException; + + /** + * Executes a SELECT sql statement on the central repository database. + * + * @param sql SELECT sql to execute. + * @param queryCallback Query callback to handle the result of the query. + * + * @throws CentralRepoException If there is an error. + */ + void executeSelectSQL(String sql, CentralRepositoryDbQueryCallback queryCallback) throws CentralRepoException; + /** * Get account type by type name. 
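Usage sketch (hypothetical caller, not in the patch): per the interface Javadoc above, lookupHash() returns null when the hash is not in the reference set, so it can back a simple membership check.

import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
import org.sleuthkit.datamodel.HashHitInfo;

// Hypothetical caller of the new lookupHash() method.
final class ReferenceSetCheckExample {

    private ReferenceSetCheckExample() {
    }

    // Returns true if the given MD5 is present in the reference set.
    static boolean isInReferenceSet(CentralRepository repo, String md5, int referenceSetId)
            throws CentralRepoException, CorrelationAttributeNormalizationException {
        HashHitInfo hit = repo.lookupHash(md5, referenceSetId);
        return hit != null;
    }
}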
* @@ -815,6 +862,15 @@ public interface CentralRepository { */ CentralRepoAccountType getAccountTypeByName(String accountTypeName) throws CentralRepoException; + /** + * Gets all account types. + * + * @return Collection of all CR account types in the database. + * + * @throws CentralRepoException + */ + Collection getAllAccountTypes() throws CentralRepoException; + /** * Get an account from the accounts table matching the given type/ID. * Inserts a row if one doesn't exists. @@ -826,6 +882,5 @@ public interface CentralRepository { * @throws CentralRepoException */ CentralRepoAccount getOrCreateAccount(CentralRepoAccount.CentralRepoAccountType crAccountType, String accountUniqueID) throws CentralRepoException; - - + } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepositoryDbQueryCallback.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepositoryDbQueryCallback.java new file mode 100644 index 0000000000..ffc02b4a9f --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepositoryDbQueryCallback.java @@ -0,0 +1,40 @@ +/* + * Central Repository + * + * Copyright 2020 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.centralrepository.datamodel; + +import java.sql.ResultSet; +import java.sql.SQLException; + +/** + * An interface to process the resultset from a Central Repository DB query. + * This enables clients of Central Repository to run custom queries and process + * the results themselves. + * + */ +interface CentralRepositoryDbQueryCallback { + + /** + * Process the resultset from a query. + * @param rs ResultSet. + * + * @throws CentralRepoException In case of an error processing the result set. + * @throws SQLException In case of a SQL error in processing the result set. 
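To make the executeSelectSQL/callback contract concrete, here is a rough JDBC-style sketch of how an implementation might drive the callback. The helper class and the means of obtaining a Connection are hypothetical, and CentralRepoException is assumed to offer a (String, Throwable) constructor.

package org.sleuthkit.autopsy.centralrepository.datamodel;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

// Hypothetical helper sketching how an implementation could drive the callback.
final class SelectQueryRunner {

    private SelectQueryRunner() {
    }

    // Runs the SELECT and hands the open ResultSet to the caller's callback,
    // translating SQL failures into CentralRepoException.
    static void runSelect(Connection connection, String sql,
            CentralRepositoryDbQueryCallback queryCallback) throws CentralRepoException {
        try (Statement statement = connection.createStatement();
                ResultSet resultSet = statement.executeQuery(sql)) {
            queryCallback.process(resultSet);
        } catch (SQLException ex) {
            throw new CentralRepoException("Error executing query: " + sql, ex);
        }
    }
}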
+ */ + void process(ResultSet rs) throws CentralRepoException, SQLException; +} diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeInstance.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeInstance.java index 4d5e2857a1..32121989e0 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeInstance.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeInstance.java @@ -77,7 +77,7 @@ public class CorrelationAttributeInstance implements Serializable { ) throws CentralRepoException, CorrelationAttributeNormalizationException { this(type, value, -1, eamCase, eamDataSource, filePath, comment, knownStatus, fileObjectId, (long)-1); } - CorrelationAttributeInstance( + public CorrelationAttributeInstance( Type type, String value, int instanceId, diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Persona.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Persona.java index 5fc458353b..fd4e26eca2 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Persona.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Persona.java @@ -18,13 +18,24 @@ */ package org.sleuthkit.autopsy.centralrepository.datamodel; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Objects; +import java.util.UUID; +import org.apache.commons.lang3.StringUtils; +import org.sleuthkit.datamodel.SleuthkitCase; + /** * This class abstracts a persona. * * An examiner may create a persona from an account. * */ -class Persona { +public class Persona { /** * Defines level of confidence in assigning a persona to an account. @@ -50,9 +61,19 @@ class Persona { return name; } - public int getLevel() { + public int getLevelId() { return this.level_id; } + + static Confidence fromId(int value) { + for (Confidence confidence : Confidence.values()) { + if (confidence.getLevelId() == value) { + return confidence; + } + } + return Confidence.UNKNOWN; + } + } /** @@ -79,9 +100,559 @@ class Persona { return description; } - public int getStatus() { + public int getStatusId() { return this.status_id; } + + static PersonaStatus fromId(int value) { + for (PersonaStatus status : PersonaStatus.values()) { + if (status.getStatusId() == value) { + return status; + } + } + return PersonaStatus.UNKNOWN; + } } + // Persona name to use if no name is specified. 
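For comparison only: the fromId() lookups added in this patch scan values() linearly, which is fine for small enums. An alternative (not what the patch does) is to precompute an id-to-constant map; the enum below is a stand-alone sketch with placeholder constant names and ids.

import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

// Hypothetical stand-alone enum showing a precomputed-map variant of fromId().
enum ConfidenceLevel {
    LOW(1), MODERATE(2), HIGH(3), UNKNOWN(0);

    private final int levelId;

    private static final Map<Integer, ConfidenceLevel> BY_ID = Stream.of(values())
            .collect(Collectors.toMap(ConfidenceLevel::getLevelId, Function.identity()));

    ConfidenceLevel(int levelId) {
        this.levelId = levelId;
    }

    int getLevelId() {
        return levelId;
    }

    // Constant-time lookup; unknown ids fall back to UNKNOWN, like fromId() in the patch.
    static ConfidenceLevel fromId(int id) {
        return BY_ID.getOrDefault(id, UNKNOWN);
    }
}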
+ private static final String DEFAULT_PERSONA_NAME = "Unknown"; + + // primary key in the Personas table in CR database + private final long id; + private final String uuidStr; + private final String name; + private final String comment; + private final long createdDate; + private final long modifiedDate; + private final PersonaStatus status; + private final CentralRepoExaminer examiner; + + public long getId() { + return id; + } + + public String getUuidStr() { + return uuidStr; + } + + public String getName() { + return name; + } + + public String getComment() { + return comment; + } + + public long getCreatedDate() { + return createdDate; + } + + public long getModifiedDate() { + return modifiedDate; + } + + public PersonaStatus getStatus() { + return status; + } + + public CentralRepoExaminer getExaminer() { + return examiner; + } + + Persona(long id, String uuidStr, String name, String comment, long created_date, long modified_date, PersonaStatus status, CentralRepoExaminer examiner) { + this.id = id; + this.uuidStr = uuidStr; + this.name = name; + this.comment = comment; + this.createdDate = created_date; + this.modifiedDate = modified_date; + this.status = status; + this.examiner = examiner; + } + + /** + * Creates a Persona and associates the specified account with it. + * + * @param personaName Persona name. + * @param comment Comment to associate with persona, may be null. + * @param status Persona status + * @param account Account for which the persona is being created. + * @param justification Justification for why this account belongs to this + * persona, may be null. + * @param confidence Confidence level for this association of Persona & + * account. + * + * @return PersonaAccount + * @throws CentralRepoException If there is an error creating the Persona. + */ + public static PersonaAccount createPersonaForAccount(String personaName, String comment, PersonaStatus status, CentralRepoAccount account, String justification, Persona.Confidence confidence) throws CentralRepoException { + Persona persona = createPersona(personaName, comment, status); + return persona.addAccountToPersona(account, justification, confidence); + } + + /** + * Inserts a row in the Persona tables. + * + * @param name Persona name, may be null - default name is used in that + * case. + * @param comment Comment to associate with persona, may be null. + * @param status Persona status. + * + * @return Persona corresponding to the row inserted in the personas table. + * + * @throws CentralRepoException If there is an error in adding a row to + * personas table. + */ + private static Persona createPersona(String name, String comment, PersonaStatus status) throws CentralRepoException { + // generate a UUID for the persona + String uuidStr = UUID.randomUUID().toString(); + CentralRepoExaminer examiner = CentralRepository.getInstance().getOrInsertExaminer(System.getProperty("user.name")); + + Instant instant = Instant.now(); + Long timeStampMillis = instant.toEpochMilli(); + String insertClause = " INTO personas (uuid, comment, name, created_date, modified_date, status_id, examiner_id ) " + + "VALUES ( '" + uuidStr + "', " + + "'" + ((StringUtils.isBlank(comment) ? "" : SleuthkitCase.escapeSingleQuotes(comment))) + "'," + + "'" + ((StringUtils.isBlank(name) ? 
DEFAULT_PERSONA_NAME : SleuthkitCase.escapeSingleQuotes(name))) + "'," + + timeStampMillis.toString() + "," + + timeStampMillis.toString() + "," + + status.getStatusId() + "," + + examiner.getId() + + ")"; + + CentralRepository.getInstance().executeInsertSQL(insertClause); + return getPersonaByUUID(uuidStr); + } + + /** + * Associates an account with a persona by creating a row in the + * PersonaAccounts table. + * + * @param persona Persona to add the account to. + * @param account Account to add to persona. + * @param justification Reason for adding the account to persona, may be + * null. + * @param confidence Confidence level. + * + * @return PersonaAccount + * @throws CentralRepoException If there is an error. + */ + public PersonaAccount addAccountToPersona(CentralRepoAccount account, String justification, Persona.Confidence confidence) throws CentralRepoException { + + CentralRepoExaminer currentExaminer = CentralRepository.getInstance().getOrInsertExaminer(System.getProperty("user.name")); + + Instant instant = Instant.now(); + Long timeStampMillis = instant.toEpochMilli(); + String insertClause = " INTO persona_accounts (persona_id, account_id, justification, confidence_id, date_added, examiner_id ) " + + "VALUES ( " + + this.getId() + ", " + + account.getAccountId() + ", " + + "'" + ((StringUtils.isBlank(justification) ? "" : SleuthkitCase.escapeSingleQuotes(justification))) + "', " + + confidence.getLevelId() + ", " + + timeStampMillis.toString() + ", " + + currentExaminer.getId() + + ")"; + + CentralRepository.getInstance().executeInsertSQL(insertClause); + return new PersonaAccount(this, account, justification, confidence, timeStampMillis, currentExaminer); + } + + /** + * Callback to process a Persona query from the persona table. + */ + private static class PersonaQueryCallback implements CentralRepositoryDbQueryCallback { + + private final Collection personaList = new ArrayList<>(); + + @Override + public void process(ResultSet rs) throws SQLException { + + while (rs.next()) { + CentralRepoExaminer examiner = new CentralRepoExaminer( + rs.getInt("examiner_id"), + rs.getString("login_name")); + + PersonaStatus status = PersonaStatus.fromId(rs.getInt("status_id")); + Persona persona = new Persona( + rs.getInt("id"), + rs.getString("uuid"), + rs.getString("name"), + rs.getString("comment"), + Long.parseLong(rs.getString("created_date")), + Long.parseLong(rs.getString("modified_date")), + status, + examiner + ); + + personaList.add(persona); + } + } + + Collection getPersonas() { + return Collections.unmodifiableCollection(personaList); + } + }; + + // Partial query string to select from personas table, + // just supply the where clause. + private static final String PERSONA_QUERY = + "SELECT p.id, p.uuid, p.name, p.comment, p.created_date, p.modified_date, p.status_id, p.examiner_id, e.login_name, e.display_name " + + "FROM personas as p " + + "INNER JOIN examiners as e ON e.id = p.examiner_id "; + + + /** + * Gets the row from the Personas table with the given UUID, creates and + * returns the Persona from that data. + * + * @param uuid Persona UUID to match. + * @return Persona matching the given UUID, may be null if no match is + * found. + * + * @throws CentralRepoException If there is an error in querying the + * Personas table. 
+ */ + private static Persona getPersonaByUUID(String uuid) throws CentralRepoException { + + String queryClause = + PERSONA_QUERY + + "WHERE p.uuid = '" + uuid + "'"; + + PersonaQueryCallback queryCallback = new PersonaQueryCallback(); + CentralRepository.getInstance().executeSelectSQL(queryClause, queryCallback); + + Collection personas = queryCallback.getPersonas(); + + return personas.isEmpty() ? null : personas.iterator().next(); + } + + /** + * Gets the rows from the Personas table with matching name. + * + * @param partialName Name substring to match. + * @return Collection of personas matching the given name substring, may be + * empty if no match is found. + * + * @throws CentralRepoException If there is an error in querying the + * Personas table. + */ + public static Collection getPersonaByName(String partialName) throws CentralRepoException { + + String queryClause = PERSONA_QUERY + + "WHERE LOWER(p.name) LIKE " + "LOWER('%" + partialName + "%')" ; + + PersonaQueryCallback queryCallback = new PersonaQueryCallback(); + CentralRepository.getInstance().executeSelectSQL(queryClause, queryCallback); + + return queryCallback.getPersonas(); + } + + /** + * Creates an alias for the Persona. + * + * @param alias Alias name. + * @param justification Reason for assigning the alias, may be null. + * @param confidence Confidence level. + * + * @return PersonaAlias + * @throws CentralRepoException If there is an error in creating the alias. + */ + public PersonaAlias addAlias(String alias, String justification, Persona.Confidence confidence) throws CentralRepoException { + return PersonaAlias.addPersonaAlias(this, alias, justification, confidence); + } + + /** + * Gets all aliases for the persona. + * + * @return A collection of aliases, may be empty. + * + * @throws CentralRepoException If there is an error in retrieving aliases. + */ + public Collection getAliases() throws CentralRepoException { + return PersonaAlias.getPersonaAliases(this.getId()); + } + + /** + * Adds specified metadata to the persona. + * + * @param name Metadata name. + * @param value Metadata value. + * @param justification Reason for adding the metadata, may be null. + * @param confidence Confidence level. + * + * @return PersonaMetadata + * @throws CentralRepoException If there is an error in adding metadata. + */ + public PersonaMetadata addMetadata(String name, String value, String justification, Persona.Confidence confidence) throws CentralRepoException { + return PersonaMetadata.addPersonaMetadata(this.getId(), name, value, justification, confidence); + } + + /** + * Gets all metadata for the persona. + * + * @return A collection of metadata, may be empty. + * + * @throws CentralRepoException If there is an error in retrieving aliases. + */ + public Collection getMetadata() throws CentralRepoException { + return PersonaMetadata.getPersonaMetadata(this.getId()); + } + + /** + * Gets all the Accounts for the Persona. + * + * @return Collection of PersonaAccounts, may be empty. + * + * @throws CentralRepoException If there is an error in getting the + * persona_account. 
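Putting the new public Persona API together, a hypothetical caller (not part of the patch) could create a persona from an account and then enrich it with an alias and metadata, using only the methods introduced above. The string literals are placeholder values.

import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoAccount;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
import org.sleuthkit.autopsy.centralrepository.datamodel.Persona;
import org.sleuthkit.autopsy.centralrepository.datamodel.PersonaAccount;

// Hypothetical caller showing the intended flow: create the persona from an
// account, then enrich it with an alias and a piece of metadata.
final class PersonaCreationExample {

    private PersonaCreationExample() {
    }

    static Persona createTaggedPersona(String name, CentralRepoAccount account,
            Persona.PersonaStatus status, Persona.Confidence confidence) throws CentralRepoException {
        PersonaAccount personaAccount = Persona.createPersonaForAccount(
                name, "created from example code", status, account,
                "account observed in case data", confidence);
        Persona persona = personaAccount.getPersona();
        persona.addAlias("example alias", "seen in chat logs", confidence);
        persona.addMetadata("occupation", "unknown", "self-reported", confidence);
        return persona;
    }
}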
+ */ + public Collection getPersonaAccounts() throws CentralRepoException { + return PersonaAccount.getPersonaAccountsForPersona(this.getId()); + } + + /** + * Callback to process a query that gets cases for account instances of an + * account + */ + private static class CaseForAccountInstanceQueryCallback implements CentralRepositoryDbQueryCallback { + + Collection correlationCases = new ArrayList<>(); + + @Override + public void process(ResultSet resultSet) throws CentralRepoException, SQLException { + + while (resultSet.next()) { + // get Case for case_id + CorrelationCase correlationCase = CentralRepository.getInstance().getCaseById(resultSet.getInt("case_id")); + correlationCases.add(correlationCase); + } + } + + Collection getCases() { + return Collections.unmodifiableCollection(correlationCases); + } + }; + + /** + * Gets a list of cases that the persona appears in. + * + * @return Collection of cases that the persona appears in, may be empty. + * @throws CentralRepoException If there is an error in getting the cases + * from the database. + */ + public Collection getCases() throws CentralRepoException { + + Collection casesForPersona = new ArrayList<>(); + + // get all accounts for this persona + Collection accounts = PersonaAccount.getAccountsForPersona(this.getId()); + for (CentralRepoAccount account : accounts) { + int corrTypeId = account.getAccountType().getCorrelationTypeId(); + CorrelationAttributeInstance.Type correlationType = CentralRepository.getInstance().getCorrelationTypeById(corrTypeId); + + String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(correlationType); + String querySql = "SELECT DISTINCT case_id FROM " + tableName + + " WHERE account_id = " + account.getAccountId(); + + CaseForAccountInstanceQueryCallback queryCallback = new CaseForAccountInstanceQueryCallback(); + CentralRepository.getInstance().executeSelectSQL(querySql, queryCallback); + + // Add any cases that aren't already on the list. + for (CorrelationCase corrCase : queryCallback.getCases()) { + if (!casesForPersona.stream().anyMatch(p -> p.getCaseUUID().equalsIgnoreCase(corrCase.getCaseUUID()))) { + casesForPersona.add(corrCase); + } + } + } + + return casesForPersona; + } + + /** + * Callback to process a query that gets data source for account instances + * of an account + */ + private static class DatasourceForAccountInstanceQueryCallback implements CentralRepositoryDbQueryCallback { + + Collection correlationDataSources = new ArrayList<>(); + + @Override + public void process(ResultSet resultSet) throws CentralRepoException, SQLException { + + while (resultSet.next()) { + // get Case for case_id + + CorrelationCase correlationCase = CentralRepository.getInstance().getCaseById(resultSet.getInt("case_id")); + CorrelationDataSource correlationDatasource = CentralRepository.getInstance().getDataSourceById(correlationCase, resultSet.getInt("data_source_id")); + + // Add data source to list if not already on it. + if (!correlationDataSources.stream().anyMatch(p -> Objects.equals(p.getDataSourceObjectID(), correlationDatasource.getDataSourceObjectID()))) { + correlationDataSources.add(correlationDatasource); + } + } + } + + Collection getDataSources() { + return Collections.unmodifiableCollection(correlationDataSources); + } + }; + + /** + * Gets all data sources that the persona appears in. + * + * @return Collection of data sources that the persona appears in, may be + * empty. 
+ * + * @throws CentralRepoException + */ + public Collection getDataSources() throws CentralRepoException { + Collection correlationDataSources = new ArrayList<>(); + + Collection accounts = PersonaAccount.getAccountsForPersona(this.getId()); + for (CentralRepoAccount account : accounts) { + int corrTypeId = account.getAccountType().getCorrelationTypeId(); + CorrelationAttributeInstance.Type correlationType = CentralRepository.getInstance().getCorrelationTypeById(corrTypeId); + + String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(correlationType); + String querySql = "SELECT case_id, data_source_id FROM " + tableName + + " WHERE account_id = " + account.getAccountId(); + + DatasourceForAccountInstanceQueryCallback queryCallback = new DatasourceForAccountInstanceQueryCallback(); + CentralRepository.getInstance().executeSelectSQL(querySql, queryCallback); + + // Add any data sources that aren't already on the list. + for (CorrelationDataSource correlationDatasource : queryCallback.getDataSources()) { + if (!correlationDataSources.stream().anyMatch(p -> Objects.equals(p.getDataSourceObjectID(), correlationDatasource.getDataSourceObjectID()))) { + correlationDataSources.add(correlationDatasource); + } + } + } + + return correlationDataSources; + } + + /** + * Callback to process a query that gets Personas for a case/datasource. + */ + private static class PersonaFromAccountInstanceQueryCallback implements CentralRepositoryDbQueryCallback { + + Collection personasList = new ArrayList<>(); + + @Override + public void process(ResultSet resultSet) throws CentralRepoException, SQLException { + + while (resultSet.next()) { + + // examiner that created the persona + CentralRepoExaminer personaExaminer = new CentralRepoExaminer( + resultSet.getInt("persona_examiner_id"), + resultSet.getString("persona_examiner_login_name")); + + // create persona + PersonaStatus status = PersonaStatus.fromId(resultSet.getInt("status_id")); + Persona persona = new Persona( + resultSet.getInt("persona_id"), + resultSet.getString("uuid"), + resultSet.getString("name"), + resultSet.getString("comment"), + Long.parseLong(resultSet.getString("created_date")), + Long.parseLong(resultSet.getString("modified_date")), + status, + personaExaminer + ); + + personasList.add(persona); + } + } + + Collection getPersonasList() { + return Collections.unmodifiableCollection(personasList); + } + }; + + /** + * Returns a query string for selecting personas for a case/datasource from + * the X_instance table for the given account type. + * + * @param crAccountType Account type to generate the query string for. + * @return Query substring. 
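Usage sketch (hypothetical, built only on accessors that appear in this diff): getCases() and getDataSources() make it straightforward to report where a persona has been seen.

import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
import org.sleuthkit.autopsy.centralrepository.datamodel.Persona;

// Hypothetical report helper built on getCases() and getDataSources().
final class PersonaOccurrenceExample {

    private PersonaOccurrenceExample() {
    }

    static void print(Persona persona) throws CentralRepoException {
        for (CorrelationCase correlationCase : persona.getCases()) {
            System.out.println(persona.getName() + " appears in case " + correlationCase.getCaseUUID());
        }
        for (CorrelationDataSource dataSource : persona.getDataSources()) {
            System.out.println(persona.getName() + " appears in data source " + dataSource.getDataSourceObjectID());
        }
    }
}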
+ * @throws CentralRepoException + */ + private static String getPersonaFromInstanceTableQueryTemplate(CentralRepoAccount.CentralRepoAccountType crAccountType) throws CentralRepoException { + + int corrTypeId = crAccountType.getCorrelationTypeId(); + CorrelationAttributeInstance.Type correlationType = CentralRepository.getInstance().getCorrelationTypeById(corrTypeId); + + String instanceTableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(correlationType); + return "SELECT " + instanceTableName + ".account_id, case_id, data_source_id, " + + " personas.id as persona_id, personas.uuid, personas.name, personas.comment, personas.created_date, personas.modified_date, personas.status_id, " + + " personas.examiner_id as persona_examiner_id, persona_examiner.login_name as persona_examiner_login_name, persona_examiner.display_name as persona_examiner_display_name " + + " FROM " + instanceTableName + + " JOIN persona_accounts as pa on pa.account_id = " + instanceTableName + ".account_id" + + " JOIN personas as personas on personas.id = pa.persona_id" + + " JOIN examiners as persona_examiner ON persona_examiner.id = personas.examiner_id "; + + } + + /** + * Get all the persona for a given case. + * + * @param correlationCase Case to look the persona in. + * + * @return Collection of personas, may be empty. + * @throws CentralRepoException + */ + public static Collection getPersonasForCase(CorrelationCase correlationCase) throws CentralRepoException { + Collection personaList = new ArrayList<>(); + + Collection accountTypes = CentralRepository.getInstance().getAllAccountTypes(); + for (CentralRepoAccount.CentralRepoAccountType crAccountType : accountTypes) { + + String querySql = getPersonaFromInstanceTableQueryTemplate(crAccountType) + + " WHERE case_id = " + correlationCase.getID(); + + PersonaFromAccountInstanceQueryCallback queryCallback = new PersonaFromAccountInstanceQueryCallback(); + CentralRepository.getInstance().executeSelectSQL(querySql, queryCallback); + + // Add persona that aren't already on the list. + for (Persona persona : queryCallback.getPersonasList()) { + if (!personaList.stream().anyMatch(p -> Objects.equals(p.getUuidStr(), persona.getUuidStr()))) { + personaList.add(persona); + } + } + + } + return personaList; + } + + /** + * Get all the persona for a given data source. + * + * @param CorrelationDataSource Data source to look the persona in. + * + * @return Collection of personas, may be empty. + * @throws CentralRepoException + */ + public static Collection getPersonasForDataSource(CorrelationDataSource dataSource) throws CentralRepoException { + Collection personaList = new ArrayList<>(); + + Collection accountTypes = CentralRepository.getInstance().getAllAccountTypes(); + for (CentralRepoAccount.CentralRepoAccountType crAccountType : accountTypes) { + + String querySql = getPersonaFromInstanceTableQueryTemplate(crAccountType) + + " WHERE data_source_id = " + dataSource.getID(); + + PersonaFromAccountInstanceQueryCallback queryCallback = new PersonaFromAccountInstanceQueryCallback(); + CentralRepository.getInstance().executeSelectSQL(querySql, queryCallback); + + // Add persona that aren't already on the list. 
+ for (Persona persona : queryCallback.getPersonasList()) { + if (!personaList.stream().anyMatch(p -> Objects.equals(p.getUuidStr(), persona.getUuidStr()))) { + personaList.add(persona); + } + } + + } + return personaList; + } } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaAccount.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaAccount.java new file mode 100644 index 0000000000..3b74b82aea --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaAccount.java @@ -0,0 +1,258 @@ +/* + * Central Repository + * + * Copyright 2020 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.centralrepository.datamodel; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; + +/** + * This class represents an association between a Persona and an Account. + * + * A Persona has at least one, possibly more, accounts associated with it. + * + * + */ +public class PersonaAccount { + + private final Persona persona; + private final CentralRepoAccount account; + private final String justification; + private final Persona.Confidence confidence; + private final long dateAdded; + private final CentralRepoExaminer examiner; + + public PersonaAccount(Persona persona, CentralRepoAccount account, String justification, Persona.Confidence confidence, long dateAdded, CentralRepoExaminer examiner) { + this.persona = persona; + this.account = account; + this.justification = justification; + this.confidence = confidence; + this.dateAdded = dateAdded; + this.examiner = examiner; + } + + public Persona getPersona() { + return persona; + } + + public CentralRepoAccount getAccount() { + return account; + } + + public String getJustification() { + return justification; + } + + public Persona.Confidence getConfidence() { + return confidence; + } + + public long getDateAdded() { + return dateAdded; + } + + public CentralRepoExaminer getExaminer() { + return examiner; + } + + /** + * Callback to process a Persona Accounts query. 
+ */ + private static class PersonaAccountsQueryCallback implements CentralRepositoryDbQueryCallback { + + Collection personaAccountsList = new ArrayList<>(); + + @Override + public void process(ResultSet rs) throws CentralRepoException, SQLException { + + while (rs.next()) { + // examiner that created the persona/account association + CentralRepoExaminer paExaminer = new CentralRepoExaminer( + rs.getInt("pa_examiner_id"), + rs.getString("pa_examiner_login_name")); + + // examiner that created the persona + CentralRepoExaminer personaExaminer = new CentralRepoExaminer( + rs.getInt("persona_examiner_id"), + rs.getString("persona_examiner_login_name")); + + // create persona + Persona.PersonaStatus status = Persona.PersonaStatus.fromId(rs.getInt("status_id")); + Persona persona = new Persona( + rs.getInt("persona_id"), + rs.getString("uuid"), + rs.getString("name"), + rs.getString("comment"), + Long.parseLong(rs.getString("created_date")), + Long.parseLong(rs.getString("modified_date")), + status, + personaExaminer + ); + + // create account + CentralRepoAccount.CentralRepoAccountType crAccountType = CentralRepository.getInstance().getAccountTypeByName(rs.getString("type_name")); + CentralRepoAccount account = new CentralRepoAccount( + rs.getInt("account_id"), + crAccountType, + rs.getString("account_unique_identifier")); + + // create persona account + PersonaAccount personaAccount = new PersonaAccount(persona, account, + rs.getString("justification"), + Persona.Confidence.fromId(rs.getInt("confidence_id")), + Long.parseLong(rs.getString("date_added")), + paExaminer); + + personaAccountsList.add(personaAccount); + } + } + + Collection getPersonaAccountsList() { + return Collections.unmodifiableCollection(personaAccountsList); + } + }; + + // Query clause to select from persona_accounts table to create PersonaAccount(s) + private static final String PERSONA_ACCOUNTS_QUERY_CALUSE = "SELECT justification, confidence_id, date_added, persona_accounts.examiner_id as pa_examiner_id, pa_examiner.login_name as pa_examiner_login_name, pa_examiner.display_name as pa_examiner_display_name," + + " personas.id as persona_id, personas.uuid, personas.name, personas.comment, personas.created_date, personas.modified_date, personas.status_id, " + + " personas.examiner_id as persona_examiner_id, persona_examiner.login_name as persona_examiner_login_name, persona_examiner.display_name as persona_examiner_display_name, " + + " accounts.id as account_id, account_type_id, account_unique_identifier," + + " account_types.type_name as type_name " + + " FROM persona_accounts as persona_accounts " + + " JOIN personas as personas on persona_accounts.persona_id = personas.id " + + " JOIN accounts as accounts on persona_accounts.account_id = accounts.id " + + " JOIN account_types as account_types on accounts.account_type_id = account_types.id " + + " JOIN examiners as pa_examiner ON pa_examiner.id = persona_accounts.examiner_id " + + " JOIN examiners as persona_examiner ON persona_examiner.id = personas.examiner_id "; + + + /** + * Gets all the Accounts for the specified Persona. + * + * @param personaId Id of persona for which to get the accounts for. + * @return Collection of PersonaAccounts, may be empty. + * + * @throws CentralRepoException If there is an error in getting the + * persona_account. 
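Usage sketch (hypothetical caller): Persona.getPersonaAccounts(), which delegates to the query below, exposes each association together with its recorded confidence and justification.

import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
import org.sleuthkit.autopsy.centralrepository.datamodel.Persona;
import org.sleuthkit.autopsy.centralrepository.datamodel.PersonaAccount;

// Hypothetical caller: dump the accounts tied to a persona, with the recorded
// confidence and justification for each association.
final class PersonaAccountListingExample {

    private PersonaAccountListingExample() {
    }

    static void listAccounts(Persona persona) throws CentralRepoException {
        for (PersonaAccount personaAccount : persona.getPersonaAccounts()) {
            System.out.println("Account id " + personaAccount.getAccount().getAccountId()
                    + ", confidence " + personaAccount.getConfidence().name()
                    + ", justification: " + personaAccount.getJustification());
        }
    }
}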
+ */ + static Collection getPersonaAccountsForPersona(long personaId) throws CentralRepoException { + String queryClause = PERSONA_ACCOUNTS_QUERY_CALUSE + + " WHERE persona_accounts.persona_id = " + personaId; + + PersonaAccountsQueryCallback queryCallback = new PersonaAccountsQueryCallback(); + CentralRepository.getInstance().executeSelectSQL(queryClause, queryCallback); + + return queryCallback.getPersonaAccountsList(); + } + + /** + * Gets all the Persona for the specified Account. + * + * @param accountId Id of account for which to get the Personas for. + * @return Collection of PersonaAccounts. may be empty. + * + * @throws CentralRepoException If there is an error in getting the + * persona_account. + */ + public static Collection getPersonaAccountsForAccount(long accountId) throws CentralRepoException { + String queryClause = PERSONA_ACCOUNTS_QUERY_CALUSE + + " WHERE persona_accounts.account_id = " + accountId; + + PersonaAccountsQueryCallback queryCallback = new PersonaAccountsQueryCallback(); + CentralRepository.getInstance().executeSelectSQL(queryClause, queryCallback); + + return queryCallback.getPersonaAccountsList(); + } + + /** + * Gets all the Persona associated with all the accounts matching the given + * account identifier substring. + * + * @param accountIdentifierSubstring Account identifier substring to search + * for. + * @return Collection of PersonaAccounts. may be empty. + * + * @throws CentralRepoException If there is an error in getting the + * persona_account. + */ + public static Collection getPersonaAccountsForAccountIdentifier(String accountIdentifierSubstring) throws CentralRepoException { + String queryClause = PERSONA_ACCOUNTS_QUERY_CALUSE + + " WHERE LOWER(accounts.account_unique_identifier) LIKE LOWER('%" + accountIdentifierSubstring + "%')"; + + PersonaAccountsQueryCallback queryCallback = new PersonaAccountsQueryCallback(); + CentralRepository.getInstance().executeSelectSQL(queryClause, queryCallback); + + return queryCallback.getPersonaAccountsList(); + } + + /** + * Callback to process a query that gets all accounts belonging to a + * persona. + */ + private static class AccountsForPersonaQueryCallback implements CentralRepositoryDbQueryCallback { + + Collection accountsList = new ArrayList<>(); + + @Override + public void process(ResultSet rs) throws CentralRepoException, SQLException { + + while (rs.next()) { + + // create account + CentralRepoAccount.CentralRepoAccountType crAccountType = CentralRepository.getInstance().getAccountTypeByName(rs.getString("type_name")); + CentralRepoAccount account = new CentralRepoAccount( + rs.getInt("account_id"), + crAccountType, + rs.getString("account_unique_identifier")); + + accountsList.add(account); + } + } + + Collection getAccountsList() { + return Collections.unmodifiableCollection(accountsList); + } + }; + + /** + * Get all accounts associated with a persona. + * + * @param personaId Id of the persona to look for. + * + * @return Collection of all accounts associated with the given persona, may + * be empty. + * @throws CentralRepoException If there is an error in getting the accounts. 
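A second usage sketch (hypothetical): getPersonaAccountsForAccountIdentifier() supports substring search over account identifiers, and the LOWER(... LIKE ...) query above makes the match case-insensitive.

import java.util.Collection;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
import org.sleuthkit.autopsy.centralrepository.datamodel.PersonaAccount;

// Hypothetical search helper: find persona/account associations whose account
// identifier contains the given fragment (e.g. part of an email address).
final class PersonaSearchExample {

    private PersonaSearchExample() {
    }

    static void searchByAccountFragment(String fragment) throws CentralRepoException {
        Collection<PersonaAccount> matches
                = PersonaAccount.getPersonaAccountsForAccountIdentifier(fragment);
        for (PersonaAccount match : matches) {
            System.out.println(match.getPersona().getName()
                    + " -> account id " + match.getAccount().getAccountId()
                    + " (confidence " + match.getConfidence().name() + ")");
        }
    }
}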
+ */ + static Collection getAccountsForPersona(long personaId) throws CentralRepoException { + String queryClause = "SELECT account_id, " + + " accounts.account_type_id as account_type_id, accounts.account_unique_identifier as account_unique_identifier," + + " account_types.type_name as type_name " + + " FROM persona_accounts " + + " JOIN accounts as accounts on persona_accounts.account_id = accounts.id " + + " JOIN account_types as account_types on accounts.account_type_id = account_types.id " + + " WHERE persona_accounts.persona_id = " + personaId; + + AccountsForPersonaQueryCallback queryCallback = new AccountsForPersonaQueryCallback(); + CentralRepository.getInstance().executeSelectSQL(queryClause, queryCallback); + + return queryCallback.getAccountsList(); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaAlias.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaAlias.java new file mode 100644 index 0000000000..cdebd7097d --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaAlias.java @@ -0,0 +1,160 @@ +/* + * Central Repository + * + * Copyright 2020 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.centralrepository.datamodel; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import org.apache.commons.lang3.StringUtils; +import org.sleuthkit.datamodel.SleuthkitCase; + +/** + * This class abstracts an alias assigned to a Persona. + * A Persona may have multiple aliases. + * + */ +public class PersonaAlias { + + private final long personaId; + private final String alias; + private final String justification; + private final Persona.Confidence confidence; + private final long dateAdded; + private final CentralRepoExaminer examiner; + + public long getPersonaId() { + return personaId; + } + + public String getAlias() { + return alias; + } + + public String getJustification() { + return justification; + } + + public Persona.Confidence getConfidence() { + return confidence; + } + + public long getDateAadded() { + return dateAdded; + } + + public CentralRepoExaminer getExaminer() { + return examiner; + } + + public PersonaAlias(long personaId, String alias, String justification, Persona.Confidence confidence, long dateAdded, CentralRepoExaminer examiner) { + this.personaId = personaId; + this.alias = alias; + this.justification = justification; + this.confidence = confidence; + this.dateAdded = dateAdded; + this.examiner = examiner; + } + + /** + * Creates an alias for the specified Persona. + * + * @param persona Persona for which the alias is being added. + * @param alias Alias name. + * @param justification Reason for assigning the alias, may be null. + * @param confidence Confidence level. 
+ * + * @return PersonaAlias + * @throws CentralRepoException If there is an error in creating the alias. + */ + static PersonaAlias addPersonaAlias(Persona persona, String alias, String justification, Persona.Confidence confidence) throws CentralRepoException { + + CentralRepoExaminer examiner = CentralRepository.getInstance().getOrInsertExaminer(System.getProperty("user.name")); + + Instant instant = Instant.now(); + Long timeStampMillis = instant.toEpochMilli(); + + String insertClause = " INTO persona_alias (persona_id, alias, justification, confidence_id, date_added, examiner_id ) " + + "VALUES ( " + + persona.getId() + ", " + + "'" + alias + "', " + + "'" + ((StringUtils.isBlank(justification) ? "" : SleuthkitCase.escapeSingleQuotes(justification))) + "', " + + confidence.getLevelId() + ", " + + timeStampMillis.toString() + ", " + + examiner.getId() + + ")"; + + CentralRepository.getInstance().executeInsertSQL(insertClause); + return new PersonaAlias(persona.getId(), alias, justification, confidence, timeStampMillis, examiner); + } + + /** + * Callback to process a Persona aliases query. + */ + static class PersonaAliasesQueryCallback implements CentralRepositoryDbQueryCallback { + + private final Collection personaAliases = new ArrayList<>(); + + @Override + public void process(ResultSet rs) throws SQLException { + + while (rs.next()) { + CentralRepoExaminer examiner = new CentralRepoExaminer( + rs.getInt("examiner_id"), + rs.getString("login_name")); + + PersonaAlias alias = new PersonaAlias( + rs.getLong("persona_id"), + rs.getString("alias"), + rs.getString("justification"), + Persona.Confidence.fromId(rs.getInt("confidence_id")), + Long.parseLong(rs.getString("date_added")), + examiner); + + personaAliases.add(alias); + } + } + + Collection getAliases() { + return Collections.unmodifiableCollection(personaAliases); + } + }; + + /** + * Gets all aliases for the persona with specified id. + * + * @param personaId Id of the persona for which to get the aliases. + * @return A collection of aliases, may be empty. + * + * @throws CentralRepoException If there is an error in retrieving aliases. + */ + public static Collection getPersonaAliases(long personaId) throws CentralRepoException { + String queryClause = "SELECT pa.id, pa.persona_id, pa.alias, pa.justification, pa.confidence_id, pa.date_added, pa.examiner_id, e.login_name, e.display_name " + + "FROM persona_alias as pa " + + "INNER JOIN examiners as e ON e.id = pa.examiner_id "; + + PersonaAliasesQueryCallback queryCallback = new PersonaAliasesQueryCallback(); + CentralRepository.getInstance().executeSelectSQL(queryClause, queryCallback); + + return queryCallback.getAliases(); + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaMetadata.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaMetadata.java new file mode 100644 index 0000000000..713f00a981 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaMetadata.java @@ -0,0 +1,172 @@ +/* + * Central Repository + * + * Copyright 2020 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.centralrepository.datamodel; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import org.apache.commons.lang3.StringUtils; +import org.sleuthkit.datamodel.SleuthkitCase; + +/** + * This class abstracts metadata associated with a Persona. + * Metadata is in the form of a name/value pair. + * + * A Persona may have zero or more metadata. + * + */ +public class PersonaMetadata { + + private final long personaId; + private final String name; + private final String value; + private final String justification; + private final Persona.Confidence confidence; + private final long dateAdded; + private final CentralRepoExaminer examiner; + + public long getPersonaId() { + return personaId; + } + + public String getName() { + return name; + } + + public String getValue() { + return value; + } + + public String getJustification() { + return justification; + } + + public Persona.Confidence getConfidence() { + return confidence; + } + + public long getDateAdded() { + return dateAdded; + } + + public CentralRepoExaminer getExaminer() { + return examiner; + } + + public PersonaMetadata(long personaId, String name, String value, String justification, Persona.Confidence confidence, long dateAdded, CentralRepoExaminer examiner) { + this.personaId = personaId; + this.name = name; + this.value = value; + this.justification = justification; + this.confidence = confidence; + this.dateAdded = dateAdded; + this.examiner = examiner; + } + + /** + * Adds specified metadata to the given persona. + * + * @param personaId Id of persona to add metadata for. + * @param name Metadata name. + * @param value Metadata value. + * @param justification Reason for adding the metadata, may be null. + * @param confidence Confidence level. + * + * @return PersonaMetadata + * @throws CentralRepoException If there is an error in adding metadata. + */ + static PersonaMetadata addPersonaMetadata(long personaId, String name, String value, String justification, Persona.Confidence confidence) throws CentralRepoException { + + CentralRepoExaminer examiner = CentralRepository.getInstance().getOrInsertExaminer(System.getProperty("user.name")); + + Instant instant = Instant.now(); + Long timeStampMillis = instant.toEpochMilli(); + + String insertClause = " INTO persona_metadata (persona_id, name, value, justification, confidence_id, date_added, examiner_id ) " + + "VALUES ( " + + personaId + ", " + + "'" + name + "', " + + "'" + value + "', " + + "'" + ((StringUtils.isBlank(justification) ? "" : SleuthkitCase.escapeSingleQuotes(justification))) + "', " + + confidence.getLevelId() + ", " + + timeStampMillis.toString() + ", " + + examiner.getId() + + ")"; + + CentralRepository.getInstance().executeInsertSQL(insertClause); + return new PersonaMetadata(personaId, name, value, justification, confidence, timeStampMillis, examiner); + } + + /** + * Callback to process a Persona metadata query. 
+ */ + private static class PersonaMetadataQueryCallback implements CentralRepositoryDbQueryCallback { + + Collection personaMetadataList = new ArrayList<>(); + + @Override + public void process(ResultSet rs) throws SQLException { + + while (rs.next()) { + CentralRepoExaminer examiner = new CentralRepoExaminer( + rs.getInt("examiner_id"), + rs.getString("login_name")); + + PersonaMetadata metaData = new PersonaMetadata( + rs.getLong("persona_id"), + rs.getString("name"), + rs.getString("value"), + rs.getString("justification"), + Persona.Confidence.fromId(rs.getInt("confidence_id")), + Long.parseLong(rs.getString("date_added")), + examiner); + + personaMetadataList.add(metaData); + } + } + + Collection getMetadataList() { + return Collections.unmodifiableCollection(personaMetadataList); + } + }; + + /** + * Gets all metadata for the persona with specified id. + * + * @param personaId Id of the persona for which to get the metadata. + * @return A collection of metadata, may be empty. + * + * @throws CentralRepoException If there is an error in retrieving aliases. + */ + static Collection getPersonaMetadata(long personaId) throws CentralRepoException { + String queryClause = "SELECT pmd.id, pmd.persona_id, pmd.name, pmd.value, pmd.justification, pmd.confidence_id, pmd.date_added, pmd.examiner_id, e.login_name, e.display_name " + + "FROM persona_metadata as pmd " + + "INNER JOIN examiners as e ON e.id = pmd.examiner_id "; + + PersonaMetadataQueryCallback queryCallback = new PersonaMetadataQueryCallback(); + CentralRepository.getInstance().executeSelectSQL(queryClause, queryCallback); + + return queryCallback.getMetadataList(); + + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java index 24d02ae2c2..2afda97c05 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java @@ -51,6 +51,8 @@ import org.sleuthkit.autopsy.healthmonitor.HealthMonitor; import org.sleuthkit.autopsy.healthmonitor.TimingMetric; import org.sleuthkit.datamodel.Account; import org.sleuthkit.datamodel.CaseDbSchemaVersionNumber; +import org.sleuthkit.datamodel.HashHitInfo; +import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskData; /** @@ -101,6 +103,9 @@ abstract class RdbmsCentralRepo implements CentralRepository { // Update Test code if this changes. It's hard coded there. static final int DEFAULT_BULK_THRESHHOLD = 1000; + private static final int QUERY_STR_MAX_LEN = 1000; + + /** * Connect to the DB and initialize it. * @@ -216,7 +221,7 @@ abstract class RdbmsCentralRepo implements CentralRepository { /** * Reset the contents of the caches associated with EamDb results. 
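The private callback classes above (PersonaAliasesQueryCallback, PersonaMetadataQueryCallback) follow one contract: the repository opens the connection, prepares and executes the statement, then hands the live ResultSet to process(), which does all of the row iteration. A hypothetical, minimal callback written against that contract, assuming the same package as CentralRepositoryDbQueryCallback:

import java.sql.ResultSet;
import java.sql.SQLException;

// Hypothetical callback that just counts rows; the real callbacks map rows to objects.
class RowCountCallback implements CentralRepositoryDbQueryCallback {

    private long rowCount = 0;

    @Override
    public void process(ResultSet rs) throws SQLException {
        while (rs.next()) {
            rowCount++;
        }
    }

    long getRowCount() {
        return rowCount;
    }
}

A caller passes an instance to CentralRepository.getInstance().executeSelectSQL(sql, callback), exactly as getPersonaAliases() and getPersonaMetadata() do above.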
*/ - protected final void clearCaches() { + public final void clearCaches() { synchronized(typeCache) { typeCache.invalidateAll(); isCRTypeCacheInitialized = false; @@ -225,6 +230,7 @@ abstract class RdbmsCentralRepo implements CentralRepository { caseCacheById.invalidateAll(); dataSourceCacheByDsObjectId.invalidateAll(); dataSourceCacheById.invalidateAll(); + accountsCache.invalidateAll(); } /** @@ -1111,6 +1117,31 @@ abstract class RdbmsCentralRepo implements CentralRepository { } + + @Override + public Collection getAllAccountTypes() throws CentralRepoException { + + Collection accountTypes = new ArrayList<>(); + + String sql = "SELECT * FROM account_types"; + try ( Connection conn = connect(); + PreparedStatement preparedStatement = conn.prepareStatement(sql);) { + + + try (ResultSet resultSet = preparedStatement.executeQuery();) { + while (resultSet.next()) { + Account.Type acctType = new Account.Type(resultSet.getString("type_name"), resultSet.getString("display_name")); + CentralRepoAccountType crAccountType = new CentralRepoAccountType(resultSet.getInt("id"), acctType, resultSet.getInt("correlation_type_id")); + + accountTypes.add(crAccountType); + } + } + } catch (SQLException ex) { + throw new CentralRepoException("Error getting account types from central repository.", ex); // NON-NLS + } + return accountTypes; + } + /** * Gets the CR account type for the specified type name. * @@ -2277,6 +2308,45 @@ abstract class RdbmsCentralRepo implements CentralRepository { return isValueInReferenceSet(hash, referenceSetID, CorrelationAttributeInstance.FILES_TYPE_ID); } + @Override + public HashHitInfo lookupHash(String hash, int referenceSetID) throws CentralRepoException, CorrelationAttributeNormalizationException { + int correlationTypeID = CorrelationAttributeInstance.FILES_TYPE_ID; + String normalizeValued = CorrelationAttributeNormalizer.normalize(this.getCorrelationTypeById(correlationTypeID), hash); + + Connection conn = connect(); + + PreparedStatement preparedStatement = null; + ResultSet resultSet = null; + String sql = "SELECT value,comment FROM %s WHERE value=? 
AND reference_set_id=?"; + + String fileTableName = CentralRepoDbUtil.correlationTypeToReferenceTableName(getCorrelationTypeById(correlationTypeID)); + + try { + preparedStatement = conn.prepareStatement(String.format(sql, fileTableName)); + preparedStatement.setString(1, normalizeValued); + preparedStatement.setInt(2, referenceSetID); + resultSet = preparedStatement.executeQuery(); + if (resultSet.next()) { + String comment = resultSet.getString("comment"); + String hashFound = resultSet.getString("value"); + HashHitInfo found = new HashHitInfo(hashFound, "", ""); + found.addComment(comment); + return found; + } + else { + return null; + } + } catch (SQLException ex) { + throw new CentralRepoException("Error determining if value (" + normalizeValued + ") is in reference set " + referenceSetID, ex); // NON-NLS + } finally { + CentralRepoDbUtil.closeStatement(preparedStatement); + CentralRepoDbUtil.closeResultSet(resultSet); + CentralRepoDbUtil.closeConnection(conn); + } + } + + + /** * Check if the given value is in a specific reference set * @@ -2486,6 +2556,48 @@ abstract class RdbmsCentralRepo implements CentralRepository { } } + @Override + public void executeInsertSQL(String insertClause) throws CentralRepoException { + + if (insertClause == null) { + throw new CentralRepoException("Insert SQL is null"); + } + + String sql = getPlatformSpecificInsertSQL(insertClause); + try (Connection conn = connect(); + PreparedStatement preparedStatement = conn.prepareStatement(sql);) { + preparedStatement.executeUpdate(); + } catch (SQLException ex) { + throw new CentralRepoException(String.format("Error running SQL %s, exception = %s", sql, ex.getMessage()), ex); + } + } + + @Override + public void executeSelectSQL(String selectSQL, CentralRepositoryDbQueryCallback queryCallback) throws CentralRepoException { + if (queryCallback == null) { + throw new CentralRepoException("Query callback is null"); + } + + if (selectSQL == null) { + throw new CentralRepoException("Select SQL is null"); + } + + StringBuilder sqlSb = new StringBuilder(QUERY_STR_MAX_LEN); + if (selectSQL.trim().toUpperCase().startsWith("SELECT") == false) { + sqlSb.append("SELECT "); + } + + sqlSb.append(selectSQL); + + try (Connection conn = connect(); + PreparedStatement preparedStatement = conn.prepareStatement(sqlSb.toString()); + ResultSet resultSet = preparedStatement.executeQuery();) { + queryCallback.process(resultSet); + } catch (SQLException ex) { + throw new CentralRepoException(String.format("Error running SQL %s, exception = %s", selectSQL, ex.getMessage()), ex); + } + } + @Override public CentralRepoOrganization newOrganization(CentralRepoOrganization eamOrg) throws CentralRepoException { if (eamOrg == null) { @@ -2624,6 +2736,61 @@ abstract class RdbmsCentralRepo implements CentralRepository { } } + /** + * Queries the examiner table for the given user name. + * Adds a row if the user is not found in the examiner table. + * + * @param examinerLoginName user name to look for. + * @return CentralRepoExaminer for the given user name. + * @throws CentralRepoException If there is an error in looking up or + * inserting the user in the examiners table. 
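A usage note on the two generic helpers added above: executeInsertSQL() expects the statement without the leading INSERT keyword, because the repository prepends the platform-specific form itself (ON CONFLICT DO NOTHING on PostgreSQL, INSERT OR IGNORE on SQLite, via getPlatformSpecificInsertSQL() later in this file). A small fragment, same package assumed, with made-up literal values and the column list taken from the persona_metadata insert above:

static void addSampleMetadataRow() throws CentralRepoException {
    // Note: no leading "INSERT" here; executeInsertSQL() adds the platform-specific prefix.
    String insertClause = " INTO persona_metadata (persona_id, name, value, justification, confidence_id, date_added, examiner_id ) "
            + "VALUES ( 1, 'nickname', 'jdoe', '', 1, 0, 1 )";
    CentralRepository.getInstance().executeInsertSQL(insertClause);
}

executeSelectSQL() is similarly forgiving about its input: if the supplied statement does not already start with SELECT, a leading "SELECT " is prepended before the query is prepared, and the resulting ResultSet is handed to the supplied callback.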
+ */ + + @Override + public CentralRepoExaminer getOrInsertExaminer(String examinerLoginName) throws CentralRepoException { + + String querySQL = "SELECT * FROM examiners WHERE login_name = '" + SleuthkitCase.escapeSingleQuotes(examinerLoginName) + "'"; + try (Connection connection = connect(); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(querySQL);) { + + if (resultSet.next()) { + return new CentralRepoExaminer(resultSet.getLong("id"), resultSet.getString("login_name")); + } else { + // Could not find this user in the Examiner table, add a row for it. + try { + String insertSQL; + switch (CentralRepoDbManager.getSavedDbChoice().getDbPlatform()) { + case POSTGRESQL: + insertSQL = "INSERT INTO examiners (login_name) VALUES ('" + SleuthkitCase.escapeSingleQuotes(examinerLoginName) + "')" + getConflictClause(); //NON-NLS + break; + case SQLITE: + insertSQL = "INSERT OR IGNORE INTO examiners (login_name) VALUES ('" + SleuthkitCase.escapeSingleQuotes(examinerLoginName) + "')"; //NON-NLS + break; + default: + throw new CentralRepoException(String.format("Cannot add examiner to currently selected CR database platform %s", CentralRepoDbManager.getSavedDbChoice().getDbPlatform())); //NON-NLS + } + statement.execute(insertSQL); + + // Query the table again to get the row for the user + try (ResultSet resultSet2 = statement.executeQuery(querySQL)) { + if (resultSet2.next()) { + return new CentralRepoExaminer(resultSet2.getLong("id"), resultSet2.getString("login_name")); + } else { + throw new CentralRepoException("Error getting examiner for name = " + examinerLoginName); + } + } + + } catch (SQLException ex) { + throw new CentralRepoException("Error inserting row in examiners", ex); + } + } + + } catch (SQLException ex) { + throw new CentralRepoException("Error getting examiner for name = " + examinerLoginName, ex); + } + } + /** * Update an existing organization. 
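getOrInsertExaminer() is what gives the persona code above its examiner rows; the contract is get-or-create, so repeated calls with the same login name resolve to the same row. A two-line fragment of the intended use (same package assumed, exception handling omitted):

// The examiners row is inserted on first use and simply looked up afterwards.
CentralRepoExaminer examiner = CentralRepository.getInstance().getOrInsertExaminer(System.getProperty("user.name"));
long examinerId = examiner.getId();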
* @@ -3489,6 +3656,19 @@ abstract class RdbmsCentralRepo implements CentralRepository { ); } + private String getPlatformSpecificInsertSQL(String sql) throws CentralRepoException { + + switch (CentralRepoDbManager.getSavedDbChoice().getDbPlatform()) { + case POSTGRESQL: + return "INSERT " + sql + " ON CONFLICT DO NOTHING"; //NON-NLS + case SQLITE: + return "INSERT OR IGNORE " + sql; + + default: + throw new CentralRepoException("Unknown Central Repo DB platform" + CentralRepoDbManager.getSavedDbChoice().getDbPlatform()); + } + } + /** * Determine if a specific column already exists in a specific table * diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepoFactory.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepoFactory.java index aedfce276d..2a157549f3 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepoFactory.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepoFactory.java @@ -811,14 +811,14 @@ public class RdbmsCentralRepoFactory { try (Statement stmt = conn.createStatement()) { // populate the confidence table for (Confidence confidence : Persona.Confidence.values()) { - String sqlString = "INSERT INTO confidence (confidence_id, description) VALUES ( " + confidence.getLevel() + ", '" + confidence.toString() + "')" //NON-NLS + String sqlString = "INSERT INTO confidence (confidence_id, description) VALUES ( " + confidence.getLevelId() + ", '" + confidence.toString() + "')" //NON-NLS + getOnConflictDoNothingClause(selectedPlatform); stmt.execute(sqlString); } // populate the persona_status table for (PersonaStatus status : Persona.PersonaStatus.values()) { - String sqlString = "INSERT INTO persona_status (status_id, status) VALUES ( " + status.getStatus() + ", '" + status.toString() + "')" //NON-NLS + String sqlString = "INSERT INTO persona_status (status_id, status) VALUES ( " + status.getStatusId() + ", '" + status.toString() + "')" //NON-NLS + getOnConflictDoNothingClause(selectedPlatform); stmt.execute(sqlString); } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteCentralRepo.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteCentralRepo.java index e5aecdf788..80f1df746c 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteCentralRepo.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteCentralRepo.java @@ -833,6 +833,27 @@ final class SqliteCentralRepo extends RdbmsCentralRepo { } } + @Override + public void executeInsertSQL(String insertSQL) throws CentralRepoException { + try { + acquireSharedLock(); + super.executeInsertSQL(insertSQL); + } finally { + releaseSharedLock(); + } + } + + @Override + public void executeSelectSQL(String selectSQL, CentralRepositoryDbQueryCallback queryCallback) throws CentralRepoException { + try { + acquireSharedLock(); + super.executeSelectSQL(selectSQL, queryCallback); + } finally { + releaseSharedLock(); + } + } + + /** * Check whether a reference set with the given name/version is in the * central repo. 
Used to check for name collisions when creating reference diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java index d8bd1c43c3..67c194fda7 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java @@ -55,6 +55,7 @@ import org.sleuthkit.datamodel.TagName; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository; +import org.sleuthkit.autopsy.events.AutopsyEvent; /** * Listen for case events and update entries in the Central Repository database @@ -86,6 +87,10 @@ final class CaseEventListener implements PropertyChangeListener { @Override public void propertyChange(PropertyChangeEvent evt) { + if (!(evt instanceof AutopsyEvent) || (((AutopsyEvent) evt).getSourceType() != AutopsyEvent.SourceType.LOCAL)) { + return; + } + CentralRepository dbManager; try { dbManager = CentralRepository.getInstance(); diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java index 843e476499..400eeeb033 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java @@ -37,6 +37,7 @@ import org.apache.commons.lang3.StringUtils; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoAccount; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeUtil; @@ -62,6 +63,11 @@ import org.sleuthkit.datamodel.Image; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository; +import org.sleuthkit.autopsy.centralrepository.datamodel.Persona; +import org.sleuthkit.autopsy.centralrepository.datamodel.PersonaAccount; +import org.sleuthkit.datamodel.Account; +import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT; +import org.sleuthkit.datamodel.CommunicationsUtils; /** * Listen for ingest events and update entries in the Central Repository @@ -337,6 +343,94 @@ public class IngestEventsListener { event = evt; } + /** + * Automatically creates personas from all the TSK_CONTACT artifacts + * found in a data source. + * + * @param dataSource Data source that was just analyzed. + * @throws TskCoreException If there is any error getting contact + * artifacts from case database. + * @throws CentralRepoException If there is an error in creating + * personas in the Central Repo. 
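The early return added to CaseEventListener above is worth calling out: events whose source type is not LOCAL are ignored, presumably so that in a multi-user deployment only the node where the change originated writes to the Central Repository (the guard itself is verbatim from the patch; the rationale is an inference). A standalone sketch with a hypothetical class name:

import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import org.sleuthkit.autopsy.events.AutopsyEvent;

// Hypothetical listener illustrating the source-type guard.
class LocalOnlyListener implements PropertyChangeListener {

    @Override
    public void propertyChange(PropertyChangeEvent evt) {
        if (!(evt instanceof AutopsyEvent)
                || ((AutopsyEvent) evt).getSourceType() != AutopsyEvent.SourceType.LOCAL) {
            return; // remote or non-Autopsy event: nothing to do on this node
        }
        // ... handle the locally generated event ...
    }
}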
+ */ + private void autoGenerateContactPersonas(Content dataSource) throws TskCoreException, CentralRepoException { + + Blackboard blackboard; + try { + blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); + } catch (NoCurrentCaseException ex) { + LOGGER.log(Level.SEVERE, "Exception while getting open case.", ex); + return; + } + + // get all TSK_CONTACT artifacts in this data source. + List contactArtifacts = blackboard.getArtifacts(TSK_CONTACT.getTypeID(), dataSource.getId()); + for (BlackboardArtifact artifact : contactArtifacts) { + + BlackboardAttribute nameAttr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME)); + String personaName = (nameAttr != null) ? nameAttr.getValueString() : null; + + // Get phone number and email attributes. + BlackboardAttribute phoneAttr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER)); + BlackboardAttribute homePhoneAttr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_HOME)); + BlackboardAttribute mobilePhoneAttr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_MOBILE)); + BlackboardAttribute emailAttr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL)); + + Persona persona = personaFromContactAttribute(null, Account.Type.PHONE, phoneAttr, personaName); + persona = personaFromContactAttribute(persona, Account.Type.PHONE, homePhoneAttr, personaName); + persona = personaFromContactAttribute(persona, Account.Type.PHONE, mobilePhoneAttr, personaName); + personaFromContactAttribute(persona, Account.Type.EMAIL, emailAttr, personaName); + } + } + + + + /** + * Gets the central repo account for the given attribute of a TSK_CONTACT + * artifact. Associates the given persona with that account. Creates a + * Persona, if one isn't provided. + * + * @param persona Persona to associate with the account. May be null, in + * which case a persona is created first. + * @param accountType Account type of account to be associated. + * @param attribute Attribute from which to get the account id. + * @param personaName Persona name, if a persona needs to be created. + * @return Persona created or associated with the account. + * + * @throws TskCoreException If there is an error in normalizing the + * account id. + * @throws CentralRepoException If there is an error in getting the + * account or associating the persona with it.
+ */ + private Persona personaFromContactAttribute(Persona persona, Account.Type accountType, BlackboardAttribute attribute, String personaName) throws CentralRepoException, TskCoreException { + + Persona personaToReturn = persona; + if (attribute != null) { + + String accountId = attribute.getValueString(); + if (CommunicationsUtils.isValidAccountId(accountType, accountId)) { + if (accountType == Account.Type.PHONE) { + accountId = CommunicationsUtils.normalizePhoneNum(accountId); + } else if (accountType == Account.Type.EMAIL) { + accountId = CommunicationsUtils.normalizeEmailAddress(accountId); + } + + CentralRepoAccount.CentralRepoAccountType crAccountType = CentralRepository.getInstance().getAccountTypeByName(accountType.getTypeName()); + CentralRepoAccount crAccount = CentralRepository.getInstance().getOrCreateAccount(crAccountType, accountId); + + PersonaAccount personaAccount; + // If persona doesnt exist, create one + if (persona == null) { + personaAccount = Persona.createPersonaForAccount(personaName, "Auto generated contact persona", Persona.PersonaStatus.UNKNOWN, crAccount, "Found in contact book entry", Persona.Confidence.DERIVED); + personaToReturn = personaAccount.getPersona(); + } else { + persona.addAccountToPersona(crAccount, "Found in contact book entry", Persona.Confidence.DERIVED); + } + } + } + return personaToReturn; + } + @Override public void run() { // clear the tracker to reduce memory usage @@ -411,6 +505,8 @@ public class IngestEventsListener { correlationDataSource.setSha256(imageSha256Hash); } } + // automatically generate persona from contact artifacts. + autoGenerateContactPersonas(dataSource); } } catch (CentralRepoException ex) { LOGGER.log(Level.SEVERE, String.format( diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/contextviewer/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/contentviewers/contextviewer/Bundle.properties-MERGED index c8cebe69fc..4d4d3cf10f 100755 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/contextviewer/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/contextviewer/Bundle.properties-MERGED @@ -17,7 +17,10 @@ ContextViewer.messageFrom=From ContextViewer.messageOn=On ContextViewer.messageTo=To ContextViewer.on=Opened at +ContextViewer.programExecution=Program Execution: ContextViewer.recentDocs=Recent Documents: +ContextViewer.runOn=Program Run On +ContextViewer.runUnknown=\ Program Run at unknown time ContextViewer.title=Context ContextViewer.toolTip=Displays context for selected file. 
ContextViewer.unknown=Opened at unknown time diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/contextviewer/ContextViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/contextviewer/ContextViewer.java index 7200f6747f..f7f1f6559e 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/contextviewer/ContextViewer.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/contextviewer/ContextViewer.java @@ -334,7 +334,8 @@ public final class ContextViewer extends javax.swing.JPanel implements DataConte @NbBundle.Messages({ "ContextViewer.attachmentSource=Attached to: ", "ContextViewer.downloadSource=Downloaded from: ", - "ContextViewer.recentDocs=Recent Documents: " + "ContextViewer.recentDocs=Recent Documents: ", + "ContextViewer.programExecution=Program Execution: " }) private void setSourceFields(BlackboardArtifact associatedArtifact) throws TskCoreException { if (BlackboardArtifact.ARTIFACT_TYPE.TSK_MESSAGE.getTypeID() == associatedArtifact.getArtifactTypeID() @@ -357,6 +358,11 @@ public final class ContextViewer extends javax.swing.JPanel implements DataConte javax.swing.JPanel usagePanel = new ContextUsagePanel(sourceName, sourceText, associatedArtifact); contextUsagePanels.add(usagePanel); + } else if (BlackboardArtifact.ARTIFACT_TYPE.TSK_PROG_RUN.getTypeID() == associatedArtifact.getArtifactTypeID()) { + String sourceName = Bundle.ContextViewer_programExecution(); + String sourceText = programExecArtifactToString(associatedArtifact); + javax.swing.JPanel usagePanel = new ContextUsagePanel(sourceName, sourceText, associatedArtifact); + contextUsagePanels.add(usagePanel); } } @@ -416,6 +422,36 @@ public final class ContextViewer extends javax.swing.JPanel implements DataConte return sb.toString(); } + /** + * Returns a display string with Program Execution + * artifact. + * + * @param artifact artifact to get doc from. + * + * @return Display string with download URL and date/time. + * + * @throws TskCoreException + */ + @NbBundle.Messages({ + "ContextViewer.runOn=Program Run On", + "ContextViewer.runUnknown= Program Run at unknown time" + }) + private String programExecArtifactToString(BlackboardArtifact artifact) throws TskCoreException { + StringBuilder sb = new StringBuilder(ARTIFACT_STR_MAX_LEN); + Map attributesMap = getAttributesMap(artifact); + + BlackboardAttribute attribute = attributesMap.get(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME); + + if (BlackboardArtifact.ARTIFACT_TYPE.TSK_PROG_RUN.getTypeID() == artifact.getArtifactTypeID()) { + if (attribute != null && attribute.getValueLong() > 0) { + appendAttributeString(sb, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, attributesMap, Bundle.ContextViewer_runOn()); + } else { + sb.append(Bundle.ContextViewer_runUnknown()); + } + } + return sb.toString(); + } + /** * Returns a abbreviated display string for a message artifact. 
* diff --git a/Core/src/org/sleuthkit/autopsy/events/AutopsyEventPublisher.java b/Core/src/org/sleuthkit/autopsy/events/AutopsyEventPublisher.java index 02a5d2e2ab..f3fb273bcf 100644 --- a/Core/src/org/sleuthkit/autopsy/events/AutopsyEventPublisher.java +++ b/Core/src/org/sleuthkit/autopsy/events/AutopsyEventPublisher.java @@ -42,9 +42,9 @@ public final class AutopsyEventPublisher { private static final Logger logger = Logger.getLogger(AutopsyEventPublisher.class.getName()); private static final int MAX_REMOTE_EVENT_PUBLISH_TRIES = 1; private final LocalEventPublisher localPublisher; // LocalEventPublisher is thread-safe - @GuardedBy("this)") + @GuardedBy("this") private RemoteEventPublisher remotePublisher; - @GuardedBy("this)") + @GuardedBy("this") private String currentChannelName; /** diff --git a/Core/src/org/sleuthkit/autopsy/geolocation/AbstractWaypointFetcher.java b/Core/src/org/sleuthkit/autopsy/geolocation/AbstractWaypointFetcher.java index e923d6dbd6..2c33d054cb 100755 --- a/Core/src/org/sleuthkit/autopsy/geolocation/AbstractWaypointFetcher.java +++ b/Core/src/org/sleuthkit/autopsy/geolocation/AbstractWaypointFetcher.java @@ -257,6 +257,6 @@ abstract class AbstractWaypointFetcher implements WaypointBuilder.WaypointFilter return waypointMostRecent; } - return null; + return -1L; } } diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties index d9fb142cf4..e7ed2dedd7 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties @@ -94,8 +94,6 @@ HashDbIngestModule.fileReadErrorMsg=Read Error: {0} HashDbIngestModule.calcHashValueErr=Error encountered while calculating the hash value for {0} ({1}). HashDbIngestModule.hashLookupErrorMsg=Hash Lookup Error: {0} HashDbIngestModule.settingKnownBadStateErr=Error encountered while setting notable state for {0}. -HashDbIngestModule.lookingUpKnownBadHashValueErr=Error encountered while looking up notable hash value for {0}. -HashDbIngestModule.lookingUpKnownHashValueErr=Error encountered while looking up known hash value for {0}. HashDbIngestModule.postToBB.fileName=File Name HashDbIngestModule.postToBB.md5Hash=MD5 Hash HashDbIngestModule.postToBB.hashsetName=Hash Set Name @@ -145,8 +143,6 @@ HashDbManager.hashDbFileExistsExceptionMsg=A file already exists at\n{0} HashDbManager.hashDbAlreadyAddedExceptionMsg=The hash set at\n{0}\nhas already been created or imported. HashDbManager.illegalHashDbFileNameExtensionMsg=The hash set file name must have a .{0} extension. HashDbManager.moduleErr=Module Error -HashDbManager.knownBad.text=Notable -HashDbManager.known.text=Known HashDbManager.fileNameExtensionFilter.title=Hash Set File HashDbSearchAction.dlgMsg.title=File Search by MD5 Hash HashDbSearchAction.getName.text=Hash Search @@ -162,8 +158,6 @@ AddContentToHashDbAction.multipleSelectionNameEmpty=Add Files to Hash Set (Empty HashDbManager.ingestRunningExceptionMsg=Ingest is ongoing; this service will be unavailable until it finishes. 
HashDbManager.saveErrorExceptionMsg=Error saving hash configuration HashLookupModuleSettingsPanel.alwaysCalcHashesCheckbox.text=Calculate MD5 even if no hash set is selected -HashLookupModuleSettingsPanel.knownHashDbsLabel.text=Select known hash sets to use: -HashLookupModuleSettingsPanel.knownBadHashDbsLabel.text=Select notable hash sets to use: AddContentToHashDbAction.addFilesToHashSet.files=files AddContentToHashDbAction.addFilesToHashSet.file=file HashDbManager.errCreatingIndex.title=Error creating index @@ -241,3 +235,7 @@ AddHashValuesToDatabaseDialog.okButton.text_2=OK HashDbImportDatabaseDialog.saveInUserConfigFolderCheckbox.text=Copy hash set into user configuration folder HashDbImportDatabaseDialog.saveInUserConfigFolderCheckbox.toolTipText=In Live Triage situations, this option ensures that path to the hash set will be valid HashLookupSettingsPanel.indexPathLabel.text= +HashLookupModuleSettingsPanel.hashDbsLabel.text=Select hash sets to use: +HashDbCreateDatabaseDialog.noChangeRadioButton.text=No Change +HashDbImportDatabaseDialog.noChangeRadioButton.toolTipText= +HashDbImportDatabaseDialog.noChangeRadioButton.text=No Change diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties-MERGED index 44057d0016..853a23d0db 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties-MERGED @@ -9,6 +9,7 @@ HashDbImportDatabaseDialog.missingOrg=An organization must be selected HashDbImportDatabaseDialog.missingVersion=A version must be entered HashDbImportDatabaseDialog.mustEnterHashSetNameMsg=A hash set name must be entered. HashDbImportDatabaseDialog.populateOrgsError.message=Failure loading organizations. +HashDbIngestModule.complete.noChangesFound=No Change items found: # {0} - File name HashDbIngestModule.dialogTitle.errorFindingArtifacts=Error Finding Artifacts: {0} # {0} - File name @@ -16,10 +17,21 @@ HashDbIngestModule.errorMessage.lookingForFileArtifacts=Error encountered while HashDbIngestModule.indexError.message=Failed to index hashset hit artifact for keyword search. HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn=Notable file search will not be executed. HashDbIngestModule.knownFileSearchWillNotExecuteWarn=Known file search will not be executed. +# {0} - fileName +HashDbIngestModule.lookingUpKnownBadHashValueErr=Error encountered while looking up notable hash value for {0}. +# {0} - fileName +HashDbIngestModule.lookingUpKnownHashValueErr=Error encountered while looking up known hash value for {0}. +# {0} - fileName +HashDbIngestModule.lookingUpNoChangeHashValueErr=Error encountered while looking up no change hash value for {0}. +HashDbIngestModule.noChangeFileSearchWillNotExecuteWarn='No Change' file search will not be executed. +HashDbIngestModule.noChangeHashDbSetMsg=No 'No Change' hash set. HashDbIngestModule.noKnownBadHashDbSetMsg=No notable hash set. HashDbIngestModule.noKnownHashDbSetMsg=No known hash set. HashDbManager.CentralRepoHashDb.orgError=Error loading organization HashDbManager.centralRepoLoadError.message=Error loading central repository hash sets +HashDbManager.known.text=Known +HashDbManager.knownBad.text=Notable +HashDbManager.noChange.text=No Change # {0} - hash set name HashDbManager.noDbPath.message=Couldn't get valid hash set path for: {0} HashDbSearchAction.noOpenCase.errMsg=No open case available. 
@@ -49,7 +61,10 @@ ImportCentralRepoDbProgressDialog.errorParsingFile.message=Error parsing hash se ImportCentralRepoDbProgressDialog.linesProcessed.message=\ hashes processed ImportCentralRepoDbProgressDialog.title.text=Central Repository Import Progress OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Hash Set ingest module. \n\nThe ingest module analyzes files in the disk image and marks them as "known" (based on NSRL hashset lookup for "known" files) and "bad / interesting" (based on one or more hash sets supplied by the user).\n\nThe module also contains additional non-ingest tools that are integrated in the GUI, such as file lookup by hash and hash set configuration. +OpenIDE-Module-Long-Description=\ + Hash Set ingest module. \n\n\ + The ingest module analyzes files in the disk image and marks them as "known" (based on NSRL hashset lookup for "known" files) and "bad / interesting" (based on one or more hash sets supplied by the user).\n\n\ + The module also contains additional non-ingest tools that are integrated in the GUI, such as file lookup by hash and hash set configuration. OpenIDE-Module-Name=HashDatabases OptionsCategory_Name_HashDatabase=Hash Sets OptionsCategory_Keywords_HashDatabase=Hash Sets @@ -141,8 +156,6 @@ HashDbIngestModule.fileReadErrorMsg=Read Error: {0} HashDbIngestModule.calcHashValueErr=Error encountered while calculating the hash value for {0} ({1}). HashDbIngestModule.hashLookupErrorMsg=Hash Lookup Error: {0} HashDbIngestModule.settingKnownBadStateErr=Error encountered while setting notable state for {0}. -HashDbIngestModule.lookingUpKnownBadHashValueErr=Error encountered while looking up notable hash value for {0}. -HashDbIngestModule.lookingUpKnownHashValueErr=Error encountered while looking up known hash value for {0}. HashDbIngestModule.postToBB.fileName=File Name HashDbIngestModule.postToBB.md5Hash=MD5 Hash HashDbIngestModule.postToBB.hashsetName=Hash Set Name @@ -178,7 +191,10 @@ HashDbSearchThread.name.searching=Searching HashDbSearchThread.noMoreFilesWithMD5Msg=No other files with the same MD5 hash were found. ModalNoButtons.indexingDbsTitle=Indexing hash sets ModalNoButtons.indexingDbTitle=Indexing hash set -ModalNoButtons.exitHashDbIndexingMsg=You are about to exit out of indexing your hash sets. \nThe generated index will be left unusable. If you choose to continue,\nplease delete the corresponding -md5.idx file in the hash folder.\nExit indexing? +ModalNoButtons.exitHashDbIndexingMsg=You are about to exit out of indexing your hash sets. \n\ +The generated index will be left unusable. If you choose to continue,\n\ + please delete the corresponding -md5.idx file in the hash folder.\n\ + Exit indexing? ModalNoButtons.dlgTitle.unfinishedIndexing=Unfinished Indexing ModalNoButtons.indexThis.currentlyIndexing1Db=Currently indexing 1 hash set ModalNoButtons.indexThese.currentlyIndexing1OfNDbs=Currently indexing 1 of {0} @@ -189,8 +205,6 @@ HashDbManager.hashDbFileExistsExceptionMsg=A file already exists at\n{0} HashDbManager.hashDbAlreadyAddedExceptionMsg=The hash set at\n{0}\nhas already been created or imported. HashDbManager.illegalHashDbFileNameExtensionMsg=The hash set file name must have a .{0} extension. 
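The "No Change" strings above pair with a third KnownFilesType constant that the hash set dialogs and ingest module below switch on. The enum itself lives in HashDbManager.HashDb and is not reproduced in this excerpt; the sketch below is only a plausible shape inferred from the calls that do appear here (getFileKnown(), isDefaultInboxMessages(), isInboxMessagesAllowed()). The field names and the NO_CHANGE mappings are assumptions, and the KNOWN/KNOWN_BAD flags are inferred from the previously hard-coded checkbox behavior.

import org.sleuthkit.datamodel.TskData;

// Assumed shape only; the real enum is HashDbManager.HashDb.KnownFilesType.
enum KnownFilesTypeSketch {
    KNOWN(TskData.FileKnown.KNOWN, false, false),
    KNOWN_BAD(TskData.FileKnown.BAD, true, true),
    NO_CHANGE(TskData.FileKnown.UNKNOWN, true, true); // NO_CHANGE mapping is a guess

    private final TskData.FileKnown fileKnown;
    private final boolean defaultInboxMessages;
    private final boolean inboxMessagesAllowed;

    KnownFilesTypeSketch(TskData.FileKnown fileKnown, boolean defaultInboxMessages, boolean inboxMessagesAllowed) {
        this.fileKnown = fileKnown;
        this.defaultInboxMessages = defaultInboxMessages;
        this.inboxMessagesAllowed = inboxMessagesAllowed;
    }

    TskData.FileKnown getFileKnown() {
        return fileKnown;
    }

    boolean isDefaultInboxMessages() {
        return defaultInboxMessages;
    }

    boolean isInboxMessagesAllowed() {
        return inboxMessagesAllowed;
    }
}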
HashDbManager.moduleErr=Module Error -HashDbManager.knownBad.text=Notable -HashDbManager.known.text=Known HashDbManager.fileNameExtensionFilter.title=Hash Set File HashDbSearchAction.dlgMsg.title=File Search by MD5 Hash HashDbSearchAction.getName.text=Hash Search @@ -205,13 +219,7 @@ AddContentToHashDbAction.singleSelectionNameEmpty=Add File to Hash Set (Empty Fi AddContentToHashDbAction.multipleSelectionNameEmpty=Add Files to Hash Set (Empty File) HashDbManager.ingestRunningExceptionMsg=Ingest is ongoing; this service will be unavailable until it finishes. HashDbManager.saveErrorExceptionMsg=Error saving hash configuration -HashLookupSettingsPanel.jButton3.text=Import Hash Set -HashLookupSettingsPanel.jLabel6.text=Type: -HashLookupSettingsPanel.jLabel4.text=Location: -HashLookupSettingsPanel.jLabel2.text=Name: HashLookupModuleSettingsPanel.alwaysCalcHashesCheckbox.text=Calculate MD5 even if no hash set is selected -HashLookupModuleSettingsPanel.knownHashDbsLabel.text=Select known hash sets to use: -HashLookupModuleSettingsPanel.knownBadHashDbsLabel.text=Select notable hash sets to use: AddContentToHashDbAction.addFilesToHashSet.files=files AddContentToHashDbAction.addFilesToHashSet.file=file HashDbManager.errCreatingIndex.title=Error creating index @@ -289,3 +297,7 @@ AddHashValuesToDatabaseDialog.okButton.text_2=OK HashDbImportDatabaseDialog.saveInUserConfigFolderCheckbox.text=Copy hash set into user configuration folder HashDbImportDatabaseDialog.saveInUserConfigFolderCheckbox.toolTipText=In Live Triage situations, this option ensures that path to the hash set will be valid HashLookupSettingsPanel.indexPathLabel.text= +HashLookupModuleSettingsPanel.hashDbsLabel.text=Select hash sets to use: +HashDbCreateDatabaseDialog.noChangeRadioButton.text=No Change +HashDbImportDatabaseDialog.noChangeRadioButton.toolTipText= +HashDbImportDatabaseDialog.noChangeRadioButton.text=No Change diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle_ja.properties b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle_ja.properties index 3f5ea43a61..c800aca094 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle_ja.properties +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle_ja.properties @@ -216,8 +216,6 @@ HashLookupSettingsPanel.jLabel6.text=\u30bf\u30a4\u30d7: HashLookupSettingsPanel.jLabel4.text=\u5834\u6240: HashLookupSettingsPanel.jLabel2.text=\u540d\u524d: HashLookupModuleSettingsPanel.alwaysCalcHashesCheckbox.text=\u30cf\u30c3\u30b7\u30e5\u30bb\u30c3\u30c8\u304c\u9078\u629e\u3055\u308c\u3066\u3044\u306a\u3044\u5834\u5408\u3067\u3082MD5\u3092\u8a08\u7b97 -HashLookupModuleSettingsPanel.knownHashDbsLabel.text=\u4f7f\u7528\u3059\u308b\u65e2\u77e5\u30cf\u30c3\u30b7\u30e5\u30bb\u30c3\u30c8\u3092\u9078\u629e: -HashLookupModuleSettingsPanel.knownBadHashDbsLabel.text=\u4f7f\u7528\u3059\u308b\u9855\u8457\u306a\u30cf\u30c3\u30b7\u30e5\u30bb\u30c3\u30c8\u3092\u9078\u629e: AddContentToHashDbAction.addFilesToHashSet.files=\u30d5\u30a1\u30a4\u30eb AddContentToHashDbAction.addFilesToHashSet.file=\u30d5\u30a1\u30a4\u30eb HashDbManager.errCreatingIndex.title=\u7d22\u5f15\u306e\u4f5c\u6210\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f @@ -295,3 +293,6 @@ AddHashValuesToDatabaseDialog.okButton.text_2=OK HashDbImportDatabaseDialog.saveInUserConfigFolderCheckbox.text=\u30cf\u30c3\u30b7\u30e5\u30bb\u30c3\u30c8\u3092\u30e6\u30fc\u30b6\u30fc\u69cb\u6210\u30d5\u30a1\u30a4\u30eb\u306b\u30b3\u30d4\u30fc 
HashDbImportDatabaseDialog.saveInUserConfigFolderCheckbox.toolTipText=\u30e9\u30a4\u30d6\u30c8\u30ea\u30a2\u30fc\u30b8\u306e\u72b6\u6cc1\u3067\u306f\u3001\u3053\u306e\u30aa\u30d7\u30b7\u30e7\u30f3\u306b\u3088\u3063\u3066\u30cf\u30c3\u30b7\u30e5\u30bb\u30c3\u30c8\u3078\u306e\u30d1\u30b9\u304c\u6709\u52b9\u3067\u3042\u308b\u3053\u3068\u304c\u4fdd\u8a3c\u3055\u308c\u307e\u3059\u3002 HashLookupSettingsPanel.indexPathLabel.text= +HashLookupModuleSettingsPanel.hashDbsLabel.text=\u4f7f\u7528\u3059\u308b\u65e2\u77e5\u30cf\u30c3\u30b7\u30e5\u30bb\u30c3\u30c8\u3092\u9078\u629e: +HashDbCreateDatabaseDialog.noChangeRadioButton.text=\u9855\u8457 +HashDbImportDatabaseDialog.noChangeRadioButton.text=\u9855\u8457 diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbCreateDatabaseDialog.form b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbCreateDatabaseDialog.form index 7c74b4b583..b274f93d3c 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbCreateDatabaseDialog.form +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbCreateDatabaseDialog.form @@ -78,6 +78,7 @@ + @@ -125,19 +126,21 @@ - + - - - - + + + + + + @@ -313,5 +316,18 @@ + + + + + + + + + + + + + diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbCreateDatabaseDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbCreateDatabaseDialog.java index 5470b385fb..63a8c84e93 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbCreateDatabaseDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbCreateDatabaseDialog.java @@ -198,6 +198,7 @@ final class HashDbCreateDatabaseDialog extends javax.swing.JDialog { lbOrg = new javax.swing.JLabel(); orgComboBox = new javax.swing.JComboBox<>(); orgButton = new javax.swing.JButton(); + noChangeRadioButton = new javax.swing.JRadioButton(); setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE); @@ -292,6 +293,14 @@ final class HashDbCreateDatabaseDialog extends javax.swing.JDialog { } }); + buttonGroup1.add(noChangeRadioButton); + org.openide.awt.Mnemonics.setLocalizedText(noChangeRadioButton, org.openide.util.NbBundle.getMessage(HashDbCreateDatabaseDialog.class, "HashDbCreateDatabaseDialog.noChangeRadioButton.text")); // NOI18N + noChangeRadioButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + noChangeRadioButtonActionPerformed(evt); + } + }); + javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane()); getContentPane().setLayout(layout); layout.setHorizontalGroup( @@ -334,7 +343,8 @@ final class HashDbCreateDatabaseDialog extends javax.swing.JDialog { .addGap(32, 32, 32) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(knownRadioButton) - .addComponent(knownBadRadioButton))) + .addComponent(knownBadRadioButton) + .addComponent(noChangeRadioButton))) .addGroup(layout.createSequentialGroup() .addGap(12, 12, 12) .addComponent(jLabel2)) @@ -374,16 +384,18 @@ final class HashDbCreateDatabaseDialog extends javax.swing.JDialog { .addComponent(knownRadioButton) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(knownBadRadioButton) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() - 
.addComponent(sendIngestMessagesCheckbox) - .addGap(0, 0, Short.MAX_VALUE)) - .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addGap(0, 0, Short.MAX_VALUE) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(cancelButton) - .addComponent(okButton)))) + .addComponent(okButton))) + .addGroup(layout.createSequentialGroup() + .addComponent(noChangeRadioButton) + .addGap(24, 24, 24) + .addComponent(sendIngestMessagesCheckbox) + .addGap(0, 0, Short.MAX_VALUE))) .addContainerGap()) ); @@ -391,13 +403,13 @@ final class HashDbCreateDatabaseDialog extends javax.swing.JDialog { }// //GEN-END:initComponents private void knownRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_knownRadioButtonActionPerformed - sendIngestMessagesCheckbox.setSelected(false); - sendIngestMessagesCheckbox.setEnabled(false); + sendIngestMessagesCheckbox.setSelected(KnownFilesType.KNOWN.isDefaultInboxMessages()); + sendIngestMessagesCheckbox.setEnabled(KnownFilesType.KNOWN.isInboxMessagesAllowed()); }//GEN-LAST:event_knownRadioButtonActionPerformed private void knownBadRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_knownBadRadioButtonActionPerformed - sendIngestMessagesCheckbox.setSelected(true); - sendIngestMessagesCheckbox.setEnabled(true); + sendIngestMessagesCheckbox.setSelected(KnownFilesType.KNOWN_BAD.isDefaultInboxMessages()); + sendIngestMessagesCheckbox.setEnabled(KnownFilesType.KNOWN_BAD.isInboxMessagesAllowed()); }//GEN-LAST:event_knownBadRadioButtonActionPerformed private void cancelButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_cancelButtonActionPerformed @@ -476,15 +488,17 @@ final class HashDbCreateDatabaseDialog extends javax.swing.JDialog { } KnownFilesType type; - TskData.FileKnown fileKnown; + if (knownRadioButton.isSelected()) { type = KnownFilesType.KNOWN; - fileKnown = TskData.FileKnown.KNOWN; + } else if (noChangeRadioButton.isSelected()) { + type = KnownFilesType.NO_CHANGE; } else { type = KnownFilesType.KNOWN_BAD; - fileKnown = TskData.FileKnown.BAD; } + TskData.FileKnown fileKnown = type.getFileKnown(); + String errorMessage = NbBundle .getMessage(this.getClass(), "HashDbCreateDatabaseDialog.errMsg.hashDbCreationErr"); @@ -586,6 +600,11 @@ final class HashDbCreateDatabaseDialog extends javax.swing.JDialog { enableComponents(); }//GEN-LAST:event_centralRepoRadioButtonActionPerformed + private void noChangeRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_noChangeRadioButtonActionPerformed + sendIngestMessagesCheckbox.setSelected(KnownFilesType.NO_CHANGE.isDefaultInboxMessages()); + sendIngestMessagesCheckbox.setEnabled(KnownFilesType.NO_CHANGE.isInboxMessagesAllowed()); + }//GEN-LAST:event_noChangeRadioButtonActionPerformed + // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.ButtonGroup buttonGroup1; private javax.swing.JButton cancelButton; @@ -600,6 +619,7 @@ final class HashDbCreateDatabaseDialog extends javax.swing.JDialog { private javax.swing.JRadioButton knownBadRadioButton; private javax.swing.JRadioButton knownRadioButton; private javax.swing.JLabel lbOrg; + private javax.swing.JRadioButton noChangeRadioButton; private javax.swing.JButton okButton; private javax.swing.JButton orgButton; private javax.swing.JComboBox orgComboBox; diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.form 
b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.form index e285e99a12..13ed7fbf57 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.form +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.form @@ -29,7 +29,7 @@ - + @@ -54,10 +54,6 @@ - - - - @@ -76,7 +72,16 @@ - + + + + + + + + + + @@ -86,16 +91,17 @@ - - + + + @@ -113,52 +119,54 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - + - + - + @@ -367,5 +375,21 @@ + + + + + + + + + + + + + + + + diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java index a08f324a44..c854cb165d 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java @@ -182,6 +182,7 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog { centralRepoRadioButton = new javax.swing.JRadioButton(); jLabel4 = new javax.swing.JLabel(); saveInUserConfigFolderCheckbox = new javax.swing.JCheckBox(); + noChangeRadioButton = new javax.swing.JRadioButton(); setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE); @@ -291,6 +292,15 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog { org.openide.awt.Mnemonics.setLocalizedText(saveInUserConfigFolderCheckbox, org.openide.util.NbBundle.getMessage(HashDbImportDatabaseDialog.class, "HashDbImportDatabaseDialog.saveInUserConfigFolderCheckbox.text")); // NOI18N saveInUserConfigFolderCheckbox.setToolTipText(org.openide.util.NbBundle.getMessage(HashDbImportDatabaseDialog.class, "HashDbImportDatabaseDialog.saveInUserConfigFolderCheckbox.toolTipText")); // NOI18N + buttonGroup1.add(noChangeRadioButton); + org.openide.awt.Mnemonics.setLocalizedText(noChangeRadioButton, org.openide.util.NbBundle.getMessage(HashDbImportDatabaseDialog.class, "HashDbImportDatabaseDialog.noChangeRadioButton.text")); // NOI18N + noChangeRadioButton.setToolTipText(org.openide.util.NbBundle.getMessage(HashDbImportDatabaseDialog.class, "HashDbImportDatabaseDialog.noChangeRadioButton.toolTipText")); // NOI18N + noChangeRadioButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + noChangeRadioButtonActionPerformed(evt); + } + }); + javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane()); getContentPane().setLayout(layout); layout.setHorizontalGroup( @@ -315,9 +325,6 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog { .addComponent(openButton)))) .addGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() - .addComponent(sendIngestMessagesCheckbox) - .addGap(0, 0, Short.MAX_VALUE)) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addComponent(lbOrg) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) @@ -331,7 +338,13 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog { .addGap(40, 40, 40) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(versionTextField) - .addComponent(hashSetNameTextField))) + 
.addComponent(hashSetNameTextField)))) + .addGap(81, 81, 81)) + .addGroup(layout.createSequentialGroup() + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() + .addComponent(sendIngestMessagesCheckbox) + .addGap(0, 0, Short.MAX_VALUE)) .addGroup(layout.createSequentialGroup() .addGap(0, 0, Short.MAX_VALUE) .addComponent(okButton))) @@ -339,14 +352,15 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog { .addComponent(cancelButton)) .addGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(saveInUserConfigFolderCheckbox) .addComponent(jLabel2) - .addComponent(readOnlyCheckbox) .addGroup(layout.createSequentialGroup() .addGap(19, 19, 19) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(knownRadioButton) - .addComponent(knownBadRadioButton)))) + .addComponent(knownBadRadioButton) + .addComponent(noChangeRadioButton))) + .addComponent(saveInUserConfigFolderCheckbox) + .addComponent(readOnlyCheckbox)) .addGap(0, 0, Short.MAX_VALUE))) .addContainerGap()) ); @@ -361,44 +375,46 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog { .addComponent(databasePathTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(jLabel3) .addComponent(openButton)) + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(fileTypeRadioButton) + .addComponent(centralRepoRadioButton) + .addComponent(jLabel4)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(jLabel1) + .addComponent(hashSetNameTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(lbVersion) + .addComponent(versionTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addGap(5, 5, 5) + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(orgButton) + .addComponent(orgComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addComponent(lbOrg)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(jLabel2) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(knownRadioButton) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(knownBadRadioButton) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(noChangeRadioButton) + .addGap(5, 5, 5) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(fileTypeRadioButton) - .addComponent(centralRepoRadioButton) - .addComponent(jLabel4)) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - 
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(jLabel1) - .addComponent(hashSetNameTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(lbVersion) - .addComponent(versionTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) - .addGap(5, 5, 5) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(orgButton) - .addComponent(orgComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addComponent(lbOrg)) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(jLabel2) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(knownRadioButton) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(knownBadRadioButton) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(readOnlyCheckbox) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(sendIngestMessagesCheckbox) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(saveInUserConfigFolderCheckbox) - .addGap(0, 29, Short.MAX_VALUE)) + .addGap(0, 0, Short.MAX_VALUE)) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() - .addGap(0, 0, Short.MAX_VALUE) + .addGap(81, 81, 81) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(cancelButton) .addComponent(okButton)))) - .addContainerGap()) + .addGap(18, 18, 18)) ); pack(); @@ -436,13 +452,13 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog { }//GEN-LAST:event_openButtonActionPerformed private void knownRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_knownRadioButtonActionPerformed - sendIngestMessagesCheckbox.setSelected(false); - sendIngestMessagesCheckbox.setEnabled(false); + sendIngestMessagesCheckbox.setSelected(KnownFilesType.KNOWN.isDefaultInboxMessages()); + sendIngestMessagesCheckbox.setEnabled(KnownFilesType.KNOWN.isInboxMessagesAllowed()); }//GEN-LAST:event_knownRadioButtonActionPerformed private void knownBadRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_knownBadRadioButtonActionPerformed - sendIngestMessagesCheckbox.setSelected(true); - sendIngestMessagesCheckbox.setEnabled(true); + sendIngestMessagesCheckbox.setSelected(KnownFilesType.KNOWN_BAD.isDefaultInboxMessages()); + sendIngestMessagesCheckbox.setEnabled(KnownFilesType.KNOWN_BAD.isInboxMessagesAllowed()); }//GEN-LAST:event_knownBadRadioButtonActionPerformed private void cancelButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_cancelButtonActionPerformed @@ -531,6 +547,8 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog { KnownFilesType type; if (knownRadioButton.isSelected()) { type = KnownFilesType.KNOWN; + } else if (noChangeRadioButton.isSelected()) { + type = KnownFilesType.NO_CHANGE; } else { type = KnownFilesType.KNOWN_BAD; } @@ -629,6 +647,11 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog { enableComponents(); 
}//GEN-LAST:event_readOnlyCheckboxActionPerformed + private void noChangeRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_noChangeRadioButtonActionPerformed + sendIngestMessagesCheckbox.setSelected(KnownFilesType.NO_CHANGE.isDefaultInboxMessages()); + sendIngestMessagesCheckbox.setEnabled(KnownFilesType.NO_CHANGE.isInboxMessagesAllowed()); + }//GEN-LAST:event_noChangeRadioButtonActionPerformed + // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.ButtonGroup buttonGroup1; private javax.swing.JButton cancelButton; @@ -644,6 +667,7 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog { private javax.swing.JRadioButton knownRadioButton; private javax.swing.JLabel lbOrg; private javax.swing.JLabel lbVersion; + private javax.swing.JRadioButton noChangeRadioButton; private javax.swing.JButton okButton; private javax.swing.JButton openButton; private javax.swing.JButton orgButton; diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java index a562ab2a22..873f891ad0 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java @@ -24,7 +24,9 @@ import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Function; import java.util.logging.Level; +import java.util.stream.Stream; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; @@ -57,12 +59,26 @@ import org.sleuthkit.datamodel.TskException; @Messages({ "HashDbIngestModule.noKnownBadHashDbSetMsg=No notable hash set.", "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn=Notable file search will not be executed.", + "HashDbIngestModule.noChangeHashDbSetMsg=No 'No Change' hash set.", + "HashDbIngestModule.noChangeFileSearchWillNotExecuteWarn='No Change' file search will not be executed.", "HashDbIngestModule.noKnownHashDbSetMsg=No known hash set.", - "HashDbIngestModule.knownFileSearchWillNotExecuteWarn=Known file search will not be executed." 
-}) + "HashDbIngestModule.knownFileSearchWillNotExecuteWarn=Known file search will not be executed.", + "# {0} - fileName", "HashDbIngestModule.lookingUpKnownBadHashValueErr=Error encountered while looking up notable hash value for {0}.", + "# {0} - fileName", "HashDbIngestModule.lookingUpNoChangeHashValueErr=Error encountered while looking up no change hash value for {0}.", + "# {0} - fileName", "HashDbIngestModule.lookingUpKnownHashValueErr=Error encountered while looking up known hash value for {0}.",}) public class HashDbIngestModule implements FileIngestModule { private static final Logger logger = Logger.getLogger(HashDbIngestModule.class.getName()); + + private final Function knownBadLookupError + = (file) -> Bundle.HashDbIngestModule_lookingUpKnownBadHashValueErr(file.getName()); + + private final Function noChangeLookupError + = (file) -> Bundle.HashDbIngestModule_lookingUpNoChangeHashValueErr(file.getName()); + + private final Function knownLookupError + = (file) -> Bundle.HashDbIngestModule_lookingUpKnownHashValueErr(file.getName()); + private static final int MAX_COMMENT_SIZE = 500; private final IngestServices services = IngestServices.getInstance(); private final SleuthkitCase skCase; @@ -70,6 +86,7 @@ public class HashDbIngestModule implements FileIngestModule { private final HashLookupModuleSettings settings; private final List knownBadHashSets = new ArrayList<>(); private final List knownHashSets = new ArrayList<>(); + private final List noChangeHashSets = new ArrayList<>(); private long jobId; private static final HashMap totalsForIngestJobs = new HashMap<>(); private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter(); @@ -81,6 +98,7 @@ public class HashDbIngestModule implements FileIngestModule { private static class IngestJobTotals { private final AtomicLong totalKnownBadCount = new AtomicLong(0); + private final AtomicLong totalNoChangeCount = new AtomicLong(0); private final AtomicLong totalCalctime = new AtomicLong(0); private final AtomicLong totalLookuptime = new AtomicLong(0); } @@ -114,8 +132,8 @@ public class HashDbIngestModule implements FileIngestModule { if (!hashDbManager.verifyAllDatabasesLoadedCorrectly()) { throw new IngestModuleException("Could not load all hash sets"); } - updateEnabledHashSets(hashDbManager.getKnownBadFileHashSets(), knownBadHashSets); - updateEnabledHashSets(hashDbManager.getKnownFileHashSets(), knownHashSets); + + initializeHashsets(hashDbManager.getAllHashSets()); if (refCounter.incrementAndGet(jobId) == 1) { // initialize job totals @@ -128,6 +146,13 @@ public class HashDbIngestModule implements FileIngestModule { Bundle.HashDbIngestModule_noKnownBadHashDbSetMsg(), Bundle.HashDbIngestModule_knownBadFileSearchWillNotExecuteWarn())); } + + if (noChangeHashSets.isEmpty()) { + services.postMessage(IngestMessage.createWarningMessage( + HashLookupModuleFactory.getModuleName(), + Bundle.HashDbIngestModule_noChangeHashDbSetMsg(), + Bundle.HashDbIngestModule_noChangeFileSearchWillNotExecuteWarn())); + } if (knownHashSets.isEmpty()) { services.postMessage(IngestMessage.createWarningMessage( @@ -139,18 +164,29 @@ public class HashDbIngestModule implements FileIngestModule { } /** - * Cycle through list of hashsets and return the subset that is enabled. + * Cycle through list of hashsets and place each HashDB in the appropriate + * list based on KnownFilesType. * - * @param allHashSets List of all hashsets from DB manager - * @param enabledHashSets List of enabled ones to return. 
+ * @param allHashSets List of all hashsets from DB manager */ - private void updateEnabledHashSets(List allHashSets, List enabledHashSets) { - enabledHashSets.clear(); + private void initializeHashsets(List allHashSets) { for (HashDb db : allHashSets) { if (settings.isHashSetEnabled(db)) { try { if (db.isValid()) { - enabledHashSets.add(db); + switch (db.getKnownFilesType()) { + case KNOWN: + knownHashSets.add(db); + break; + case KNOWN_BAD: + knownBadHashSets.add(db); + break; + case NO_CHANGE: + noChangeHashSets.add(db); + break; + default: + throw new TskCoreException("Unknown KnownFilesType: " + db.getKnownFilesType()); + } } } catch (TskCoreException ex) { logger.log(Level.WARNING, "Error getting index status for " + db.getDisplayName() + " hash set", ex); //NON-NLS @@ -174,128 +210,37 @@ public class HashDbIngestModule implements FileIngestModule { return ProcessResult.ERROR; } - // Skip unallocated space files. - if ((file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) - || file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK))) { - return ProcessResult.OK; - } - - /* - * Skip directories. One reason for this is because we won't accurately - * calculate hashes of NTFS directories that have content that spans the - * IDX_ROOT and IDX_ALLOC artifacts. So we disable that until a solution - * for it is developed. - */ - if (file.isDir()) { - return ProcessResult.OK; - } - - // bail out if we have no hashes set - if ((knownHashSets.isEmpty()) && (knownBadHashSets.isEmpty()) && (!settings.shouldCalculateHashes())) { + if (shouldSkip(file)) { return ProcessResult.OK; } // Safely get a reference to the totalsForIngestJobs object IngestJobTotals totals = getTotalsForIngestJobs(jobId); - // calc hash value - String name = file.getName(); - long fileId = file.getId(); - String md5Hash = file.getMd5Hash(); - if (md5Hash == null || md5Hash.isEmpty()) { - try { - TimingMetric metric = HealthMonitor.getTimingMetric("Disk Reads: Hash calculation"); - long calcstart = System.currentTimeMillis(); - md5Hash = HashUtility.calculateMd5Hash(file); - if (file.getSize() > 0) { - // Surprisingly, the hash calculation does not seem to be correlated that - // strongly with file size until the files get large. - // Only normalize if the file size is greater than ~1MB. 
- if (file.getSize() < 1000000) { - HealthMonitor.submitTimingMetric(metric); - } else { - // In testing, this normalization gave reasonable resuls - HealthMonitor.submitNormalizedTimingMetric(metric, file.getSize() / 500000); - } - } - file.setMd5Hash(md5Hash); - long delta = (System.currentTimeMillis() - calcstart); - totals.totalCalctime.addAndGet(delta); - - } catch (IOException ex) { - logger.log(Level.WARNING, String.format("Error calculating hash of file '%s' (id=%d).", name, fileId), ex); //NON-NLS - services.postMessage(IngestMessage.createErrorMessage( - HashLookupModuleFactory.getModuleName(), - NbBundle.getMessage(this.getClass(), "HashDbIngestModule.fileReadErrorMsg", name), - NbBundle.getMessage(this.getClass(), "HashDbIngestModule.calcHashValueErr", - file.getParentPath() + file.getName(), - file.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC)?"Allocated File" : "Deleted File"))); - return ProcessResult.ERROR; - } + // calc hash value + String md5Hash = getHash(file, totals); + if (md5Hash == null) { + return ProcessResult.ERROR; } - // look up in notable first - boolean foundBad = false; + // the processing result of handling this file ProcessResult ret = ProcessResult.OK; - for (HashDb db : knownBadHashSets) { - try { - long lookupstart = System.currentTimeMillis(); - HashHitInfo hashInfo = db.lookupMD5(file); - if (null != hashInfo) { - foundBad = true; - totals.totalKnownBadCount.incrementAndGet(); - file.setKnown(TskData.FileKnown.BAD); + // look up in notable first + FindInHashsetsResult knownBadResult = findInHashsets(file, totals.totalKnownBadCount, + totals.totalLookuptime, knownBadHashSets, TskData.FileKnown.BAD, knownBadLookupError); - String hashSetName = db.getDisplayName(); + boolean foundBad = knownBadResult.isFound(); + if (knownBadResult.isError()) { + ret = ProcessResult.ERROR; + } - String comment = ""; - ArrayList comments = hashInfo.getComments(); - int i = 0; - for (String c : comments) { - if (++i > 1) { - comment += " "; - } - comment += c; - if (comment.length() > MAX_COMMENT_SIZE) { - comment = comment.substring(0, MAX_COMMENT_SIZE) + "..."; - break; - } - } + // look up no change items next + FindInHashsetsResult noChangeResult = findInHashsets(file, totals.totalNoChangeCount, + totals.totalLookuptime, noChangeHashSets, TskData.FileKnown.UNKNOWN, noChangeLookupError); - /* - * We have a match. Now create an artifact if it is - * determined that one hasn't been created yet. 
- */ - List attributesList = new ArrayList<>(); - attributesList.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME, HashLookupModuleFactory.getModuleName(), hashSetName)); - try { - org.sleuthkit.datamodel.Blackboard tskBlackboard = skCase.getBlackboard(); - if (tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT, attributesList) == false) { - postHashSetHitToBlackboard(file, md5Hash, hashSetName, comment, db.getSendIngestMessages()); - } - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, String.format( - "A problem occurred while checking for existing artifacts for file '%s' (id=%d).", name, fileId), ex); //NON-NLS - services.postMessage(IngestMessage.createErrorMessage( - HashLookupModuleFactory.getModuleName(), - Bundle.HashDbIngestModule_dialogTitle_errorFindingArtifacts(name), - Bundle.HashDbIngestModule_errorMessage_lookingForFileArtifacts(name))); - ret = ProcessResult.ERROR; - } - } - long delta = (System.currentTimeMillis() - lookupstart); - totals.totalLookuptime.addAndGet(delta); - - } catch (TskException ex) { - logger.log(Level.WARNING, String.format( - "Couldn't lookup notable hash for file '%s' (id=%d) - see sleuthkit log for details", name, fileId), ex); //NON-NLS - services.postMessage(IngestMessage.createErrorMessage( - HashLookupModuleFactory.getModuleName(), - NbBundle.getMessage(this.getClass(), "HashDbIngestModule.hashLookupErrorMsg", name), - NbBundle.getMessage(this.getClass(), "HashDbIngestModule.lookingUpKnownBadHashValueErr", name))); - ret = ProcessResult.ERROR; - } + if (noChangeResult.isError()) { + ret = ProcessResult.ERROR; } // If the file is not in the notable sets, search for it in the known sets. @@ -313,12 +258,7 @@ public class HashDbIngestModule implements FileIngestModule { totals.totalLookuptime.addAndGet(delta); } catch (TskException ex) { - logger.log(Level.WARNING, String.format( - "Couldn't lookup known hash for file '%s' (id=%d) - see sleuthkit log for details", name, fileId), ex); //NON-NLS - services.postMessage(IngestMessage.createErrorMessage( - HashLookupModuleFactory.getModuleName(), - NbBundle.getMessage(this.getClass(), "HashDbIngestModule.hashLookupErrorMsg", name), - NbBundle.getMessage(this.getClass(), "HashDbIngestModule.lookingUpKnownHashValueErr", name))); + reportLookupError(ex, file, knownLookupError); ret = ProcessResult.ERROR; } } @@ -327,6 +267,245 @@ public class HashDbIngestModule implements FileIngestModule { return ret; } + /** + * Returns true if this file should be skipped for processing. + * + * @param file The file to potentially skip. + * + * @return True if this file should be skipped. + */ + private boolean shouldSkip(AbstractFile file) { + // Skip unallocated space files. + if ((file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) + || file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK))) { + return true; + } + + /* + * Skip directories. One reason for this is because we won't accurately + * calculate hashes of NTFS directories that have content that spans the + * IDX_ROOT and IDX_ALLOC artifacts. So we disable that until a solution + * for it is developed. + */ + if (file.isDir()) { + return true; + } + + // bail out if we have no hashes set + if ((knownHashSets.isEmpty()) && (knownBadHashSets.isEmpty()) && (!settings.shouldCalculateHashes())) { + return true; + } + + return false; + } + + /** + * Reports an error when an issue is encountered looking up a file. + * + * @param ex The exception thrown in the error. 
+ * @param file The file for which this error applies. + * @param lookupErrorMessage The function that generates an error message + * specific to which piece of the ingest + * processing failed. + */ + private void reportLookupError(TskException ex, AbstractFile file, Function lookupErrorMessage) { + logger.log(Level.WARNING, String.format( + "Couldn't lookup notable hash for file '%s' (id=%d) - see sleuthkit log for details", file.getName(), file.getId()), ex); //NON-NLS + services.postMessage(IngestMessage.createErrorMessage( + HashLookupModuleFactory.getModuleName(), + NbBundle.getMessage(this.getClass(), "HashDbIngestModule.hashLookupErrorMsg", file.getName()), + lookupErrorMessage.apply(file))); + } + + /** + * The result of attempting to find a file in a list of HashDB objects. + */ + private static class FindInHashsetsResult { + + private final boolean found; + private final boolean error; + + FindInHashsetsResult(boolean found, boolean error) { + this.found = found; + this.error = error; + } + + /** + * Returns true if the file was found in the HashDB. + * + * @return True if the file was found in the HashDB. + */ + boolean isFound() { + return found; + } + + /** + * Returns true if there was an error in the process of finding a file + * in a HashDB. + * + * @return True if there was an error in the process of finding a file + * in a HashDB. + */ + boolean isError() { + return error; + } + } + + /** + * Attempts to find an abstract file in a list of HashDB objects. + * + * @param file The file to find. + * @param totalCount The total cound of files found in this type + * @param totalLookupTime The counter tracking the total amount of run + * time for this operation. + * @param hashSets The HashDB objects to cycle through looking for + * a hash hit. + * @param statusIfFound The FileKnown status to set on the file if the + * file is found in the hashSets. + * @param lookupErrorMessage The function that generates a message should + * there be an error in looking up the file in the + * hashSets. + * + * @return Whether or not the file was found and whether or not there was an + * error during the operation. + */ + private FindInHashsetsResult findInHashsets(AbstractFile file, AtomicLong totalCount, AtomicLong totalLookupTime, + List hashSets, TskData.FileKnown statusIfFound, Function lookupErrorMessage) { + + boolean found = false; + boolean wasError = false; + for (HashDb db : hashSets) { + try { + long lookupstart = System.currentTimeMillis(); + HashHitInfo hashInfo = db.lookupMD5(file); + if (null != hashInfo) { + found = true; + + totalCount.incrementAndGet(); + file.setKnown(statusIfFound); + String hashSetName = db.getDisplayName(); + String comment = generateComment(hashInfo); + if (!createArtifactIfNotExists(hashSetName, file, comment, db)) { + wasError = true; + } + } + long delta = (System.currentTimeMillis() - lookupstart); + totalLookupTime.addAndGet(delta); + + } catch (TskException ex) { + reportLookupError(ex, file, lookupErrorMessage); + wasError = true; + } + } + + return new FindInHashsetsResult(found, wasError); + } + + /** + * Generates a formatted comment. + * + * @param hashInfo The HashHitInfo. + * + * @return The formatted comment. 
+ */ + private String generateComment(HashHitInfo hashInfo) { + String comment = ""; + ArrayList comments = hashInfo.getComments(); + int i = 0; + for (String c : comments) { + if (++i > 1) { + comment += " "; + } + comment += c; + if (comment.length() > MAX_COMMENT_SIZE) { + comment = comment.substring(0, MAX_COMMENT_SIZE) + "..."; + break; + } + } + return comment; + } + + /** + * Creates a BlackboardArtifact if artifact does not already exist. + * + * @param hashSetName The name of the hashset found. + * @param file The file that had a hash hit. + * @param comment The comment to associate with this artifact. + * @param db the database in which this file was found. + * + * @return True if the operation occurred successfully and without error. + */ + private boolean createArtifactIfNotExists(String hashSetName, AbstractFile file, String comment, HashDb db) { + /* + * We have a match. Now create an artifact if it is determined that one + * hasn't been created yet. + */ + List attributesList = new ArrayList<>(); + attributesList.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME, HashLookupModuleFactory.getModuleName(), hashSetName)); + try { + Blackboard tskBlackboard = skCase.getBlackboard(); + if (tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT, attributesList) == false) { + postHashSetHitToBlackboard(file, file.getMd5Hash(), hashSetName, comment, db.getSendIngestMessages()); + } + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format( + "A problem occurred while checking for existing artifacts for file '%s' (id=%d).", file.getName(), file.getId()), ex); //NON-NLS + services.postMessage(IngestMessage.createErrorMessage( + HashLookupModuleFactory.getModuleName(), + Bundle.HashDbIngestModule_dialogTitle_errorFindingArtifacts(file.getName()), + Bundle.HashDbIngestModule_errorMessage_lookingForFileArtifacts(file.getName()))); + return false; + } + return true; + } + + /** + * Retrieves the md5 hash for a file or generates one if no one exists on + * the file. + * + * @param file The file in order to determine the hash. + * @param totals The timing metrics for this process. + * + * @return The found or determined md5 hash or null if none could be + * determined. + */ + private String getHash(AbstractFile file, IngestJobTotals totals) { + String md5Hash = file.getMd5Hash(); + if (md5Hash != null && md5Hash.isEmpty()) { + return md5Hash; + } + + try { + TimingMetric metric = HealthMonitor.getTimingMetric("Disk Reads: Hash calculation"); + long calcstart = System.currentTimeMillis(); + md5Hash = HashUtility.calculateMd5Hash(file); + if (file.getSize() > 0) { + // Surprisingly, the hash calculation does not seem to be correlated that + // strongly with file size until the files get large. + // Only normalize if the file size is greater than ~1MB. 
+ if (file.getSize() < 1000000) { + HealthMonitor.submitTimingMetric(metric); + } else { + // In testing, this normalization gave reasonable resuls + HealthMonitor.submitNormalizedTimingMetric(metric, file.getSize() / 500000); + } + } + file.setMd5Hash(md5Hash); + long delta = (System.currentTimeMillis() - calcstart); + totals.totalCalctime.addAndGet(delta); + return md5Hash; + } catch (IOException ex) { + logger.log(Level.WARNING, String.format("Error calculating hash of file '%s' (id=%d).", file.getName(), file.getId()), ex); //NON-NLS + services.postMessage(IngestMessage.createErrorMessage( + HashLookupModuleFactory.getModuleName(), + NbBundle.getMessage(this.getClass(), "HashDbIngestModule.fileReadErrorMsg", file.getName()), + NbBundle.getMessage(this.getClass(), "HashDbIngestModule.calcHashValueErr", + file.getParentPath() + file.getName(), + file.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC) ? "Allocated File" : "Deleted File"))); + return null; + } + } + /** * Post a hash set hit to the blackboard. * @@ -413,35 +592,35 @@ public class HashDbIngestModule implements FileIngestModule { * @param knownBadHashSets The list of hash sets for "known bad" files. * @param knownHashSets The list of hash sets for "known" files. */ - private static synchronized void postSummary(long jobId, - List knownBadHashSets, List knownHashSets) { + @Messages("HashDbIngestModule.complete.noChangesFound=No Change items found:") + private static synchronized void postSummary(long jobId, List knownBadHashSets, + List noChangeHashSets, List knownHashSets) { + IngestJobTotals jobTotals = getTotalsForIngestJobs(jobId); totalsForIngestJobs.remove(jobId); - if ((!knownBadHashSets.isEmpty()) || (!knownHashSets.isEmpty())) { + if ((!knownBadHashSets.isEmpty()) || (!knownHashSets.isEmpty()) || (!noChangeHashSets.isEmpty())) { StringBuilder detailsSb = new StringBuilder(); //details - detailsSb.append(""); //NON-NLS + detailsSb.append( + "
" + + "" + + "" + + + "" + + "" + + + "\n" + + + "\n
" + NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.complete.knownBadsFound") + "" + jobTotals.totalKnownBadCount.get() + "
" + Bundle.HashDbIngestModule_complete_noChangesFound() + "" + jobTotals.totalNoChangeCount.get() + "
" + NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.complete.totalCalcTime") + + "" + jobTotals.totalCalctime.get() + "
" + NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.complete.totalLookupTime") + + "" + jobTotals.totalLookuptime.get() + "
" + - detailsSb.append("") //NON-NLS - .append(NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.complete.knownBadsFound")) - .append(""); //NON-NLS - detailsSb.append("").append(jobTotals.totalKnownBadCount.get()).append(""); //NON-NLS - - detailsSb.append("") //NON-NLS - .append(NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.complete.totalCalcTime")) - .append("").append(jobTotals.totalCalctime.get()).append("\n"); //NON-NLS - detailsSb.append("") //NON-NLS - .append(NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.complete.totalLookupTime")) - .append("").append(jobTotals.totalLookuptime.get()).append("\n"); //NON-NLS - detailsSb.append(""); //NON-NLS - - detailsSb.append("

") //NON-NLS - .append(NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.complete.databasesUsed")) - .append("

\n
    "); //NON-NLS - for (HashDb db : knownBadHashSets) { - detailsSb.append("
  • ").append(db.getHashSetName()).append("
  • \n"); //NON-NLS - } + "

    " + NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.complete.databasesUsed") + "

    \n
      "); //NON-NLS + + Stream.concat(knownBadHashSets.stream(), noChangeHashSets.stream()).forEach((db) -> { + detailsSb.append("
    • " + db.getHashSetName() + "
    • \n"); //NON-NLS + }); detailsSb.append("
    "); //NON-NLS @@ -456,7 +635,7 @@ public class HashDbIngestModule implements FileIngestModule { @Override public void shutDown() { if (refCounter.decrementAndGet(jobId) == 0) { - postSummary(jobId, knownBadHashSets, knownHashSets); + postSummary(jobId, knownBadHashSets, noChangeHashSets, knownHashSets); } } } diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java index c6ae0d5ef4..7559649689 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java @@ -1,15 +1,15 @@ /* * Autopsy Forensic Browser - * + * * Copyright 2011-2018 Basis Technology Corp. * Contact: carrier sleuthkit org - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -23,7 +23,6 @@ import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; import java.io.File; import java.io.IOException; -import java.io.Serializable; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -31,6 +30,7 @@ import java.util.Objects; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.logging.Level; +import java.util.stream.Stream; import javax.swing.JFileChooser; import javax.swing.JOptionPane; import javax.swing.SwingWorker; @@ -58,6 +58,7 @@ import org.sleuthkit.datamodel.SleuthkitJNI; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository; +import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.HashDb.KnownFilesType; /** * This class implements a singleton that manages the set of hash databases used @@ -103,8 +104,8 @@ public class HashDbManager implements PropertyChangeListener { public synchronized void removePropertyChangeListener(PropertyChangeListener listener) { changeSupport.removePropertyChangeListener(listener); } - - synchronized boolean verifyAllDatabasesLoadedCorrectly(){ + + synchronized boolean verifyAllDatabasesLoadedCorrectly() { return allDatabasesLoadedCorrectly; } @@ -238,7 +239,7 @@ public class HashDbManager implements PropertyChangeListener { } return hashDb; } - + private SleuthkitHashSet addHashDatabase(int handle, String hashSetName, boolean searchDuringIngest, boolean sendIngestMessages, HashDb.KnownFilesType knownFilesType) throws TskCoreException { // Wrap an object around the handle. SleuthkitHashSet hashDb = new SleuthkitHashSet(handle, hashSetName, searchDuringIngest, sendIngestMessages, knownFilesType); @@ -273,22 +274,22 @@ public class HashDbManager implements PropertyChangeListener { } return hashDb; } - - CentralRepoHashSet addExistingCentralRepoHashSet(String hashSetName, String version, int referenceSetID, - boolean searchDuringIngest, boolean sendIngestMessages, HashDb.KnownFilesType knownFilesType, - boolean readOnly) throws TskCoreException{ - - if(! 
CentralRepository.isEnabled()){ + + CentralRepoHashSet addExistingCentralRepoHashSet(String hashSetName, String version, int referenceSetID, + boolean searchDuringIngest, boolean sendIngestMessages, HashDb.KnownFilesType knownFilesType, + boolean readOnly) throws TskCoreException { + + if (!CentralRepository.isEnabled()) { throw new TskCoreException("Could not load central repository hash set " + hashSetName + " - central repository is not enabled"); } - + CentralRepoHashSet db = new CentralRepoHashSet(hashSetName, version, referenceSetID, searchDuringIngest, - sendIngestMessages, knownFilesType, readOnly); - - if(! db.isValid()){ + sendIngestMessages, knownFilesType, readOnly); + + if (!db.isValid()) { throw new TskCoreException("Error finding hash set " + hashSetName + " in central repository"); } - + // Add the hash database to the collection hashSets.add(db); @@ -302,8 +303,8 @@ public class HashDbManager implements PropertyChangeListener { NbBundle.getMessage(this.getClass(), "HashDbManager.moduleErrorListeningToUpdatesMsg"), MessageNotifyUtil.MessageType.ERROR); } - return db; - + return db; + } synchronized void indexHashDatabase(SleuthkitHashSet hashDb) { @@ -341,7 +342,7 @@ public class HashDbManager implements PropertyChangeListener { this.removeHashDatabaseNoSave(hashDb); this.save(); } - + public synchronized void removeHashDatabaseNoSave(HashDb hashDb) throws HashDbManagerException { // Don't remove a database if ingest is running boolean ingestIsRunning = IngestManager.getInstance().isIngestRunning(); @@ -357,17 +358,16 @@ public class HashDbManager implements PropertyChangeListener { hashSets.remove(hashDb); // Now undertake the operations that could throw. - // Indexing is only relevanet for sleuthkit hashsets - if(hashDb instanceof SleuthkitHashSet){ - SleuthkitHashSet hashDatabase = (SleuthkitHashSet)hashDb; + if (hashDb instanceof SleuthkitHashSet) { + SleuthkitHashSet hashDatabase = (SleuthkitHashSet) hashDb; try { - if(hashDatabase.hasIndex()){ + if (hashDatabase.hasIndex()) { hashSetPaths.remove(hashDatabase.getIndexPath()); } } catch (TskCoreException ex) { Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error getting index path of " + hashDatabase.getHashSetName() + " hash set when removing the hash set", ex); //NON-NLS - } + } try { if (!hashDatabase.hasIndexOnly()) { @@ -376,7 +376,7 @@ public class HashDbManager implements PropertyChangeListener { } catch (TskCoreException ex) { Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error getting hash set path of " + hashDatabase.getHashSetName() + " hash set when removing the hash set", ex); //NON-NLS } - + try { hashDatabase.close(); } catch (TskCoreException ex) { @@ -405,7 +405,7 @@ public class HashDbManager implements PropertyChangeListener { throw new HashDbManagerException(NbBundle.getMessage(this.getClass(), "HashDbManager.saveErrorExceptionMsg")); } } - + /** * Gets all of the hash databases used to classify files as known or known * bad. Will add any new central repository databases to the list before @@ -414,12 +414,12 @@ public class HashDbManager implements PropertyChangeListener { * @return A list, possibly empty, of hash databases. 
*/ public synchronized List getAllHashSets() { - try{ + try { updateHashSetsFromCentralRepository(); - } catch (TskCoreException ex){ + } catch (TskCoreException ex) { Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error loading central repository hash sets", ex); //NON-NLS } - + List hashDbs = new ArrayList<>(); hashDbs.addAll(this.hashSets); return hashDbs; @@ -432,9 +432,9 @@ public class HashDbManager implements PropertyChangeListener { */ public synchronized List getKnownFileHashSets() { List hashDbs = new ArrayList<>(); - try{ + try { updateHashSetsFromCentralRepository(); - } catch (TskCoreException ex){ + } catch (TskCoreException ex) { Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error loading central repository hash sets", ex); //NON-NLS } this.hashSets.stream().filter((db) -> (db.getKnownFilesType() == HashDb.KnownFilesType.KNOWN)).forEach((db) -> { @@ -450,9 +450,9 @@ public class HashDbManager implements PropertyChangeListener { */ public synchronized List getKnownBadFileHashSets() { List hashDbs = new ArrayList<>(); - try{ + try { updateHashSetsFromCentralRepository(); - } catch (TskCoreException ex){ + } catch (TskCoreException ex) { Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error loading central repository hash sets", ex); //NON-NLS } this.hashSets.stream().filter((db) -> (db.getKnownFilesType() == HashDb.KnownFilesType.KNOWN_BAD)).forEach((db) -> { @@ -472,9 +472,9 @@ public class HashDbManager implements PropertyChangeListener { private List getUpdateableHashSets(List hashDbs) { ArrayList updateableDbs = new ArrayList<>(); - try{ + try { updateHashSetsFromCentralRepository(); - } catch (TskCoreException ex){ + } catch (TskCoreException ex) { Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error loading central repository hash sets", ex); //NON-NLS } for (HashDb db : hashDbs) { @@ -488,34 +488,27 @@ public class HashDbManager implements PropertyChangeListener { } return updateableDbs; } - - private List getCentralRepoHashSetsFromDatabase(){ + + private List getCentralRepoHashSetsFromDatabase() { List crHashSets = new ArrayList<>(); - if(CentralRepository.isEnabled()){ - try{ + if (CentralRepository.isEnabled()) { + try { List crSets = CentralRepository.getInstance().getAllReferenceSets(CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID)); - for(CentralRepoFileSet globalSet:crSets){ - + for (CentralRepoFileSet globalSet : crSets) { + // Defaults for fields not stored in the central repository: // searchDuringIngest: false // sendIngestMessages: true if the hash set is notable - boolean sendIngestMessages = convertFileKnown(globalSet.getFileKnownStatus()).equals(HashDb.KnownFilesType.KNOWN_BAD); + boolean sendIngestMessages = KnownFilesType.fromFileKnown(globalSet.getFileKnownStatus()).equals(HashDb.KnownFilesType.KNOWN_BAD); crHashSets.add(new HashDbInfo(globalSet.getSetName(), globalSet.getVersion(), - globalSet.getGlobalSetID(), convertFileKnown(globalSet.getFileKnownStatus()), globalSet.isReadOnly(), false, sendIngestMessages)); - } - } catch (CentralRepoException ex){ + globalSet.getGlobalSetID(), KnownFilesType.fromFileKnown(globalSet.getFileKnownStatus()), globalSet.isReadOnly(), false, sendIngestMessages)); + } + } catch (CentralRepoException ex) { Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error loading central repository hash sets", ex); //NON-NLS } } return crHashSets; } - - private static 
HashDb.KnownFilesType convertFileKnown(TskData.FileKnown fileKnown){ - if(fileKnown.equals(TskData.FileKnown.BAD)){ - return HashDb.KnownFilesType.KNOWN_BAD; - } - return HashDb.KnownFilesType.KNOWN; - } /** * Restores the last saved hash sets configuration. This supports @@ -531,9 +524,9 @@ public class HashDbManager implements PropertyChangeListener { private void closeHashDatabases(List hashDatabases) { for (HashDb database : hashDatabases) { - if(database instanceof SleuthkitHashSet){ + if (database instanceof SleuthkitHashSet) { try { - ((SleuthkitHashSet)database).close(); + ((SleuthkitHashSet) database).close(); } catch (TskCoreException ex) { Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error closing " + database.getHashSetName() + " hash set", ex); //NON-NLS } @@ -558,13 +551,13 @@ public class HashDbManager implements PropertyChangeListener { * @param settings The settings to configure. */ @Messages({"# {0} - hash set name", "HashDbManager.noDbPath.message=Couldn't get valid hash set path for: {0}", - "HashDbManager.centralRepoLoadError.message=Error loading central repository hash sets"}) + "HashDbManager.centralRepoLoadError.message=Error loading central repository hash sets"}) private void configureSettings(HashLookupSettings settings) { allDatabasesLoadedCorrectly = true; List hashDbInfoList = settings.getHashDbInfo(); for (HashDbInfo hashDbInfo : hashDbInfoList) { try { - if(hashDbInfo.isFileDatabaseType()){ + if (hashDbInfo.isFileDatabaseType()) { String dbPath = this.getValidFilePath(hashDbInfo.getHashSetName(), hashDbInfo.getPath()); if (dbPath != null) { addHashDatabase(SleuthkitJNI.openHashDatabase(dbPath), hashDbInfo.getHashSetName(), hashDbInfo.getSearchDuringIngest(), hashDbInfo.getSendIngestMessages(), hashDbInfo.getKnownFilesType()); @@ -573,10 +566,10 @@ public class HashDbManager implements PropertyChangeListener { allDatabasesLoadedCorrectly = false; } } else { - if(CentralRepository.isEnabled()){ - addExistingCentralRepoHashSet(hashDbInfo.getHashSetName(), hashDbInfo.getVersion(), - hashDbInfo.getReferenceSetID(), - hashDbInfo.getSearchDuringIngest(), hashDbInfo.getSendIngestMessages(), + if (CentralRepository.isEnabled()) { + addExistingCentralRepoHashSet(hashDbInfo.getHashSetName(), hashDbInfo.getVersion(), + hashDbInfo.getReferenceSetID(), + hashDbInfo.getSearchDuringIngest(), hashDbInfo.getSendIngestMessages(), hashDbInfo.getKnownFilesType(), hashDbInfo.isReadOnly()); } } @@ -590,13 +583,13 @@ public class HashDbManager implements PropertyChangeListener { allDatabasesLoadedCorrectly = false; } } - - if(CentralRepository.isEnabled()){ - try{ + + if (CentralRepository.isEnabled()) { + try { updateHashSetsFromCentralRepository(); - } catch (TskCoreException ex){ + } catch (TskCoreException ex) { Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error opening hash set", ex); //NON-NLS - + JOptionPane.showMessageDialog(WindowManager.getDefault().getMainWindow(), Bundle.HashDbManager_centralRepoLoadError_message(), NbBundle.getMessage(this.getClass(), "HashDbManager.openHashDbErr"), @@ -604,14 +597,17 @@ public class HashDbManager implements PropertyChangeListener { allDatabasesLoadedCorrectly = false; } } - - /* NOTE: When RuntimeProperties.coreComponentsAreActive() is "false", - I don't think we should overwrite hash db settings file because we - were unable to load a database. The user should have to fix the issue or - remove the database from settings. 
Overwiting the settings effectively removes - the database from HashLookupSettings and the user may not know about this - because the dialogs are not being displayed. The next time user starts Autopsy, HashDB - will load without errors and the user may think that the problem was solved.*/ + + /* + * NOTE: When RuntimeProperties.coreComponentsAreActive() is "false", I + * don't think we should overwrite hash db settings file because we were + * unable to load a database. The user should have to fix the issue or + * remove the database from settings. Overwiting the settings + * effectively removes the database from HashLookupSettings and the user + * may not know about this because the dialogs are not being displayed. + * The next time user starts Autopsy, HashDB will load without errors + * and the user may think that the problem was solved. + */ if (!allDatabasesLoadedCorrectly && RuntimeProperties.runningWithGUI()) { try { HashLookupSettings.writeSettings(new HashLookupSettings(HashLookupSettings.convertHashSetList(this.hashSets))); @@ -622,31 +618,31 @@ public class HashDbManager implements PropertyChangeListener { } } } - + private void updateHashSetsFromCentralRepository() throws TskCoreException { - if(CentralRepository.isEnabled()){ + if (CentralRepository.isEnabled()) { List crHashDbInfoList = getCentralRepoHashSetsFromDatabase(); - for(HashDbInfo hashDbInfo : crHashDbInfoList) { - if(hashDbInfoIsNew(hashDbInfo)){ - addExistingCentralRepoHashSet(hashDbInfo.getHashSetName(), hashDbInfo.getVersion(), - hashDbInfo.getReferenceSetID(), - hashDbInfo.getSearchDuringIngest(), hashDbInfo.getSendIngestMessages(), hashDbInfo.getKnownFilesType(), - hashDbInfo.isReadOnly()); + for (HashDbInfo hashDbInfo : crHashDbInfoList) { + if (hashDbInfoIsNew(hashDbInfo)) { + addExistingCentralRepoHashSet(hashDbInfo.getHashSetName(), hashDbInfo.getVersion(), + hashDbInfo.getReferenceSetID(), + hashDbInfo.getSearchDuringIngest(), hashDbInfo.getSendIngestMessages(), hashDbInfo.getKnownFilesType(), + hashDbInfo.isReadOnly()); } } } } - - private boolean hashDbInfoIsNew(HashDbInfo dbInfo){ - for(HashDb db:this.hashSets){ - if(dbInfo.matches(db)){ + + private boolean hashDbInfoIsNew(HashDbInfo dbInfo) { + for (HashDb db : this.hashSets) { + if (dbInfo.matches(db)) { return false; } } return true; } - private String getValidFilePath(String hashSetName, String configuredPath) { + private String getValidFilePath(String hashSetName, String configuredPath) { // Check the configured path. File database = new File(configuredPath); if (database.exists()) { @@ -655,12 +651,12 @@ public class HashDbManager implements PropertyChangeListener { // Give the user an opportunity to find the desired file. 
String newPath = null; - if (RuntimeProperties.runningWithGUI() && - JOptionPane.showConfirmDialog(WindowManager.getDefault().getMainWindow(), - NbBundle.getMessage(this.getClass(), "HashDbManager.dlgMsg.dbNotFoundAtLoc", - hashSetName, configuredPath), - NbBundle.getMessage(this.getClass(), "HashDbManager.dlgTitle.MissingDb"), - JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION) { + if (RuntimeProperties.runningWithGUI() + && JOptionPane.showConfirmDialog(WindowManager.getDefault().getMainWindow(), + NbBundle.getMessage(this.getClass(), "HashDbManager.dlgMsg.dbNotFoundAtLoc", + hashSetName, configuredPath), + NbBundle.getMessage(this.getClass(), "HashDbManager.dlgTitle.MissingDb"), + JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION) { newPath = searchForFile(); if (null != newPath && !newPath.isEmpty()) { database = new File(newPath); @@ -692,26 +688,89 @@ public class HashDbManager implements PropertyChangeListener { } return filePath; } - + public static abstract class HashDb { - + /** * Indicates how files with hashes stored in a particular hash database * object should be classified. */ + @Messages({ + "HashDbManager.noChange.text=No Change", + "HashDbManager.known.text=Known", + "HashDbManager.knownBad.text=Notable" + }) public enum KnownFilesType { - KNOWN(NbBundle.getMessage(HashDbManager.class, "HashDbManager.known.text")), - KNOWN_BAD(NbBundle.getMessage(HashDbManager.class, "HashDbManager.knownBad.text")); - private final String displayName; + KNOWN(Bundle.HashDbManager_known_text(), TskData.FileKnown.KNOWN, false, false), + KNOWN_BAD(Bundle.HashDbManager_knownBad_text(), TskData.FileKnown.BAD, true, true), + NO_CHANGE(Bundle.HashDbManager_noChange_text(), TskData.FileKnown.UNKNOWN, true, false); - private KnownFilesType(String displayName) { + private final String displayName; + private final TskData.FileKnown fileKnown; + private final boolean allowSendInboxMessages; + private final boolean defaultSendInboxMessages; + + KnownFilesType(String displayName, TskData.FileKnown fileKnown, boolean allowSendInboxMessages, boolean defaultSendInboxMessages) { this.displayName = displayName; + this.fileKnown = fileKnown; + this.allowSendInboxMessages = allowSendInboxMessages; + this.defaultSendInboxMessages = defaultSendInboxMessages; + } + + /** + * Returns whether or not it is allowable to send inbox messages + * with this known files type. + * + * @return Whether or not it is allowable to send inbox messages + * with this known files type. + */ + boolean isInboxMessagesAllowed() { + return allowSendInboxMessages; + } + + /** + * Returns whether or not by default for this type is to send inbox + * messages. + * + * @return Whether or not by default for this type is to send inbox + * messages. + */ + boolean isDefaultInboxMessages() { + return defaultSendInboxMessages; } public String getDisplayName() { return this.displayName; } + + /** + * Retrieves the corresponding TskData.FileKnown enum type that + * relates to this. + * + * @return The corresponding TskData.FileKnown. + */ + TskData.FileKnown getFileKnown() { + return this.fileKnown; + } + + /** + * Converts a TskData.FileKnown to the corresponding KnownFilesType. + * + * @param fileKnown The TskData.FileKnown type. + * + * @return The corresponding KnownFilesType. 
+ */ + static KnownFilesType fromFileKnown(TskData.FileKnown fileKnown) { + if (fileKnown == null) { + return null; + } + + return Stream.of(KnownFilesType.values()) + .filter((type) -> type.getFileKnown() == fileKnown) + .findFirst() + .orElseThrow(() -> new IllegalArgumentException("Unknown TskData.FileKnown type: " + fileKnown)); + } } /** @@ -721,9 +780,9 @@ public class HashDbManager implements PropertyChangeListener { INDEXING_DONE } - + public abstract String getHashSetName(); - + abstract String getDisplayName(); public abstract String getDatabasePath() throws TskCoreException; @@ -731,7 +790,7 @@ public class HashDbManager implements PropertyChangeListener { public abstract HashDb.KnownFilesType getKnownFilesType(); public abstract boolean getSearchDuringIngest(); - + abstract void setSearchDuringIngest(boolean useForIngest); public abstract boolean getSendIngestMessages(); @@ -764,28 +823,30 @@ public class HashDbManager implements PropertyChangeListener { public abstract boolean lookupMD5Quick(Content content) throws TskCoreException; public abstract HashHitInfo lookupMD5(Content content) throws TskCoreException; - + /** - * Returns whether this database can be enabled. - * For file type, this is the same as checking that it has an index + * Returns whether this database can be enabled. For file type, this is + * the same as checking that it has an index + * * @return true if is valid, false otherwise - * @throws TskCoreException + * + * @throws TskCoreException */ abstract boolean isValid() throws TskCoreException; - + public abstract String getIndexPath() throws TskCoreException; - + public abstract boolean hasIndexOnly() throws TskCoreException; - + public abstract void firePropertyChange(String propertyName, Object oldValue, Object newValue); - + public abstract void addPropertyChangeListener(PropertyChangeListener pcl); - + public abstract void removePropertyChangeListener(PropertyChangeListener pcl); - + @Override public abstract String toString(); - + } /** @@ -793,13 +854,13 @@ public class HashDbManager implements PropertyChangeListener { * as known or know bad. */ class SleuthkitHashSet extends HashDb { - + private static final long serialVersionUID = 1L; private final int handle; private final String hashSetName; private boolean searchDuringIngest; private boolean sendIngestMessages; - private final HashDb.KnownFilesType knownFilesType; + private final HashDb.KnownFilesType knownFilesType; private boolean indexing; private final PropertyChangeSupport propertyChangeSupport = new PropertyChangeSupport(this); @@ -813,8 +874,8 @@ public class HashDbManager implements PropertyChangeListener { } /** - * Adds a listener for the events defined in HashDb.Event. - * Listeners are used during indexing. + * Adds a listener for the events defined in HashDb.Event. Listeners are + * used during indexing. 
* * @param pcl */ @@ -832,8 +893,8 @@ public class HashDbManager implements PropertyChangeListener { public void removePropertyChangeListener(PropertyChangeListener pcl) { propertyChangeSupport.removePropertyChangeListener(pcl); } - - int getHandle(){ + + int getHandle() { return handle; } @@ -841,9 +902,9 @@ public class HashDbManager implements PropertyChangeListener { public String getHashSetName() { return hashSetName; } - + @Override - String getDisplayName(){ + String getDisplayName() { return getHashSetName(); } @@ -851,9 +912,9 @@ public class HashDbManager implements PropertyChangeListener { public String getDatabasePath() throws TskCoreException { return SleuthkitJNI.getHashDatabasePath(handle); } - - public void setIndexing(boolean indexing){ - this.indexing = indexing; + + public void setIndexing(boolean indexing) { + this.indexing = indexing; } @Override @@ -989,12 +1050,14 @@ public class HashDbManager implements PropertyChangeListener { } return result; } - + /** - * Returns whether this database can be enabled. - * For file type, this is the same as checking that it has an index + * Returns whether this database can be enabled. For file type, this is + * the same as checking that it has an index + * * @return true if is valid, false otherwise - * @throws TskCoreException + * + * @throws TskCoreException */ @Override boolean isValid() throws TskCoreException { @@ -1017,21 +1080,20 @@ public class HashDbManager implements PropertyChangeListener { boolean isIndexing() { return indexing; } - + @Override - public void firePropertyChange(String propertyName, Object oldValue, Object newValue){ + public void firePropertyChange(String propertyName, Object oldValue, Object newValue) { this.propertyChangeSupport.firePropertyChange(propertyName, oldValue, newValue); } private void close() throws TskCoreException { SleuthkitJNI.closeHashDatabase(handle); } - + @Override - public String toString(){ + public String toString() { return getHashSetName(); } - @Override public int hashCode() { @@ -1066,13 +1128,13 @@ public class HashDbManager implements PropertyChangeListener { * Instances of this class represent hash databases used to classify files * as known or know bad. 
*/ - class CentralRepoHashSet extends HashDb{ + class CentralRepoHashSet extends HashDb { private static final long serialVersionUID = 1L; private final String hashSetName; private boolean searchDuringIngest; private boolean sendIngestMessages; - private final HashDb.KnownFilesType knownFilesType; + private final HashDb.KnownFilesType knownFilesType; private final int referenceSetID; private final String version; private String orgName; @@ -1080,10 +1142,10 @@ public class HashDbManager implements PropertyChangeListener { private final PropertyChangeSupport propertyChangeSupport = new PropertyChangeSupport(this); @Messages({"HashDbManager.CentralRepoHashDb.orgError=Error loading organization"}) - private CentralRepoHashSet(String hashSetName, String version, int referenceSetID, - boolean useForIngest, boolean sendHitMessages, HashDb.KnownFilesType knownFilesType, + private CentralRepoHashSet(String hashSetName, String version, int referenceSetID, + boolean useForIngest, boolean sendHitMessages, HashDb.KnownFilesType knownFilesType, boolean readOnly) - throws TskCoreException{ + throws TskCoreException { this.hashSetName = hashSetName; this.version = version; this.referenceSetID = referenceSetID; @@ -1091,18 +1153,18 @@ public class HashDbManager implements PropertyChangeListener { this.sendIngestMessages = sendHitMessages; this.knownFilesType = knownFilesType; this.readOnly = readOnly; - - try{ + + try { orgName = CentralRepository.getInstance().getReferenceSetOrganization(referenceSetID).getName(); - } catch (CentralRepoException ex){ + } catch (CentralRepoException ex) { Logger.getLogger(SleuthkitHashSet.class.getName()).log(Level.SEVERE, "Error looking up central repository organization for reference set " + referenceSetID, ex); //NON-NLS orgName = Bundle.HashDbManager_CentralRepoHashDb_orgError(); } } /** - * Adds a listener for the events defined in HashDb.Event. - * Listeners are used during indexing. + * Adds a listener for the events defined in HashDb.Event. Listeners are + * used during indexing. * * @param pcl */ @@ -1120,9 +1182,9 @@ public class HashDbManager implements PropertyChangeListener { public void removePropertyChangeListener(PropertyChangeListener pcl) { propertyChangeSupport.removePropertyChangeListener(pcl); } - + @Override - public boolean hasIndexOnly() throws TskCoreException{ + public boolean hasIndexOnly() throws TskCoreException { return true; } @@ -1130,25 +1192,25 @@ public class HashDbManager implements PropertyChangeListener { public String getHashSetName() { return hashSetName; } - + @Override - public String getDisplayName(){ - if(! getVersion().isEmpty()){ + public String getDisplayName() { + if (!getVersion().isEmpty()) { return getHashSetName() + " " + getVersion() + " (remote)"; } else { return getHashSetName() + " (remote)"; } } - - String getVersion(){ + + String getVersion() { return version; } - - String getOrgName(){ + + String getOrgName() { return orgName; } - - int getReferenceSetID(){ + + int getReferenceSetID() { return referenceSetID; } @@ -1196,7 +1258,7 @@ public class HashDbManager implements PropertyChangeListener { */ @Override public boolean isUpdateable() throws TskCoreException { - return (! 
readOnly); + return (!readOnly); } /** @@ -1229,18 +1291,13 @@ public class HashDbManager implements PropertyChangeListener { if (content instanceof AbstractFile) { AbstractFile file = (AbstractFile) content; if (null != file.getMd5Hash()) { - TskData.FileKnown type; - if(knownFilesType.equals(HashDb.KnownFilesType.KNOWN_BAD)){ - type = TskData.FileKnown.BAD; - } else { - type = TskData.FileKnown.KNOWN; - } - - try{ + TskData.FileKnown type = knownFilesType.getFileKnown(); + + try { CentralRepoFileInstance fileInstance = new CentralRepoFileInstance(referenceSetID, file.getMd5Hash(), - type, comment); - CentralRepository.getInstance().addReferenceInstance(fileInstance,CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID)); - } catch (CentralRepoException | CorrelationAttributeNormalizationException ex){ + type, comment); + CentralRepository.getInstance().addReferenceInstance(fileInstance, CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID)); + } catch (CentralRepoException | CorrelationAttributeNormalizationException ex) { throw new TskCoreException("Error adding hashes to " + getDisplayName(), ex); //NON-NLS } } @@ -1257,24 +1314,20 @@ public class HashDbManager implements PropertyChangeListener { @Override public void addHashes(List hashes) throws TskCoreException { Set globalFileInstances = new HashSet<>(); - for(HashEntry hashEntry:hashes){ - TskData.FileKnown type; - if(knownFilesType.equals(HashDb.KnownFilesType.KNOWN_BAD)){ - type = TskData.FileKnown.BAD; - } else { - type = TskData.FileKnown.KNOWN; - } + for (HashEntry hashEntry : hashes) { + TskData.FileKnown type = knownFilesType.getFileKnown(); + try { globalFileInstances.add(new CentralRepoFileInstance(referenceSetID, hashEntry.getMd5Hash(), type, hashEntry.getComment())); - } catch (CentralRepoException | CorrelationAttributeNormalizationException ex){ + } catch (CentralRepoException | CorrelationAttributeNormalizationException ex) { throw new TskCoreException("Error adding hashes to " + getDisplayName(), ex); } } - - try{ - CentralRepository.getInstance().bulkInsertReferenceTypeEntries(globalFileInstances, + + try { + CentralRepository.getInstance().bulkInsertReferenceTypeEntries(globalFileInstances, CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID)); - } catch (CentralRepoException ex){ + } catch (CentralRepoException ex) { throw new TskCoreException("Error adding hashes to " + getDisplayName(), ex); } } @@ -1295,9 +1348,9 @@ public class HashDbManager implements PropertyChangeListener { if (content instanceof AbstractFile) { AbstractFile file = (AbstractFile) content; if (null != file.getMd5Hash()) { - try{ + try { return CentralRepository.getInstance().isFileHashInReferenceSet(file.getMd5Hash(), this.referenceSetID); - } catch (CentralRepoException | CorrelationAttributeNormalizationException ex){ + } catch (CentralRepoException | CorrelationAttributeNormalizationException ex) { Logger.getLogger(SleuthkitHashSet.class.getName()).log(Level.SEVERE, "Error performing central reposiotry hash lookup for hash " + file.getMd5Hash() + " in reference set " + referenceSetID, ex); //NON-NLS throw new TskCoreException("Error performing central reposiotry hash lookup", ex); @@ -1324,12 +1377,9 @@ public class HashDbManager implements PropertyChangeListener { if (content instanceof AbstractFile) { AbstractFile file = (AbstractFile) content; if (null != file.getMd5Hash()) { - try{ - 
if(CentralRepository.getInstance().isFileHashInReferenceSet(file.getMd5Hash(), this.referenceSetID)){ - // Make a bare-bones HashHitInfo for now - result = new HashHitInfo(file.getMd5Hash(), "", ""); - } - } catch (CentralRepoException | CorrelationAttributeNormalizationException ex){ + try { + return CentralRepository.getInstance().lookupHash(file.getMd5Hash(), referenceSetID); + } catch (CentralRepoException | CorrelationAttributeNormalizationException ex) { Logger.getLogger(SleuthkitHashSet.class.getName()).log(Level.SEVERE, "Error performing central reposiotry hash lookup for hash " + file.getMd5Hash() + " in reference set " + referenceSetID, ex); //NON-NLS throw new TskCoreException("Error performing central reposiotry hash lookup", ex); @@ -1338,35 +1388,34 @@ public class HashDbManager implements PropertyChangeListener { } return result; } - + /** * Returns whether this database can be enabled. - * + * * @return true if is valid, false otherwise */ @Override boolean isValid() { - if(! CentralRepository.isEnabled()) { + if (!CentralRepository.isEnabled()) { return false; } - try{ + try { return CentralRepository.getInstance().referenceSetIsValid(this.referenceSetID, this.hashSetName, this.version); - } catch (CentralRepoException ex){ + } catch (CentralRepoException ex) { Logger.getLogger(CentralRepoHashSet.class.getName()).log(Level.SEVERE, "Error validating hash set " + hashSetName, ex); //NON-NLS return false; } } - + @Override - public void firePropertyChange(String propertyName, Object oldValue, Object newValue){ + public void firePropertyChange(String propertyName, Object oldValue, Object newValue) { this.propertyChangeSupport.firePropertyChange(propertyName, oldValue, newValue); } - + @Override - public String toString(){ + public String toString() { return getDisplayName(); } - @Override public int hashCode() { @@ -1398,8 +1447,8 @@ public class HashDbManager implements PropertyChangeListener { } return true; } - } - + } + /** * Worker thread to make an index of a database */ diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupModuleSettingsPanel.form b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupModuleSettingsPanel.form index c0a486fce9..3e2e525560 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupModuleSettingsPanel.form +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupModuleSettingsPanel.form @@ -25,16 +25,12 @@ - + - - - - - + @@ -46,22 +42,25 @@ - + - + - - - - - + - + + + + + + + + @@ -75,7 +74,7 @@ - + @@ -86,20 +85,6 @@ - - - - - - - - - - - - - - @@ -121,39 +106,5 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupModuleSettingsPanel.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupModuleSettingsPanel.java index 6dcd567226..e804e0bea7 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupModuleSettingsPanel.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupModuleSettingsPanel.java @@ -1,15 +1,15 @@ /* * Autopsy Forensic Browser - * + * * Copyright 2011-2018 Basis Technology Corp. * Contact: carrier sleuthkit org - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -27,13 +27,13 @@ import javax.swing.JScrollPane; import javax.swing.JTable; import javax.swing.table.AbstractTableModel; import javax.swing.table.TableColumn; +import org.apache.commons.lang.StringUtils; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings; import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.HashDb; - /** * Ingest job settings panel for hash lookup file ingest modules. */ @@ -42,10 +42,8 @@ public final class HashLookupModuleSettingsPanel extends IngestModuleIngestJobSe private static final long serialVersionUID = 1L; private final HashDbManager hashDbManager = HashDbManager.getInstance(); - private final List knownHashSetModels = new ArrayList<>(); - private final HashSetsTableModel knownHashSetsTableModel = new HashSetsTableModel(knownHashSetModels); - private final List knownBadHashSetModels = new ArrayList<>(); - private final HashSetsTableModel knownBadHashSetsTableModel = new HashSetsTableModel(knownBadHashSetModels); + private final List hashSetModels = new ArrayList<>(); + private final HashSetsTableModel hashSetsTableModel = new HashSetsTableModel(hashSetModels); HashLookupModuleSettingsPanel(HashLookupModuleSettings settings) { initializeHashSetModels(settings); @@ -54,11 +52,7 @@ public final class HashLookupModuleSettingsPanel extends IngestModuleIngestJobSe } private void initializeHashSetModels(HashLookupModuleSettings settings) { - initializeHashSetModels(settings, validSetsOnly(hashDbManager.getKnownFileHashSets()), knownHashSetModels); - initializeHashSetModels(settings, validSetsOnly(hashDbManager.getKnownBadFileHashSets()), knownBadHashSetModels); - } - - private void initializeHashSetModels(HashLookupModuleSettings settings, List hashDbs, List hashSetModels) { + List hashDbs = validSetsOnly(hashDbManager.getAllHashSets()); hashSetModels.clear(); for (HashDb db : hashDbs) { hashSetModels.add(new HashSetModel(db, settings.isHashSetEnabled(db), isHashDbValid(db))); @@ -66,8 +60,7 @@ public final class HashLookupModuleSettingsPanel extends IngestModuleIngestJobSe } private void customizeComponents(HashLookupModuleSettings settings) { - customizeHashSetsTable(jScrollPane1, knownHashTable, knownHashSetsTableModel); - customizeHashSetsTable(jScrollPane2, knownBadHashTable, knownBadHashSetsTableModel); + customizeHashSetsTable(hashDbsScrollPane, hashTable, hashSetsTableModel); alwaysCalcHashesCheckbox.setSelected(settings.shouldCalculateHashes()); hashDbManager.addPropertyChangeListener(this); alwaysCalcHashesCheckbox.setText("" + org.openide.util.NbBundle.getMessage(HashLookupModuleSettingsPanel.class, "HashLookupModuleSettingsPanel.alwaysCalcHashesCheckbox.text") + ""); // NOI18N NON-NLS @@ -78,7 +71,7 @@ public final class HashLookupModuleSettingsPanel extends IngestModuleIngestJobSe table.setTableHeader(null); table.setRowSelectionAllowed(false); final int width1 = scrollPane.getPreferredSize().width; - knownHashTable.setAutoResizeMode(JTable.AUTO_RESIZE_NEXT_COLUMN); + hashTable.setAutoResizeMode(JTable.AUTO_RESIZE_NEXT_COLUMN); TableColumn column; for 
(int i = 0; i < table.getColumnCount(); i++) { column = table.getColumnModel().getColumn(i); @@ -103,8 +96,7 @@ public final class HashLookupModuleSettingsPanel extends IngestModuleIngestJobSe public IngestModuleIngestJobSettings getSettings() { List enabledHashSets = new ArrayList<>(); List disabledHashSets = new ArrayList<>(); - addHashSets(knownHashSetModels, enabledHashSets, disabledHashSets); - addHashSets(knownBadHashSetModels, enabledHashSets, disabledHashSets); + addHashSets(hashSetModels, enabledHashSets, disabledHashSets); return new HashLookupModuleSettings(alwaysCalcHashesCheckbox.isSelected(), enabledHashSets, disabledHashSets); } @@ -121,46 +113,41 @@ public final class HashLookupModuleSettingsPanel extends IngestModuleIngestJobSe void update() { updateHashSetModels(); - knownHashSetsTableModel.fireTableDataChanged(); - knownBadHashSetsTableModel.fireTableDataChanged(); + hashSetsTableModel.fireTableDataChanged(); } - private void updateHashSetModels() { - updateHashSetModels(validSetsOnly(hashDbManager.getKnownFileHashSets()), knownHashSetModels); - updateHashSetModels(validSetsOnly(hashDbManager.getKnownBadFileHashSets()), knownBadHashSetModels); - } - - private List validSetsOnly(List hashDbs){ + private List validSetsOnly(List hashDbs) { List validDbs = new ArrayList<>(); - for(HashDb db:hashDbs){ - try{ - if(db.isValid()){ + for (HashDb db : hashDbs) { + try { + if (db.isValid()) { validDbs.add(db); } - } catch (TskCoreException ex){ + } catch (TskCoreException ex) { Logger.getLogger(HashLookupModuleSettingsPanel.class.getName()).log(Level.SEVERE, "Error checking validity for hash set (name = " + db.getHashSetName() + ")", ex); //NON-NLS } } return validDbs; } - void updateHashSetModels(List hashDbs, List hashSetModels) { - + void updateHashSetModels() { + List hashDbs = validSetsOnly(hashDbManager.getAllHashSets()); + List hashDatabases = new ArrayList<>(hashDbs); - + // Update the hash sets and detect deletions. List deletedHashSetModels = new ArrayList<>(); for (HashSetModel model : hashSetModels) { boolean foundDatabase = false; - for(HashDb db : hashDatabases){ - if(model.getDatabase().equals(db)){ + for (HashDb db : hashDatabases) { + if (model.getDatabase().equals(db)) { model.setValid(isHashDbValid(db)); hashDatabases.remove(db); foundDatabase = true; break; } } - if(! foundDatabase){ + if (!foundDatabase) { deletedHashSetModels.add(model); } } @@ -179,8 +166,7 @@ public final class HashLookupModuleSettingsPanel extends IngestModuleIngestJobSe void reset(HashLookupModuleSettings newSettings) { initializeHashSetModels(newSettings); alwaysCalcHashesCheckbox.setSelected(newSettings.shouldCalculateHashes()); - knownHashSetsTableModel.fireTableDataChanged(); - knownBadHashSetsTableModel.fireTableDataChanged(); + hashSetsTableModel.fireTableDataChanged(); } private boolean isHashDbValid(HashDb hashDb) { @@ -204,8 +190,8 @@ public final class HashLookupModuleSettingsPanel extends IngestModuleIngestJobSe this.enabled = enabled; this.valid = valid; } - - HashDb getDatabase(){ + + HashDb getDatabase() { return db; } @@ -213,6 +199,16 @@ public final class HashLookupModuleSettingsPanel extends IngestModuleIngestJobSe return db.getDisplayName(); } + String getFormattedName() { + String knownTypeName = (db != null && db.getKnownFilesType() != null) ? db.getKnownFilesType().getDisplayName() : ""; + if (!StringUtils.isBlank(knownTypeName)) { + knownTypeName = String.format(" (%s)", knownTypeName); + } + + String displayName = db != null ? 
db.getDisplayName() : ""; + return displayName + knownTypeName; + } + void setEnabled(boolean enabled) { this.enabled = enabled; } @@ -254,7 +250,7 @@ public final class HashLookupModuleSettingsPanel extends IngestModuleIngestJobSe if (columnIndex == 0) { return hashSets.get(rowIndex).isEnabled(); } else { - return hashSets.get(rowIndex).getName(); + return hashSets.get(rowIndex).getFormattedName(); } } @@ -285,26 +281,21 @@ public final class HashLookupModuleSettingsPanel extends IngestModuleIngestJobSe // //GEN-BEGIN:initComponents private void initComponents() { - jScrollPane1 = new javax.swing.JScrollPane(); - knownHashTable = new javax.swing.JTable(); - knownBadHashDbsLabel = new javax.swing.JLabel(); - knownHashDbsLabel = new javax.swing.JLabel(); + hashDbsLabel = new javax.swing.JLabel(); + hashDbsScrollPane = new javax.swing.JScrollPane(); + hashTable = new javax.swing.JTable(); alwaysCalcHashesCheckbox = new javax.swing.JCheckBox(); - jScrollPane2 = new javax.swing.JScrollPane(); - knownBadHashTable = new javax.swing.JTable(); setPreferredSize(new java.awt.Dimension(292, 150)); - jScrollPane1.setBorder(javax.swing.BorderFactory.createEtchedBorder()); + hashDbsLabel.setText(org.openide.util.NbBundle.getMessage(HashLookupModuleSettingsPanel.class, "HashLookupModuleSettingsPanel.hashDbsLabel.text")); // NOI18N - knownHashTable.setBackground(new java.awt.Color(240, 240, 240)); - knownHashTable.setShowHorizontalLines(false); - knownHashTable.setShowVerticalLines(false); - jScrollPane1.setViewportView(knownHashTable); + hashDbsScrollPane.setBorder(javax.swing.BorderFactory.createEtchedBorder()); - knownBadHashDbsLabel.setText(org.openide.util.NbBundle.getMessage(HashLookupModuleSettingsPanel.class, "HashLookupModuleSettingsPanel.knownBadHashDbsLabel.text")); // NOI18N - - knownHashDbsLabel.setText(org.openide.util.NbBundle.getMessage(HashLookupModuleSettingsPanel.class, "HashLookupModuleSettingsPanel.knownHashDbsLabel.text")); // NOI18N + hashTable.setBackground(new java.awt.Color(240, 240, 240)); + hashTable.setShowHorizontalLines(false); + hashTable.setShowVerticalLines(false); + hashDbsScrollPane.setViewportView(hashTable); alwaysCalcHashesCheckbox.setText(org.openide.util.NbBundle.getMessage(HashLookupModuleSettingsPanel.class, "HashLookupModuleSettingsPanel.alwaysCalcHashesCheckbox.text")); // NOI18N alwaysCalcHashesCheckbox.setToolTipText(org.openide.util.NbBundle.getMessage(HashLookupModuleSettingsPanel.class, "HashLookupModuleSettingsPanel.alwaysCalcHashesCheckbox.toolTipText")); // NOI18N @@ -314,21 +305,6 @@ public final class HashLookupModuleSettingsPanel extends IngestModuleIngestJobSe alwaysCalcHashesCheckbox.setVerticalAlignment(javax.swing.SwingConstants.TOP); alwaysCalcHashesCheckbox.setVerticalTextPosition(javax.swing.SwingConstants.TOP); - jScrollPane2.setBorder(javax.swing.BorderFactory.createEtchedBorder()); - - knownBadHashTable.setBackground(new java.awt.Color(240, 240, 240)); - knownBadHashTable.setModel(new javax.swing.table.DefaultTableModel( - new Object [][] { - - }, - new String [] { - - } - )); - knownBadHashTable.setShowHorizontalLines(false); - knownBadHashTable.setShowVerticalLines(false); - jScrollPane2.setViewportView(knownBadHashTable); - javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( @@ -337,14 +313,11 @@ public final class HashLookupModuleSettingsPanel extends IngestModuleIngestJobSe .addContainerGap() 
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() - .addComponent(knownHashDbsLabel) + .addComponent(hashDbsLabel) .addGap(0, 0, Short.MAX_VALUE)) - .addComponent(knownBadHashDbsLabel, javax.swing.GroupLayout.DEFAULT_SIZE, 290, Short.MAX_VALUE) .addGroup(layout.createSequentialGroup() .addGap(10, 10, 10) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE) - .addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE))) + .addComponent(hashDbsScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 494, Short.MAX_VALUE)) .addComponent(alwaysCalcHashesCheckbox, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addContainerGap()) ); @@ -352,26 +325,19 @@ public final class HashLookupModuleSettingsPanel extends IngestModuleIngestJobSe layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGap(2, 2, 2) - .addComponent(knownHashDbsLabel) + .addComponent(hashDbsLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 29, Short.MAX_VALUE) + .addComponent(hashDbsScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 207, Short.MAX_VALUE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) - .addComponent(knownBadHashDbsLabel) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, 29, Short.MAX_VALUE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(alwaysCalcHashesCheckbox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addGap(0, 0, 0)) + .addContainerGap()) ); }// //GEN-END:initComponents // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JCheckBox alwaysCalcHashesCheckbox; - private javax.swing.JScrollPane jScrollPane1; - private javax.swing.JScrollPane jScrollPane2; - private javax.swing.JLabel knownBadHashDbsLabel; - private javax.swing.JTable knownBadHashTable; - private javax.swing.JLabel knownHashDbsLabel; - private javax.swing.JTable knownHashTable; + private javax.swing.JLabel hashDbsLabel; + private javax.swing.JScrollPane hashDbsScrollPane; + private javax.swing.JTable hashTable; // End of variables declaration//GEN-END:variables } diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java index 0f1f97ba97..fade3e47c5 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java @@ -49,7 +49,6 @@ import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.ingest.IngestModuleGlobalSettingsPanel; import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.SleuthkitHashSet; import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.CentralRepoHashSet; -import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.HashDb.KnownFilesType; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.HashDb; import 
org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.SetEvt; @@ -95,16 +94,16 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan } } }); - + HashDbManager.getInstance().addPropertyChangeListener(new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { String propName = evt.getPropertyName(); - if(propName.equals(SetEvt.DB_ADDED.toString()) || - propName.equals(SetEvt.DB_DELETED.toString())) { + if (propName.equals(SetEvt.DB_ADDED.toString()) + || propName.equals(SetEvt.DB_DELETED.toString())) { hashSetTableModel.refreshModel(); } - } + } }); } @@ -282,7 +281,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan // Update ingest option components. sendIngestMessagesCheckBox.setSelected(db.getSendIngestMessages()); - sendIngestMessagesCheckBox.setEnabled(!ingestIsRunning && db.getKnownFilesType().equals(KnownFilesType.KNOWN_BAD)); + sendIngestMessagesCheckBox.setEnabled(!ingestIsRunning && db.getKnownFilesType().isInboxMessagesAllowed()); // Update database action buttons. createDatabaseButton.setEnabled(true); diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashSetParser.java index 8a7a3ae034..85d28231a9 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashSetParser.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashSetParser.java @@ -19,6 +19,7 @@ package org.sleuthkit.autopsy.modules.hashdatabase; import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.HashEntry; interface HashSetParser { @@ -26,7 +27,8 @@ interface HashSetParser { * Get the next hash to import * * @return The hash as a string, or null if the end of file was reached - * without error + * without error + * * @throws TskCoreException */ String getNextHash() throws TskCoreException; @@ -50,4 +52,20 @@ interface HashSetParser { * Closes the import file */ void close(); + + /** + * Get the next hash to import as a HashEntry object. + * + * @return A new hash entry for the next item parsed. + * + * @throws TskCoreException + */ + default HashEntry getNextHashEntry() throws TskCoreException { + String next = getNextHash(); + if (next == null) { + return null; + } + + return new HashEntry(null, next, null, null, null); + } } diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java index f25471b0e0..d868b39189 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java @@ -41,6 +41,7 @@ import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository; +import org.sleuthkit.datamodel.HashEntry; /** * Imports a hash set into the central repository and updates a progress dialog @@ -186,7 +187,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P * Get the newly created database * * @return the imported database. 
May be null if an error occurred or - * the user canceled + * the user canceled */ synchronized HashDbManager.CentralRepoHashSet getDatabase() { return newHashDb; @@ -205,7 +206,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P * Check if the import was successful or if there was an error. * * @return true if the import process completed without error, false - * otherwise + * otherwise */ boolean getImportSuccess() { return importSuccess.get(); @@ -231,16 +232,11 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P try { // Conver to the FileKnown enum used by EamGlobalSet - TskData.FileKnown knownStatus; - if (knownFilesType.equals(HashDbManager.HashDb.KnownFilesType.KNOWN)) { - knownStatus = TskData.FileKnown.KNOWN; - } else { - knownStatus = TskData.FileKnown.BAD; - } + TskData.FileKnown knownStatus = knownFilesType.getFileKnown(); // Create an empty hashset in the central repository CentralRepository dbManager = CentralRepository.getInstance(); - referenceSetID.set(dbManager.newReferenceSet(new CentralRepoFileSet(orgId, hashSetName, version, knownStatus, + referenceSetID.set(dbManager.newReferenceSet(new CentralRepoFileSet(orgId, hashSetName, version, knownStatus, readOnly, CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID)))); // Get the "FILES" content type. This is a database lookup so we @@ -255,14 +251,14 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P return null; } - String newHash = hashSetParser.getNextHash(); + HashEntry newHash = hashSetParser.getNextHashEntry(); if (newHash != null) { CentralRepoFileInstance eamGlobalFileInstance = new CentralRepoFileInstance( referenceSetID.get(), - newHash, + newHash.getMd5Hash(), knownStatus, - ""); + newHash.getComment()); globalInstances.add(eamGlobalFileInstance); diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java index 94d2724995..55bd974be4 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java @@ -25,6 +25,7 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.logging.Level; import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.datamodel.HashEntry; import org.sleuthkit.datamodel.TskCoreException; /** @@ -68,7 +69,9 @@ public class KdbHashSetParser implements HashSetParser { } // Get the hashes - resultSet = statement.executeQuery("SELECT md5 FROM hashes"); + resultSet = statement.executeQuery("SELECT h.md5 as md5, " + + " (SELECT group_concat(c.comment, ' ') FROM comments c WHERE h.id = c.hash_id) as comment " + + " from hashes h"); // At this point, getNextHash can read each hash from the result set } catch (ClassNotFoundException | SQLException ex) { @@ -77,15 +80,21 @@ public class KdbHashSetParser implements HashSetParser { } + /** * Get the next hash to import * * @return The hash as a string + * * @throws TskCoreException */ @Override public String getNextHash() throws TskCoreException { + return getNextHashEntry().getMd5Hash(); + } + @Override + public HashEntry getNextHashEntry() throws TskCoreException { try { if (resultSet.next()) { byte[] hashBytes = resultSet.getBytes("md5"); @@ -98,13 +107,15 @@ public class KdbHashSetParser implements HashSetParser { throw new TskCoreException("Hash has incorrect length: " + 
sb.toString()); } + String md5Hash = sb.toString(); + String comment = resultSet.getString("comment"); totalHashesRead++; - return sb.toString(); + return new HashEntry(null, md5Hash, null, null, comment); } else { throw new TskCoreException("Could not read expected number of hashes from hash set " + filename); } } catch (SQLException ex) { - throw new TskCoreException("Error reading hash from result set for hash set " + filename, ex); + throw new TskCoreException("Error opening/reading hash set " + filename, ex); } } diff --git a/Core/src/org/sleuthkit/autopsy/persona/Bundle.properties b/Core/src/org/sleuthkit/autopsy/persona/Bundle.properties new file mode 100644 index 0000000000..59ec5eaeed --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/persona/Bundle.properties @@ -0,0 +1,14 @@ +CTL_OpenPersonas=Personas +CTL_PersonasTopComponentAction=PersonasTopComponent +CTL_PersonasTopComponent=Personas +PersonasTopComponent.searchField.text=John Doe +PersonasTopComponent.searchBtn.text=Search +PersonasTopComponent.searchNameRadio.text=Name +PersonasTopComponent.searchAccountRadio.text=Account +PersonasTopComponent.filterResultsTable.columnModel.title1=Name +PersonasTopComponent.filterResultsTable.columnModel.title0=ID +PersonasTopComponent.resultAccountsLbl.text=Accounts: +PersonasTopComponent.resultAliasesLbl.text=Aliases: +PersonasTopComponent.resultNameLbl.text=Name: +PersonasTopComponent.resultCasesLbl.text=Cases found in: +PersonasTopComponent.resultNameField.text=Johnathan Dough diff --git a/Core/src/org/sleuthkit/autopsy/persona/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/persona/Bundle.properties-MERGED new file mode 100644 index 0000000000..1e9b1769d2 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/persona/Bundle.properties-MERGED @@ -0,0 +1,5 @@ +CTL_OpenPersonas=Personas +CTL_PersonasTopComponentAction=PersonasTopComponent +CTL_PersonasTopComponent=Personas +OpenPersonasAction.displayName=Personas +PTopComponent_Name=Personas diff --git a/Core/src/org/sleuthkit/autopsy/persona/OpenPersonasAction.java b/Core/src/org/sleuthkit/autopsy/persona/OpenPersonasAction.java new file mode 100644 index 0000000000..157b25d1cc --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/persona/OpenPersonasAction.java @@ -0,0 +1,93 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2020 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.persona; + +import javax.swing.JMenuItem; +import org.openide.awt.ActionID; +import org.openide.awt.ActionReference; +import org.openide.awt.ActionReferences; +import org.openide.awt.ActionRegistration; +import org.openide.util.HelpCtx; +import org.openide.util.NbBundle; +import org.openide.util.actions.CallableSystemAction; +import org.openide.windows.TopComponent; +import org.openide.windows.WindowManager; +import org.sleuthkit.autopsy.coreutils.ThreadConfined; + +/** + * An Action that opens the Personas window. 
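(Referring back to the hash set parser changes above.) The new default getNextHashEntry() lets callers treat every HashSetParser uniformly: parsers that only know plain MD5 strings hand back HashEntry objects with the optional fields left null, while the .kdb parser now fills in comments. A minimal consumption sketch, assuming a parser that signals end-of-input by returning null as the interface javadoc describes (the helper would need to live in org.sleuthkit.autopsy.modules.hashdatabase and import org.sleuthkit.datamodel.HashEntry and org.sleuthkit.datamodel.TskCoreException):

    // Sketch only; the real loop in ImportCentralRepoDbProgressDialog also batches the
    // entries into CentralRepoFileInstance objects and reports progress.
    static void consume(HashSetParser parser) throws TskCoreException {
        for (HashEntry entry = parser.getNextHashEntry(); entry != null; entry = parser.getNextHashEntry()) {
            String md5 = entry.getMd5Hash();       // always populated
            String comment = entry.getComment();   // null unless the source format carries comments (e.g. .kdb)
            // ... convert the hash and queue it for import here ...
        }
    }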
+ */ + +@ActionID(category = "Tools", id = "org.sleuthkit.autopsy.persona.Personas") +@ActionRegistration(displayName = "#CTL_OpenPersonas", lazy = false) +@ActionReferences(value = { + @ActionReference(path = "Menu/Tools", position = 105) +}) +public final class OpenPersonasAction extends CallableSystemAction { + + private static final long serialVersionUID = 1L; + + private final JMenuItem menuItem; + + + public OpenPersonasAction() { + menuItem = super.getMenuPresenter(); + this.setEnabled(true); + } + + @Override + @ThreadConfined(type = ThreadConfined.ThreadType.AWT) + public void performAction() { + final TopComponent topComponent = WindowManager.getDefault().findTopComponent("PersonasTopComponent"); + if (topComponent != null) { + if (topComponent.isOpened() == false) { + topComponent.open(); + } + topComponent.toFront(); + topComponent.requestActive(); + } + } + + @Override + @NbBundle.Messages("OpenPersonasAction.displayName=Personas") + public String getName() { + return Bundle.OpenPersonasAction_displayName(); + } + + @Override + public HelpCtx getHelpCtx() { + return HelpCtx.DEFAULT_HELP; + } + + @Override + public boolean asynchronous() { + return false; // run on edt + } + + @Override + public void setEnabled(boolean enable) { + super.setEnabled(enable); + menuItem.setEnabled(enable); + } + + @Override + public JMenuItem getMenuPresenter() { + return menuItem; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/persona/PersonasTopComponent.form b/Core/src/org/sleuthkit/autopsy/persona/PersonasTopComponent.form new file mode 100644 index 0000000000..83638e2f02 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/persona/PersonasTopComponent.form @@ -0,0 +1,381 @@ + + + + +
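Stepping back to the hash database hunks earlier in this patch: the repeated "if KNOWN_BAD then TskData.FileKnown.BAD else KNOWN" branches are replaced by asking HashDb.KnownFilesType directly via getFileKnown(), getDisplayName() and isInboxMessagesAllowed(). The enum body itself is not part of this excerpt; the following is only a plausible sketch of it, with the field names and display strings assumed:

    import org.sleuthkit.datamodel.TskData;

    // Plausible sketch only; the real enum is nested in HashDbManager.HashDb and may differ.
    public enum KnownFilesType {

        KNOWN("Known", TskData.FileKnown.KNOWN, false),
        KNOWN_BAD("Notable", TskData.FileKnown.BAD, true);

        private final String displayName;            // field names and display strings are assumptions
        private final TskData.FileKnown fileKnown;
        private final boolean inboxMessagesAllowed;

        KnownFilesType(String displayName, TskData.FileKnown fileKnown, boolean inboxMessagesAllowed) {
            this.displayName = displayName;
            this.fileKnown = fileKnown;
            this.inboxMessagesAllowed = inboxMessagesAllowed;
        }

        String getDisplayName() {          // used by HashSetModel.getFormattedName() above
            return displayName;
        }

        TskData.FileKnown getFileKnown() { // replaces the old KNOWN/KNOWN_BAD branching
            return fileKnown;
        }

        boolean isInboxMessagesAllowed() { // gates sendIngestMessagesCheckBox in HashLookupSettingsPanel
            return inboxMessagesAllowed;
        }
    }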
    [PersonasTopComponent.form: NetBeans GUI Builder layout XML for the Personas window (381 generated lines). Recoverable bindings: the filterResultsTable column headers resolve through Bundle.properties keys PersonasTopComponent.filterResultsTable.columnModel.title0 ("ID") and PersonasTopComponent.filterResultsTable.columnModel.title1 ("Name") via org.openide.util.NbBundle.getMessage(...).]
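The component below ships with hard-coded sample rows in filterResultsTable and accountsTable. Purely as an illustration, not part of this patch, of how the ID/Name results table could be fed from the Persona API that the qa-functional tests at the end of this patch exercise: Persona.getId() is an assumed accessor, while PersonaAccount.getPersonaAccountsForAccount(), getPersona() and getName() all appear in those tests.

    import java.util.Collection;
    import javax.swing.table.DefaultTableModel;
    import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
    import org.sleuthkit.autopsy.centralrepository.datamodel.PersonaAccount;

    // Illustrative sketch only; not part of the patch.
    final class PersonaTableHelper {

        private PersonaTableHelper() {
        }

        /** Builds a two-column (ID, Name) model from the personas attached to a central repo account. */
        static DefaultTableModel personaRowsForAccount(long crAccountId) throws CentralRepoException {
            DefaultTableModel model = new DefaultTableModel(new Object[]{"ID", "Name"}, 0);
            Collection<PersonaAccount> personaAccounts = PersonaAccount.getPersonaAccountsForAccount(crAccountId);
            for (PersonaAccount pa : personaAccounts) {
                // Persona.getId() is assumed; the tests only show getName(), getAliases(), getMetadata(), etc.
                model.addRow(new Object[]{String.valueOf(pa.getPersona().getId()), pa.getPersona().getName()});
            }
            return model;
        }
    }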
    + + + + diff --git a/Core/src/org/sleuthkit/autopsy/persona/PersonasTopComponent.java b/Core/src/org/sleuthkit/autopsy/persona/PersonasTopComponent.java new file mode 100644 index 0000000000..5631f80a78 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/persona/PersonasTopComponent.java @@ -0,0 +1,308 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2020 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.persona; + +import org.openide.util.NbBundle.Messages; +import org.openide.windows.RetainLocation; +import org.openide.windows.TopComponent; +import org.openide.windows.WindowManager; + +/** + * Top component for the Personas tool + * + */ +@TopComponent.Description(preferredID = "PersonasTopComponent", persistenceType = TopComponent.PERSISTENCE_NEVER) +@TopComponent.Registration(mode = "personas", openAtStartup = false) +@RetainLocation("personas") +@SuppressWarnings("PMD.SingularField") +public final class PersonasTopComponent extends TopComponent { + + @Messages({ + "PTopComponent_Name=Personas" + }) + public PersonasTopComponent() { + initComponents(); + setName(Bundle.PTopComponent_Name()); + } + + @Override + public void componentOpened() { + super.componentOpened(); + WindowManager.getDefault().setTopComponentFloating(this, true); + } + + /** + * This method is called from within the constructor to initialize the form. + * WARNING: Do NOT modify this code. The content of this method is always + * regenerated by the Form Editor. 
+ */ + // //GEN-BEGIN:initComponents + private void initComponents() { + + searchButtonGroup = new javax.swing.ButtonGroup(); + splitPane = new javax.swing.JSplitPane(); + searchPanel = new javax.swing.JPanel(); + searchField = new javax.swing.JTextField(); + searchNameRadio = new javax.swing.JRadioButton(); + searchAccountRadio = new javax.swing.JRadioButton(); + filterResultsPane = new javax.swing.JScrollPane(); + filterResultsTable = new javax.swing.JTable(); + searchBtn = new javax.swing.JButton(); + detailsPanel = new javax.swing.JPanel(); + resultNameLbl = new javax.swing.JLabel(); + resultNameField = new javax.swing.JTextField(); + resultAliasesLbl = new javax.swing.JLabel(); + resultAccountsLbl = new javax.swing.JLabel(); + accountsTablePane = new javax.swing.JScrollPane(); + accountsTable = new javax.swing.JTable(); + resultCasesLbl = new javax.swing.JLabel(); + casesListPane = new javax.swing.JScrollPane(); + casesList = new javax.swing.JList<>(); + aliasesListPane = new javax.swing.JScrollPane(); + aliasesList = new javax.swing.JList<>(); + + setMinimumSize(new java.awt.Dimension(400, 400)); + + searchField.setText(org.openide.util.NbBundle.getMessage(PersonasTopComponent.class, "PersonasTopComponent.searchField.text")); // NOI18N + + searchButtonGroup.add(searchNameRadio); + searchNameRadio.setSelected(true); + org.openide.awt.Mnemonics.setLocalizedText(searchNameRadio, org.openide.util.NbBundle.getMessage(PersonasTopComponent.class, "PersonasTopComponent.searchNameRadio.text")); // NOI18N + searchNameRadio.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + searchNameRadioActionPerformed(evt); + } + }); + + searchButtonGroup.add(searchAccountRadio); + org.openide.awt.Mnemonics.setLocalizedText(searchAccountRadio, org.openide.util.NbBundle.getMessage(PersonasTopComponent.class, "PersonasTopComponent.searchAccountRadio.text")); // NOI18N + + filterResultsTable.setModel(new javax.swing.table.DefaultTableModel( + new Object [][] { + {"0", "Johnathn Dough"}, + {"3", "Joe Schmoe"}, + {"2", "Michael Schmoe"}, + {"1", "Ethan Schmoe"} + }, + new String [] { + "ID", "Name" + } + ) { + Class[] types = new Class [] { + java.lang.String.class, java.lang.String.class + }; + + public Class getColumnClass(int columnIndex) { + return types [columnIndex]; + } + }); + filterResultsPane.setViewportView(filterResultsTable); + if (filterResultsTable.getColumnModel().getColumnCount() > 0) { + filterResultsTable.getColumnModel().getColumn(0).setMaxWidth(25); + filterResultsTable.getColumnModel().getColumn(0).setHeaderValue(org.openide.util.NbBundle.getMessage(PersonasTopComponent.class, "PersonasTopComponent.filterResultsTable.columnModel.title0")); // NOI18N + filterResultsTable.getColumnModel().getColumn(1).setHeaderValue(org.openide.util.NbBundle.getMessage(PersonasTopComponent.class, "PersonasTopComponent.filterResultsTable.columnModel.title1")); // NOI18N + } + + org.openide.awt.Mnemonics.setLocalizedText(searchBtn, org.openide.util.NbBundle.getMessage(PersonasTopComponent.class, "PersonasTopComponent.searchBtn.text")); // NOI18N + + javax.swing.GroupLayout searchPanelLayout = new javax.swing.GroupLayout(searchPanel); + searchPanel.setLayout(searchPanelLayout); + searchPanelLayout.setHorizontalGroup( + searchPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(searchPanelLayout.createSequentialGroup() + .addContainerGap() + 
.addGroup(searchPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(filterResultsPane, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE) + .addGroup(searchPanelLayout.createSequentialGroup() + .addGroup(searchPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(searchField) + .addGroup(searchPanelLayout.createSequentialGroup() + .addGroup(searchPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(searchPanelLayout.createSequentialGroup() + .addComponent(searchNameRadio) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(searchAccountRadio)) + .addComponent(searchBtn)) + .addGap(0, 25, Short.MAX_VALUE))) + .addContainerGap()))) + ); + searchPanelLayout.setVerticalGroup( + searchPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(searchPanelLayout.createSequentialGroup() + .addContainerGap() + .addComponent(searchField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addGroup(searchPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(searchNameRadio) + .addComponent(searchAccountRadio)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(searchBtn) + .addGap(18, 18, 18) + .addComponent(filterResultsPane, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE) + .addContainerGap()) + ); + + splitPane.setLeftComponent(searchPanel); + + org.openide.awt.Mnemonics.setLocalizedText(resultNameLbl, org.openide.util.NbBundle.getMessage(PersonasTopComponent.class, "PersonasTopComponent.resultNameLbl.text")); // NOI18N + + resultNameField.setEditable(false); + resultNameField.setText(org.openide.util.NbBundle.getMessage(PersonasTopComponent.class, "PersonasTopComponent.resultNameField.text")); // NOI18N + resultNameField.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + resultNameFieldActionPerformed(evt); + } + }); + + org.openide.awt.Mnemonics.setLocalizedText(resultAliasesLbl, org.openide.util.NbBundle.getMessage(PersonasTopComponent.class, "PersonasTopComponent.resultAliasesLbl.text")); // NOI18N + + org.openide.awt.Mnemonics.setLocalizedText(resultAccountsLbl, org.openide.util.NbBundle.getMessage(PersonasTopComponent.class, "PersonasTopComponent.resultAccountsLbl.text")); // NOI18N + + accountsTable.setModel(new javax.swing.table.DefaultTableModel( + new Object [][] { + {"Email", "jdb@yahoo.com"}, + {"Phone", "865-555-5555"}, + {"Twitter", "@jd93.bread"}, + {null, null} + }, + new String [] { + "Type", "Data" + } + ) { + Class[] types = new Class [] { + java.lang.String.class, java.lang.String.class + }; + boolean[] canEdit = new boolean [] { + false, false + }; + + public Class getColumnClass(int columnIndex) { + return types [columnIndex]; + } + + public boolean isCellEditable(int rowIndex, int columnIndex) { + return canEdit [columnIndex]; + } + }); + accountsTablePane.setViewportView(accountsTable); + + org.openide.awt.Mnemonics.setLocalizedText(resultCasesLbl, org.openide.util.NbBundle.getMessage(PersonasTopComponent.class, "PersonasTopComponent.resultCasesLbl.text")); // NOI18N + + casesList.setModel(new javax.swing.AbstractListModel() { + String[] strings = { "Investigation 13", "Scene 5" }; + public int getSize() 
{ return strings.length; } + public String getElementAt(int i) { return strings[i]; } + }); + casesListPane.setViewportView(casesList); + + aliasesList.setModel(new javax.swing.AbstractListModel() { + String[] strings = { "J.D.", "Fred Smidge", "Ethan Roseman" }; + public int getSize() { return strings.length; } + public String getElementAt(int i) { return strings[i]; } + }); + aliasesListPane.setViewportView(aliasesList); + + javax.swing.GroupLayout detailsPanelLayout = new javax.swing.GroupLayout(detailsPanel); + detailsPanel.setLayout(detailsPanelLayout); + detailsPanelLayout.setHorizontalGroup( + detailsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, detailsPanelLayout.createSequentialGroup() + .addContainerGap() + .addGroup(detailsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) + .addComponent(accountsTablePane, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE) + .addComponent(aliasesListPane) + .addComponent(casesListPane) + .addGroup(javax.swing.GroupLayout.Alignment.LEADING, detailsPanelLayout.createSequentialGroup() + .addComponent(resultNameLbl) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(resultNameField, javax.swing.GroupLayout.DEFAULT_SIZE, 447, Short.MAX_VALUE)) + .addComponent(resultAliasesLbl, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(resultAccountsLbl, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(resultCasesLbl, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) + .addContainerGap()) + ); + detailsPanelLayout.setVerticalGroup( + detailsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(detailsPanelLayout.createSequentialGroup() + .addContainerGap() + .addGroup(detailsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(resultNameLbl) + .addComponent(resultNameField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addGap(18, 18, 18) + .addComponent(resultAliasesLbl) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(aliasesListPane, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addComponent(resultAccountsLbl) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(accountsTablePane, javax.swing.GroupLayout.PREFERRED_SIZE, 153, javax.swing.GroupLayout.PREFERRED_SIZE) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addComponent(resultCasesLbl) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(casesListPane, javax.swing.GroupLayout.DEFAULT_SIZE, 118, Short.MAX_VALUE) + .addContainerGap()) + ); + + splitPane.setRightComponent(detailsPanel); + + javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); + this.setLayout(layout); + layout.setHorizontalGroup( + layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(splitPane) + ); + layout.setVerticalGroup( + 
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(splitPane) + ); + }// //GEN-END:initComponents + + private void searchNameRadioActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_searchNameRadioActionPerformed + // TODO add your handling code here: + }//GEN-LAST:event_searchNameRadioActionPerformed + + private void resultNameFieldActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_resultNameFieldActionPerformed + // TODO add your handling code here: + }//GEN-LAST:event_resultNameFieldActionPerformed + + // Variables declaration - do not modify//GEN-BEGIN:variables + private javax.swing.JTable accountsTable; + private javax.swing.JScrollPane accountsTablePane; + private javax.swing.JList aliasesList; + private javax.swing.JScrollPane aliasesListPane; + private javax.swing.JList casesList; + private javax.swing.JScrollPane casesListPane; + private javax.swing.JPanel detailsPanel; + private javax.swing.JScrollPane filterResultsPane; + private javax.swing.JTable filterResultsTable; + private javax.swing.JLabel resultAccountsLbl; + private javax.swing.JLabel resultAliasesLbl; + private javax.swing.JLabel resultCasesLbl; + private javax.swing.JTextField resultNameField; + private javax.swing.JLabel resultNameLbl; + private javax.swing.JRadioButton searchAccountRadio; + private javax.swing.JButton searchBtn; + private javax.swing.ButtonGroup searchButtonGroup; + private javax.swing.JTextField searchField; + private javax.swing.JRadioButton searchNameRadio; + private javax.swing.JPanel searchPanel; + private javax.swing.JSplitPane splitPane; + // End of variables declaration//GEN-END:variables + +} diff --git a/Core/src/org/sleuthkit/autopsy/timeline/EventsModel.java b/Core/src/org/sleuthkit/autopsy/timeline/EventsModel.java index 146bc156ed..f80eef206b 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/EventsModel.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/EventsModel.java @@ -657,6 +657,17 @@ public final class EventsModel { } return postTagsDeleted(updatedEventIDs); } + + /** + * Updates the events model for a data source added event. + * + * @throws TskCoreException If there is an error reading model data from the + * case database. 
+ */ + synchronized void handleDataSourceAdded() throws TskCoreException { + populateDataSourcesCache(); + invalidateCaches(null); + } /** * Updates the events model for an artifact tag deleted event and publishes @@ -782,7 +793,6 @@ public final class EventsModel { * @throws TskCoreException */ public synchronized void invalidateCaches(Collection updatedEventIDs) throws TskCoreException { - populateDataSourcesCache(); minEventTimeCache.invalidateAll(); maxEventTimeCache.invalidateAll(); idsToEventsCache.invalidateAll(emptyIfNull(updatedEventIDs)); diff --git a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java index cf8e9c46f7..6b41d849d8 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java @@ -783,7 +783,7 @@ public class TimeLineController { break; case DATA_SOURCE_ADDED: future = executor.submit(() -> { - filteredEvents.invalidateCaches(null); + filteredEvents.handleDataSourceAdded(); return null; }); break; diff --git a/Core/test/qa-functional/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoPersonasTest.java b/Core/test/qa-functional/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoPersonasTest.java new file mode 100644 index 0000000000..2b1d72534e --- /dev/null +++ b/Core/test/qa-functional/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoPersonasTest.java @@ -0,0 +1,904 @@ +/* + * Central Repository + * + * Copyright 2020 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.centralrepository.datamodel; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.time.Instant; +import java.util.Collection; +import junit.framework.Assert; +import static junit.framework.Assert.assertTrue; +import junit.framework.TestCase; +import junit.framework.Test; +import org.apache.commons.io.FileUtils; + +import org.netbeans.junit.NbModuleSuite; +import org.openide.util.Exceptions; +import org.sleuthkit.datamodel.Account; +import org.sleuthkit.datamodel.TskData; + + +/** + * + * Tests the Persona API in CentralRepository. 
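Condensed from the test bodies below into one place, the call sequence these tests exercise looks like the following sketch. Every call shown appears verbatim in the tests; only the wrapper class and its placement in org.sleuthkit.autopsy.centralrepository.datamodel are assumed for illustration.

    import java.util.Collection;
    import org.sleuthkit.datamodel.Account;

    // Sketch of the API surface exercised by the tests in this file.
    final class PersonaApiSketch {

        private PersonaApiSketch() {
        }

        static void exerciseBasicFlow() throws CentralRepoException {
            CentralRepository cr = CentralRepository.getInstance();

            // Accounts live in the central repository and are fetched or created by type + identifier.
            CentralRepoAccount.CentralRepoAccountType fbType =
                    cr.getAccountTypeByName(Account.Type.FACEBOOK.getTypeName());
            CentralRepoAccount fbAccount = cr.getOrCreateAccount(fbType, "BalooSherkhan");

            // A persona is created anchored on an account, with a status, justification and confidence.
            PersonaAccount pa = Persona.createPersonaForAccount("Baloo McDog", "comment",
                    Persona.PersonaStatus.ACTIVE, fbAccount, "justification", Persona.Confidence.LOW);
            Persona persona = pa.getPersona();

            // It can then be enriched with aliases and metadata ...
            persona.addAlias("Good Boy", "justification", Persona.Confidence.MEDIUM);
            persona.addMetadata("Color", "Black", "justification", Persona.Confidence.MEDIUM);

            // ... and queried in both directions: accounts of a persona, personas of an account.
            Collection<PersonaAccount> accountsOfPersona = persona.getPersonaAccounts();
            Collection<PersonaAccount> personasOfAccount =
                    PersonaAccount.getPersonaAccountsForAccount(fbAccount.getAccountId());
        }
    }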
+ */ +public class CentralRepoPersonasTest extends TestCase { + + private final Path testDirectory = Paths.get(System.getProperty("java.io.tmpdir"), "CentralRepoDatamodelTest"); + + + private static final String CASE_1_UUID = "case1_uuid"; + private static final String CASE_2_UUID = "case2_uuid"; + private static final String CASE_3_UUID = "case3_uuid"; + private static final String CASE_4_UUID = "case4_uuid"; + + private static final String DS1_DEVICEID = "dataSource1_deviceID"; + private static final String DS2_DEVICEID = "dataSource2_deviceID"; + private static final String DS3_DEVICEID = "dataSource3_deviceID"; + private static final String DS4_DEVICEID = "dataSource4_deviceID"; + private static final String DS5_DEVICEID = "dataSource5_deviceID"; + private static final String DS6_DEVICEID = "dataSource6_deviceID"; + + + private static final long CASE_1_DATA_SOURCE_1_ID = 11; + private static final long CASE_1_DATA_SOURCE_2_ID = 12; + private static final long CASE_2_DATA_SOURCE_1_ID = 21; + + private static final long CASE_3_DATA_SOURCE_1_ID = 31; + private static final long CASE_3_DATA_SOURCE_2_ID = 32; + + private static final long CASE_4_DATA_SOURCE_1_ID = 41; + + + private static final String PHONE_NUM_1 = "+1 441-231-2552"; + + + private static final String FACEBOOK_ID_CATDOG = "BalooSherkhan"; + + private static final String DOG_EMAIL_ID = "superpupper@junglebook.com"; + private static final String CAT_WHATSAPP_ID = "111 222 3333"; + private static final String EMAIL_ID_1 = "rkipling@jungle.book"; + + private static final String HOLMES_SKYPE_ID = "live:holmes@221baker.com"; + + + private static final String DOG_PERSONA_NAME = "Baloo McDog"; + private static final String CAT_PERSONA_NAME = "SherKhan"; + private static final String HOLMES_PERSONA_NAME = "Sherlock Holmes"; + + + private CorrelationCase case1; + private CorrelationCase case2; + private CorrelationCase case3; + private CorrelationCase case4; + + private CorrelationDataSource dataSource1fromCase1; + private CorrelationDataSource dataSource2fromCase1; + private CorrelationDataSource dataSource1fromCase2; + + private CorrelationDataSource dataSource1fromCase3; + private CorrelationDataSource dataSource2fromCase3; + private CorrelationDataSource dataSource1fromCase4; + + private CentralRepoOrganization org1; + private CentralRepoOrganization org2; + + private CentralRepoAccount.CentralRepoAccountType phoneAccountType; + private CentralRepoAccount.CentralRepoAccountType emailAccountType; + private CentralRepoAccount.CentralRepoAccountType facebookAccountType; + private CentralRepoAccount.CentralRepoAccountType textnowAccountType; + private CentralRepoAccount.CentralRepoAccountType whatsAppAccountType; + private CentralRepoAccount.CentralRepoAccountType skypeAccountType; + + + private CorrelationAttributeInstance.Type phoneInstanceType; + private CorrelationAttributeInstance.Type emailInstanceType; + private CorrelationAttributeInstance.Type facebookInstanceType; + private CorrelationAttributeInstance.Type textnowInstanceType; + private CorrelationAttributeInstance.Type whatsAppInstanceType; + private CorrelationAttributeInstance.Type skypeInstanceType; + + + // NbModuleSuite requires these tests use Junit 3.8 + // Extension of the TestCase class is how tests were defined and used + // in Junit 3.8 + public static Test suite() { + NbModuleSuite.Configuration conf = NbModuleSuite.createConfiguration(CentralRepoPersonasTest.class). + clusters(".*"). 
+ enableModules(".*"); + return conf.suite(); + } + + // This function is run before every test, NOT before the entire collection of + // tests defined in this class are run. + @Override + public void setUp() throws CentralRepoException, IOException { + // Tear down the previous run, if need be. + if (Files.exists(testDirectory)) { + tearDown(); + } + + // Create the test directory + Files.createDirectory(testDirectory); + + final String CR_DB_NAME = "testcentralrepo.db"; + + + + SqliteCentralRepoSettings sqliteSettings = new SqliteCentralRepoSettings(); + sqliteSettings.setDbName(CR_DB_NAME); + sqliteSettings.setDbDirectory(testDirectory.toString()); + + if (!sqliteSettings.dbDirectoryExists() && !sqliteSettings.createDbDirectory()) { + Assert.fail("Failed to create central repo directory."); + } + + RdbmsCentralRepoFactory factory = new RdbmsCentralRepoFactory(CentralRepoPlatforms.SQLITE, sqliteSettings); + if (!factory.initializeDatabaseSchema() || !factory.insertDefaultDatabaseContent()) { + Assert.fail("Failed to initialize central repo database"); + } + + sqliteSettings.saveSettings(); + CentralRepoDbUtil.setUseCentralRepo(true); + CentralRepoDbManager.saveDbChoice(CentralRepoDbChoice.SQLITE); + + Path crDbFilePath = Paths.get(testDirectory.toString(), CR_DB_NAME); + if (!Files.exists(crDbFilePath)) { + Assert.fail("Failed to create central repo database, should be located at + " + crDbFilePath); + } + + + // Set up some default objects to be used by the tests + try { + case1 = new CorrelationCase(CASE_1_UUID, "case1"); + case1 = CentralRepository.getInstance().newCase(case1); + assertTrue("Failed to create test object case1", case1 != null); + + case2 = new CorrelationCase(CASE_2_UUID, "case2"); + case2 = CentralRepository.getInstance().newCase(case2); + assertTrue("Failed to create test object case2", case2 != null); + + case3 = new CorrelationCase(CASE_3_UUID, "case3"); + case3 = CentralRepository.getInstance().newCase(case3); + assertTrue("Failed to create test object case3", case3 != null); + + case4 = new CorrelationCase(CASE_4_UUID, "case4"); + case4 = CentralRepository.getInstance().newCase(case4); + assertTrue("Failed to create test object case4", case4 != null); + + dataSource1fromCase1 = new CorrelationDataSource(case1, DS1_DEVICEID, "dataSource1", CASE_1_DATA_SOURCE_1_ID, null, null, null); + CentralRepository.getInstance().newDataSource(dataSource1fromCase1); + dataSource1fromCase1 = CentralRepository.getInstance().getDataSource(case1, dataSource1fromCase1.getDataSourceObjectID()); + assertTrue("Failed to create test object dataSource1fromCase1", dataSource1fromCase1 != null); + + dataSource2fromCase1 = new CorrelationDataSource(case1, DS2_DEVICEID, "dataSource2", CASE_1_DATA_SOURCE_2_ID, null, null, null); + CentralRepository.getInstance().newDataSource(dataSource2fromCase1); + dataSource2fromCase1 = CentralRepository.getInstance().getDataSource(case1, dataSource2fromCase1.getDataSourceObjectID()); + assertTrue("Failed to create test object dataSource2fromCase1", dataSource2fromCase1 != null); + + dataSource1fromCase2 = new CorrelationDataSource(case2, DS3_DEVICEID, "dataSource3", CASE_2_DATA_SOURCE_1_ID, null, null, null); + CentralRepository.getInstance().newDataSource(dataSource1fromCase2); + dataSource1fromCase2 = CentralRepository.getInstance().getDataSource(case2, dataSource1fromCase2.getDataSourceObjectID()); + assertTrue("Failed to create test object dataSource1fromCase2", dataSource1fromCase2 != null); + + dataSource1fromCase3 = new CorrelationDataSource(case3, 
DS4_DEVICEID, "dataSource4", CASE_3_DATA_SOURCE_1_ID, null, null, null); + CentralRepository.getInstance().newDataSource(dataSource1fromCase3); + dataSource1fromCase3 = CentralRepository.getInstance().getDataSource(case3, dataSource1fromCase3.getDataSourceObjectID()); + assertTrue("Failed to create test object dataSource1fromCase3", dataSource1fromCase3 != null); + + dataSource2fromCase3 = new CorrelationDataSource(case3, DS5_DEVICEID, "dataSource5", CASE_3_DATA_SOURCE_2_ID, null, null, null); + CentralRepository.getInstance().newDataSource(dataSource2fromCase3); + dataSource2fromCase3 = CentralRepository.getInstance().getDataSource(case3, dataSource2fromCase3.getDataSourceObjectID()); + assertTrue("Failed to create test object dataSource2fromCase3", dataSource2fromCase3 != null); + + dataSource1fromCase4 = new CorrelationDataSource(case4, DS6_DEVICEID, "dataSource6", CASE_4_DATA_SOURCE_1_ID, null, null, null); + CentralRepository.getInstance().newDataSource(dataSource1fromCase4); + dataSource1fromCase4 = CentralRepository.getInstance().getDataSource(case4, dataSource1fromCase4.getDataSourceObjectID()); + assertTrue("Failed to create test object dataSource1fromCase4", dataSource1fromCase4 != null); + + org1 = new CentralRepoOrganization("org1"); + org1 = CentralRepository.getInstance().newOrganization(org1); + + org2 = new CentralRepoOrganization("org2"); + org2 = CentralRepository.getInstance().newOrganization(org2); + + // get some correltion types for different account types, for later use + phoneAccountType = CentralRepository.getInstance().getAccountTypeByName( Account.Type.PHONE.getTypeName()); + phoneInstanceType = CentralRepository.getInstance().getCorrelationTypeById(phoneAccountType.getCorrelationTypeId()); + assertTrue("getCorrelationTypeById(PHONE) returned null", phoneInstanceType != null); + + emailAccountType = CentralRepository.getInstance().getAccountTypeByName( Account.Type.EMAIL.getTypeName()); + emailInstanceType = CentralRepository.getInstance().getCorrelationTypeById(emailAccountType.getCorrelationTypeId()); + assertTrue("getCorrelationTypeById(EMAIL) returned null", emailInstanceType != null); + + facebookAccountType = CentralRepository.getInstance().getAccountTypeByName( Account.Type.FACEBOOK.getTypeName()); + facebookInstanceType = CentralRepository.getInstance().getCorrelationTypeById(facebookAccountType.getCorrelationTypeId()); + assertTrue("getCorrelationTypeById(FACEBOOK) returned null", facebookInstanceType != null); + + textnowAccountType = CentralRepository.getInstance().getAccountTypeByName( Account.Type.TEXTNOW.getTypeName()); + textnowInstanceType = CentralRepository.getInstance().getCorrelationTypeById(textnowAccountType.getCorrelationTypeId()); + assertTrue("getCorrelationTypeById(TEXTNOW) returned null", textnowInstanceType != null); + + whatsAppAccountType = CentralRepository.getInstance().getAccountTypeByName( Account.Type.WHATSAPP.getTypeName()); + whatsAppInstanceType = CentralRepository.getInstance().getCorrelationTypeById(whatsAppAccountType.getCorrelationTypeId()); + assertTrue("getCorrelationTypeById(WHATSAPP) returned null", whatsAppInstanceType != null); + + skypeAccountType = CentralRepository.getInstance().getAccountTypeByName( Account.Type.SKYPE.getTypeName()); + skypeInstanceType = CentralRepository.getInstance().getCorrelationTypeById(skypeAccountType.getCorrelationTypeId()); + assertTrue("getCorrelationTypeById(SKYPE) returned null", skypeInstanceType != null); + + + } catch (CentralRepoException ex) { + Exceptions.printStackTrace(ex); 
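            // Aside, illustrative only (the helper below is hypothetical): the six account-type
            // look-ups in setUp() all follow one pattern, which could be expressed as:
            //
            //     private static CorrelationAttributeInstance.Type correlationTypeFor(Account.Type accountType) throws CentralRepoException {
            //         CentralRepoAccount.CentralRepoAccountType crType =
            //                 CentralRepository.getInstance().getAccountTypeByName(accountType.getTypeName());
            //         return CentralRepository.getInstance().getCorrelationTypeById(crType.getCorrelationTypeId());
            //     }
            //
            //     // e.g. phoneInstanceType = correlationTypeFor(Account.Type.PHONE);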
+ Assert.fail(ex.getMessage()); + } + + } + + // This function is run after every test, NOT after the entire collection of + // tests defined in the class are run. + @Override + public void tearDown() throws CentralRepoException, IOException { + // Close and delete the test case and central repo db + if (CentralRepository.isEnabled()) { + CentralRepository.getInstance().shutdownConnections(); + } + FileUtils.deleteDirectory(testDirectory.toFile()); + } + + + /** + * Basic tests for: + * - Persona creation, + * - adding aliases and metadata + * - add additional accounts to Persona + * - Get Persona(s) by account + * - get Account(s) by Persona + * + */ + public void testBasicPersonaCreation() { + + //final String DATE_FORMAT_STRING = "yyyy/MM/dd HH:mm:ss"; //NON-NLS + //final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat(DATE_FORMAT_STRING, Locale.US); + + try { + + // Step 1: Create an account + CentralRepoAccount phoneAccount1 = CentralRepository.getInstance() + .getOrCreateAccount(phoneAccountType, PHONE_NUM_1); + + + // Step 2: Create a Persona for the Account + + String comment = "The best dog ever"; + Persona.PersonaStatus status = Persona.PersonaStatus.ACTIVE; + PersonaAccount pa1 = Persona.createPersonaForAccount(DOG_PERSONA_NAME, comment , status, phoneAccount1, "Because I said so", Persona.Confidence.LOW ); + + + Persona dogPersona = pa1.getPersona(); + + // Verify Persona name, status etc. + Assert.assertEquals(DOG_PERSONA_NAME, pa1.getPersona().getName()); + Assert.assertEquals(status.name(), dogPersona.getStatus().name()); + Assert.assertTrue(dogPersona.getExaminer().getLoginName().equalsIgnoreCase(pa1.getExaminer().getLoginName())); + + // Assert that the persona was created by the currently logged in user + Assert.assertTrue(dogPersona.getExaminer().getLoginName().equalsIgnoreCase(System.getProperty("user.name"))); + + // Assert that Persona was created within the last 10 mins + Assert.assertTrue(Instant.now().toEpochMilli() - pa1.getDateAdded() < 600 * 1000); + Assert.assertEquals(pa1.getConfidence(), Persona.Confidence.LOW); + + // Step 3. 
Add Persona Aliases + PersonaAlias alias1 = dogPersona.addAlias("Good Boy", "Coz he's the best dog ever", Persona.Confidence.MEDIUM); + PersonaAlias alias2 = dogPersona.addAlias("WoofWoof", "How many dumb comments can I come up with?", Persona.Confidence.LOW); + + Assert.assertNotNull(alias1); + Assert.assertNotNull(alias2); + + // get all aliases for persona + Collection aliases = dogPersona.getAliases(); + Assert.assertEquals(2, aliases.size()); + for (PersonaAlias alias: aliases) { + //System.out.println("Alias: "+ alias.getAlias()) ; + Assert.assertFalse(alias.getAlias().isEmpty()); + } + + + //Step 4: Add Persona metadata + PersonaMetadata metadata1 = dogPersona.addMetadata("Color", "Black", "He's got thick black hair.", Persona.Confidence.MEDIUM); + PersonaMetadata metadata2 = dogPersona.addMetadata("Gender", "Male", "Because...", Persona.Confidence.LOW); + + Assert.assertNotNull(metadata1); + Assert.assertNotNull(metadata2); + + // get all metadata for persona + Collection metadataList = dogPersona.getMetadata(); + Assert.assertEquals(2, metadataList.size()); + for (PersonaMetadata md: metadataList) { + //System.out.println(String.format("Metadata: %s : %s", md.getName(), md.getValue())) ; + Assert.assertFalse(md.getName().isEmpty()); + Assert.assertFalse(md.getValue().isEmpty()); + } + + + // Step 5: associate another account with the same persona + CentralRepoAccount catdogFBAccount = CentralRepository.getInstance() + .getOrCreateAccount(facebookAccountType, FACEBOOK_ID_CATDOG); + + // Add an account to persona + dogPersona.addAccountToPersona(catdogFBAccount, "Looks like dog, barks like a dog...", Persona.Confidence.MEDIUM); + + // Get all accounts for the persona... + Collection personaAccounts = dogPersona.getPersonaAccounts(); + + Assert.assertEquals(2, personaAccounts.size()); + + for (PersonaAccount pa: personaAccounts) { + //System.out.println(String.format("PersonaAccount: Justification = %s : Date Added = %s", pa.getJustification(), DATE_FORMAT.format(new Date(pa.getDateAdded())))) ; + Assert.assertFalse(pa.getJustification().isEmpty()); + Assert.assertFalse(pa.getAccount().getTypeSpecificId().isEmpty()); + Assert.assertTrue(pa.getDateAdded() > 0); + Assert.assertTrue(pa.getPersona().getCreatedDate()> 0); + } + + // Step 6: Create a second Persona that shares a common account with another persona + + String comment2 = "The fiercest cat alive."; + PersonaAccount pa2 = Persona.createPersonaForAccount(CAT_PERSONA_NAME, comment2 , Persona.PersonaStatus.ACTIVE, catdogFBAccount, "Smells like a cat.", Persona.Confidence.LOW ); + Assert.assertNotNull(pa2); + Assert.assertTrue(pa2.getPersona().getName().equalsIgnoreCase(CAT_PERSONA_NAME)); + + + // Get ALL personas for an account + Collection personaAccounts2 = PersonaAccount.getPersonaAccountsForAccount(catdogFBAccount.getAccountId()); + + Assert.assertEquals(2, personaAccounts2.size()); + for (PersonaAccount pa: personaAccounts2) { + //System.out.println(String.format("PersonaAccount: Justification = %s : Date Added = %s", pa.getJustification(), DATE_FORMAT.format(new Date(pa.getDateAdded())))) ; + Assert.assertFalse(pa.getJustification().isEmpty()); + Assert.assertFalse(pa.getAccount().getTypeSpecificId().isEmpty()); + Assert.assertTrue(pa.getDateAdded() > 0); + Assert.assertTrue(pa.getPersona().getCreatedDate()> 0); + Assert.assertFalse(pa.getPersona().getName().isEmpty()); + } + + + } catch (CentralRepoException ex) { + Assert.fail("Didn't expect an exception here. 
Exception: " + ex); + } + } + + /** + * Tests Personas & X_Accounts and X_instances in the context of Case/data source. + * There are 4 Cases. + * - Case1 has 2 data sources, case2 has 1. + * - Case3 has 2 data sources, case4 has 1. + * There are 3 personas - a Cat, a Dog, and Sherlock Holmes. + * Cat & Dog share a FB account - with 3 instances split over the 2 cases - Case1 & Case2. + * Dog has his own email account - with one instance in Case1. + * Cat has his own WhatsApp account - with 2 instances, in Case1 & Case2. + * Sherlock has a Skype account - with 1 instance in Case 3. + * Case 4 has no personas or accounts. + */ + public void testPersonaWithCases() { + + try { + // Create an account - Cat and Dog have a shared FB account + CentralRepoAccount catdogFBAccount = CentralRepository.getInstance() + .getOrCreateAccount(facebookAccountType, FACEBOOK_ID_CATDOG); + + + // Create account instance attribute for that account, on Case 1, DS 1 + CorrelationAttributeInstance fbAcctInstance1 = new CorrelationAttributeInstance(facebookInstanceType, FACEBOOK_ID_CATDOG, + -1, + case1, + dataSource1fromCase1, + "path1", + "", + TskData.FileKnown.UNKNOWN, + 1001L, + catdogFBAccount.getAccountId()); + CentralRepository.getInstance().addArtifactInstance(fbAcctInstance1); + + + // Create account instance attribute for that account, on Case 1, DS 2 + CorrelationAttributeInstance fbAcctInstance2 = new CorrelationAttributeInstance(facebookInstanceType, FACEBOOK_ID_CATDOG, + -1, + case1, + dataSource2fromCase1, + "path2", + "", + TskData.FileKnown.UNKNOWN, + 1002L, catdogFBAccount.getAccountId()); + + CentralRepository.getInstance().addArtifactInstance(fbAcctInstance2); + + + // Create account instance attribute for that account, on Case 2, DS 1 + CorrelationAttributeInstance fbAcctInstance3 = new CorrelationAttributeInstance(facebookInstanceType, FACEBOOK_ID_CATDOG, + -1, + case2, + dataSource1fromCase2, + "path3", + "", + TskData.FileKnown.UNKNOWN, + 1003L, catdogFBAccount.getAccountId()); + CentralRepository.getInstance().addArtifactInstance(fbAcctInstance3); + + + // Create Persona for the Dog, using the shared FB account + String comment = "The best dog ever"; + Persona.PersonaStatus status = Persona.PersonaStatus.ACTIVE; + PersonaAccount pa1 = Persona.createPersonaForAccount(DOG_PERSONA_NAME, + comment , + status, catdogFBAccount, "Because I said so", Persona.Confidence.LOW ); + Persona dogPersona = pa1.getPersona(); + + + + // create a second persona for the same account - Cat has the same FB account as dog + String comment2 = "The fiercest cat alive."; + PersonaAccount pa2 = Persona.createPersonaForAccount(CAT_PERSONA_NAME, + comment2 , Persona.PersonaStatus.ACTIVE, + catdogFBAccount, "Smells like a cat.", Persona.Confidence.LOW ); + + Persona catPersona = pa2.getPersona(); + Assert.assertNotNull(pa2); + Assert.assertTrue(pa2.getPersona().getName().equalsIgnoreCase(CAT_PERSONA_NAME)); + + + + // Add a 2nd account to the Dog - dog has his own email + CentralRepoAccount dogEmailAccount = CentralRepository.getInstance() + .getOrCreateAccount(emailAccountType, DOG_EMAIL_ID); + + // Add an instance of dog email + CorrelationAttributeInstance dogEmailAcctInstance = new CorrelationAttributeInstance(emailInstanceType, DOG_EMAIL_ID, + -1, + case1, + dataSource2fromCase1, + "path3", + "", + TskData.FileKnown.UNKNOWN, + 1002L, + dogEmailAccount.getAccountId()); + + CentralRepository.getInstance().addArtifactInstance(dogEmailAcctInstance); + + PersonaAccount pa3 = 
dogPersona.addAccountToPersona(dogEmailAccount, "That's definitely a dog email account", Persona.Confidence.MEDIUM); + Assert.assertNotNull(pa3); + Assert.assertTrue(pa3.getPersona().getName().equalsIgnoreCase(DOG_PERSONA_NAME)); + + + // create a WhatsApp account for cat, add 2 instances, and then add that to Cat persona + CentralRepoAccount catWhatsAppAccount = CentralRepository.getInstance() + .getOrCreateAccount(whatsAppAccountType, CAT_WHATSAPP_ID); + + // Add 2 instances of cat whatsApp + CorrelationAttributeInstance catWhatsAppAccountInstance1 = new CorrelationAttributeInstance(whatsAppInstanceType, CAT_WHATSAPP_ID, + -1, + case1, + dataSource1fromCase1, + "path4", + "", + TskData.FileKnown.UNKNOWN, + 1005L, + catWhatsAppAccount.getAccountId()); + CentralRepository.getInstance().addArtifactInstance(catWhatsAppAccountInstance1); + + CorrelationAttributeInstance catWhatsAppAccountInstance2 = new CorrelationAttributeInstance(whatsAppInstanceType, CAT_WHATSAPP_ID, + -1, + case2, + dataSource1fromCase2, + "path5", + "", + TskData.FileKnown.UNKNOWN, + 1006L, + catWhatsAppAccount.getAccountId()); + CentralRepository.getInstance().addArtifactInstance(catWhatsAppAccountInstance2); + + + PersonaAccount pa4 = catPersona.addAccountToPersona(catWhatsAppAccount, "The cat has a WhatsApp account", Persona.Confidence.MEDIUM); + Assert.assertNotNull(pa4); + Assert.assertTrue(pa4.getPersona().getName().equalsIgnoreCase(CAT_PERSONA_NAME)); + + + + Collection dogPersonaAccounts = dogPersona.getPersonaAccounts(); + Assert.assertEquals(2, dogPersonaAccounts.size()); // Dog has 2 accounts. + for (PersonaAccount pa : dogPersonaAccounts) { + Assert.assertTrue(pa.getAccount().getTypeSpecificId().equalsIgnoreCase(FACEBOOK_ID_CATDOG) + || pa.getAccount().getTypeSpecificId().equalsIgnoreCase(DOG_EMAIL_ID)); + // System.out.println("Dog Account id : " + acct.getTypeSpecificId()); + } + + + Collection catPersonaAccounts = catPersona.getPersonaAccounts(); + Assert.assertEquals(2, catPersonaAccounts.size()); // cat has 2 accounts. + for (PersonaAccount pa:catPersonaAccounts) { + //System.out.println("Cat Account id : " + acct.getTypeSpecificId()); + Assert.assertTrue(pa.getAccount().getTypeSpecificId().equalsIgnoreCase(FACEBOOK_ID_CATDOG) + || pa.getAccount().getTypeSpecificId().equalsIgnoreCase(CAT_WHATSAPP_ID)); + } + + // create account and Persona for Sherlock Holmes. + // Create a Skype Account + CentralRepoAccount holmesSkypeAccount = CentralRepository.getInstance() + .getOrCreateAccount(skypeAccountType, HOLMES_SKYPE_ID); + + // Add an instance of Skype account to Case3/DS1 + CorrelationAttributeInstance skypeAcctInstance = new CorrelationAttributeInstance(skypeInstanceType, HOLMES_SKYPE_ID, + -1, + case3, + dataSource1fromCase3, + "path8", + "", + TskData.FileKnown.UNKNOWN, + 1011L, + holmesSkypeAccount.getAccountId()); + CentralRepository.getInstance().addArtifactInstance(skypeAcctInstance); + + + // Create a persona for the Skype account + PersonaAccount pa5 = Persona.createPersonaForAccount(HOLMES_PERSONA_NAME, + "Has a Pipe in his mouth." , Persona.PersonaStatus.ACTIVE, + holmesSkypeAccount, "The name says it all.", Persona.Confidence.LOW ); + + Persona holmesPersona = pa5.getPersona(); + Assert.assertNotNull(pa5); + Assert.assertTrue(pa5.getPersona().getName().equalsIgnoreCase(HOLMES_PERSONA_NAME)); + + + + // Test getting cases for a Persona + Collection dogCases = dogPersona.getCases(); + Assert.assertEquals(2, dogCases.size()); // dog appears in 2 cases. 
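+ // The dog persona's accounts (the shared Facebook account and the dog email account) only have instances in Case1 and Case2, so those are the only case UUIDs expected below.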
+ for (CorrelationCase dc: dogCases) { + Assert.assertTrue(dc.getCaseUUID().equalsIgnoreCase(CASE_1_UUID) + || dc.getCaseUUID().equalsIgnoreCase(CASE_2_UUID)); + //System.out.println("Dog Case UUID : " + dc.getCaseUUID()); + } + + Collection catCases = catPersona.getCases(); + Assert.assertEquals(2, catCases.size()); // cat appears in 2 cases. + for (CorrelationCase cc: catCases) { + Assert.assertTrue(cc.getCaseUUID().equalsIgnoreCase(CASE_1_UUID) + || cc.getCaseUUID().equalsIgnoreCase(CASE_2_UUID)); + //System.out.println("Cat Case UUID : " + cc.getCaseUUID()); + } + + Collection holmesCases = holmesPersona.getCases(); + Assert.assertEquals(1, holmesCases.size()); // Holmes appears in 1 case. + for (CorrelationCase hc: holmesCases) { + Assert.assertTrue(hc.getCaseUUID().equalsIgnoreCase(CASE_3_UUID)); + //System.out.println("Holmes Case UUID : " + hc.getCaseUUID()); + } + + + // Test getting data sources for a Persona + Collection dogDatasources = dogPersona.getDataSources(); + Assert.assertEquals(3, dogDatasources.size()); // dog appears in 2 cases in 3 data sources. + for (CorrelationDataSource dds: dogDatasources) { + Assert.assertTrue(dds.getDeviceID().equalsIgnoreCase(DS1_DEVICEID) + || dds.getDeviceID().equalsIgnoreCase(DS2_DEVICEID) + || dds.getDeviceID().equalsIgnoreCase(DS3_DEVICEID)); + //System.out.println("Dog DS DeviceID : " + dds.getDeviceID()); + } + + Collection catDatasources = catPersona.getDataSources(); + Assert.assertEquals(3, catDatasources.size()); // cat appears in 2 cases in 3 data sources. + for (CorrelationDataSource cds: catDatasources) { + Assert.assertTrue(cds.getDeviceID().equalsIgnoreCase(DS1_DEVICEID) + || cds.getDeviceID().equalsIgnoreCase(DS2_DEVICEID) + || cds.getDeviceID().equalsIgnoreCase(DS3_DEVICEID)); + //System.out.println("Cat DS DeviceID : " + cds.getDeviceID()); + } + + Collection holmesDatasources = holmesPersona.getDataSources(); + Assert.assertEquals(1, holmesDatasources.size()); // Holmes appears in 1 case in 1 data source. + for (CorrelationDataSource hds: holmesDatasources) { + Assert.assertTrue(hds.getDeviceID().equalsIgnoreCase(DS4_DEVICEID)); + //System.out.println("Holmes DS DeviceID : " + hds.getDeviceID()); + } + + // Test getting personas by case. + + // Test getting all Personas for Case 1 - Case1 has 2 personas - Cat & Dog + Collection case1Persona = Persona.getPersonasForCase(case1); + Assert.assertEquals(2, case1Persona.size()); // + + // Test getting all Personas for Case 2 - Case2 has 2 personas - Cat & Dog + Collection case2Persona = Persona.getPersonasForCase(case2); + Assert.assertEquals(2, case2Persona.size()); // + + // Test getting all Personas for Case 3 - Case3 has 1 persona - Holmes + Collection case3Persona = Persona.getPersonasForCase(case3); + Assert.assertEquals(1, case3Persona.size()); // + + // Test getting all Personas for Case 4 - Case4 has no personas + Collection case4Persona = Persona.getPersonasForCase(case4); + Assert.assertEquals(0, case4Persona.size()); // + + + // Test getting personas by data source. 
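+ // Expected counts per data source: DS1-DS3 hold instances of the shared Cat/Dog accounts (2 personas each), DS4 holds the Holmes Skype instance (1 persona), and DS5/DS6 hold no account instances (0 personas).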
+ + // Test that getting all Personas for DS 1 + Collection ds1Persona = Persona.getPersonasForDataSource(dataSource1fromCase1); + Assert.assertEquals(2, ds1Persona.size()); // + + Collection ds2Persona = Persona.getPersonasForDataSource(dataSource2fromCase1); + Assert.assertEquals(2, ds2Persona.size()); // + + Collection ds3Persona = Persona.getPersonasForDataSource(dataSource1fromCase2); + Assert.assertEquals(2, ds3Persona.size()); // + + Collection ds4Persona = Persona.getPersonasForDataSource(dataSource1fromCase3); + Assert.assertEquals(1, ds4Persona.size()); // + + Collection ds5Persona = Persona.getPersonasForDataSource(dataSource2fromCase3); + Assert.assertEquals(0, ds5Persona.size()); // + + Collection ds6Persona = Persona.getPersonasForDataSource(dataSource1fromCase4); + Assert.assertEquals(0, ds6Persona.size()); // + + + } + catch (CentralRepoException | CorrelationAttributeNormalizationException ex) { + Exceptions.printStackTrace(ex); + Assert.fail(ex.getMessage()); + } + + } + /** + * Tests edge cases, error cases + */ + public void testPersonaCreationEdgeCases() { + + // Test1: create Persona without specifying a name + { + try { + // Create an email account + CentralRepoAccount emailAccount1 = CentralRepository.getInstance() + .getOrCreateAccount(emailAccountType, EMAIL_ID_1); + + // Create a Persona with no name + PersonaAccount pa1 = Persona.createPersonaForAccount(null, "A persona with no name", + Persona.PersonaStatus.ACTIVE, emailAccount1, "The person lost his name", Persona.Confidence.LOW); + + // Verify Persona has a default name + Assert.assertEquals("Unknown", pa1.getPersona().getName()); + + } catch (CentralRepoException ex) { + Assert.fail("No name persona test failed. Exception: " + ex); + } + } + + + } + + /** + * Tests searching of Persona by persona name. + */ + public void testPersonaSearchByName() { + + // Test1: create Personas with similar names. + { + try { + // Create an email account + CentralRepoAccount emailAccount1 = CentralRepository.getInstance() + .getOrCreateAccount(emailAccountType, EMAIL_ID_1); + + // Create all personas with same comment. 
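+ // Note: the searches below assume Persona.getPersonaByName() does a case-insensitive substring match on the persona name (e.g. "kipling", "IPL" and the empty string each match multiple personas).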
+ final String personaComment = "Creator of Jungle Book."; + + // Create a Persona with name "Rudyard Kipling" + Persona.createPersonaForAccount("Rudyard Kipling", personaComment, + Persona.PersonaStatus.ACTIVE, emailAccount1, "", Persona.Confidence.LOW); + + // Create a Persona with name "Rudy" + Persona.createPersonaForAccount("Rudy", personaComment, + Persona.PersonaStatus.ACTIVE, emailAccount1, "", Persona.Confidence.LOW); + + + // Create a Persona with name "Kipling Senior" + Persona.createPersonaForAccount("Kipling Senior", personaComment, + Persona.PersonaStatus.ACTIVE, emailAccount1, "", Persona.Confidence.LOW); + + // Create a Persona with name "Senor Kipling" + Persona.createPersonaForAccount("Senor Kipling", personaComment, + Persona.PersonaStatus.ACTIVE, emailAccount1, "", Persona.Confidence.LOW); + + + // Test 1 Search "kipling" - expect 3 matches + Collection personaSearchResult = Persona.getPersonaByName("kipling"); + Assert.assertEquals(3, personaSearchResult.size()); + for (Persona p: personaSearchResult) { + Assert.assertTrue(p.getComment().equalsIgnoreCase(personaComment)); + } + + // Search 'Rudy' - expect 2 matches + personaSearchResult = Persona.getPersonaByName("Rudy"); + Assert.assertEquals(2, personaSearchResult.size()); + + + // Search 'Sen' - expect 2 matches + personaSearchResult = Persona.getPersonaByName("Sen"); + Assert.assertEquals(2, personaSearchResult.size()); + + + // Search 'IPL' - expect 3 matches + personaSearchResult = Persona.getPersonaByName("IPL"); + Assert.assertEquals(3, personaSearchResult.size()); + + + // Search "Rudyard Kipling" - expect 1 match + personaSearchResult = Persona.getPersonaByName("Rudyard Kipling"); + Assert.assertEquals(1, personaSearchResult.size()); + Assert.assertTrue(personaSearchResult.iterator().next().getName().equalsIgnoreCase("Rudyard Kipling")); + + // Search '' - expect ALL (4) to match + personaSearchResult = Persona.getPersonaByName(""); + Assert.assertEquals(4, personaSearchResult.size()); + + + } catch (CentralRepoException ex) { + Assert.fail("Persona search by name test failed. Exception: " + ex); + } + } + + + } + + + /** + * Tests searching of Persona by account identifier substrings. + */ + public void testPersonaSearchByAccountIdentifier() { + + // Test1: create Personas associated with similar account identifiers. + { + try { + // Create an email account + CentralRepoAccount emailAccount1 = CentralRepository.getInstance() + .getOrCreateAccount(emailAccountType, "joeexotic555@yahoo.com"); + + // Create all personas with the same comment. 
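+ // Note: the searches below assume PersonaAccount.getPersonaAccountsForAccountIdentifier() matches on substrings of the account identifier (e.g. "joe", "999", "@"), regardless of which persona owns the account.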
+ final String personaComment = "Comment used to create a persona"; + + // Create a Persona with name "Joe Exotic" associated with the email address + Persona.createPersonaForAccount("Joe Exotic", personaComment, + Persona.PersonaStatus.ACTIVE, emailAccount1, "", Persona.Confidence.LOW); + + // Create a Persona with name "Tiger King" associated with the email address + Persona.createPersonaForAccount("Tiger King", personaComment, + Persona.PersonaStatus.ACTIVE, emailAccount1, "", Persona.Confidence.LOW); + + + + // Create a phone account with number "+1 999 555 3366" + CentralRepoAccount phoneAccount1 = CentralRepository.getInstance() + .getOrCreateAccount(phoneAccountType, "+1 999 555 3366"); + + + // Create a Persona with name "Carol Baskin" associated with the phone number + Persona.createPersonaForAccount("Carol Baskin", personaComment, + Persona.PersonaStatus.ACTIVE, phoneAccount1, "", Persona.Confidence.LOW); + + // Create a Persona with no name associated with the phone number + Persona.createPersonaForAccount(null, personaComment, + Persona.PersonaStatus.ACTIVE, phoneAccount1, "", Persona.Confidence.LOW); + + + + // Create another email account + CentralRepoAccount emailAccount2 = CentralRepository.getInstance() + .getOrCreateAccount(emailAccountType, "jodoe@mail.com"); + + + + // Create a Persona with name "John Doe" associated with the email address + Persona.createPersonaForAccount("John Doe", personaComment, + Persona.PersonaStatus.ACTIVE, emailAccount2, "", Persona.Confidence.LOW); + + Persona.createPersonaForAccount("Joanne Doe", personaComment, + Persona.PersonaStatus.ACTIVE, emailAccount2, "", Persona.Confidence.LOW); + + + + // Search on 'joe' - should get 2 + Collection personaSearchResult = PersonaAccount.getPersonaAccountsForAccountIdentifier("joe"); + Assert.assertEquals(2, personaSearchResult.size()); + for (PersonaAccount pa: personaSearchResult) { + Assert.assertTrue(pa.getAccount().getTypeSpecificId().contains("joe")); + } + + // Search on 'exotic' - should get 2 + personaSearchResult = PersonaAccount.getPersonaAccountsForAccountIdentifier("exotic"); + Assert.assertEquals(2, personaSearchResult.size()); + for (PersonaAccount pa: personaSearchResult) { + Assert.assertTrue(pa.getAccount().getTypeSpecificId().contains("exotic")); + } + + // Search on '999' - should get 2 + personaSearchResult = PersonaAccount.getPersonaAccountsForAccountIdentifier("999"); + Assert.assertEquals(2, personaSearchResult.size()); + for (PersonaAccount pa: personaSearchResult) { + Assert.assertTrue(pa.getAccount().getTypeSpecificId().contains("999")); + } + + // Search on '555' - should get 4 + personaSearchResult = PersonaAccount.getPersonaAccountsForAccountIdentifier("555"); + Assert.assertEquals(4, personaSearchResult.size()); + for (PersonaAccount pa: personaSearchResult) { + Assert.assertTrue(pa.getAccount().getTypeSpecificId().contains("555")); + } + + // Search on 'doe' - should get 2 + personaSearchResult = PersonaAccount.getPersonaAccountsForAccountIdentifier("doe"); + Assert.assertEquals(2, personaSearchResult.size()); + for (PersonaAccount pa: personaSearchResult) { + Assert.assertTrue(pa.getAccount().getTypeSpecificId().contains("doe")); + } + + // Search on '@' - should get 4 + personaSearchResult = PersonaAccount.getPersonaAccountsForAccountIdentifier("@"); + Assert.assertEquals(4, personaSearchResult.size()); + for (PersonaAccount pa: personaSearchResult) { + Assert.assertTrue(pa.getAccount().getTypeSpecificId().contains("@")); + } + + // Search on '' - should get ALL (6) + 
personaSearchResult = PersonaAccount.getPersonaAccountsForAccountIdentifier(""); + Assert.assertEquals(6, personaSearchResult.size()); + + + } catch (CentralRepoException ex) { + Assert.fail("No name persona test failed. Exception: " + ex); + } + } + + } + + /** + * Tests the getOrInsertExaminer() api. + */ + public void testExaminers() { + + try { + String examinerName = "abcdefg"; + CentralRepoExaminer examiner = CentralRepository.getInstance().getOrInsertExaminer(examinerName); + Assert.assertTrue(examiner.getLoginName().equalsIgnoreCase(examinerName)); + + examinerName = ""; + examiner = CentralRepository.getInstance().getOrInsertExaminer(examinerName); + Assert.assertTrue(examiner.getLoginName().equalsIgnoreCase(examinerName)); + + examinerName = "D'Aboville"; + examiner = CentralRepository.getInstance().getOrInsertExaminer(examinerName); + Assert.assertTrue(examiner.getLoginName().equalsIgnoreCase(examinerName)); + + } catch (CentralRepoException ex) { + Assert.fail("Examiner tests failed. Exception: " + ex); + } + + } +} diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java index 9d8c8b1269..5ab3b8c84b 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java @@ -79,6 +79,7 @@ import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TagSet; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; @@ -107,6 +108,8 @@ public final class ImageGalleryController { Case.Events.CONTENT_TAG_DELETED, Case.Events.DATA_SOURCE_DELETED ); + + private static final String CATEGORY_TAG_SET_PREFIX = "Project VIC"; /* * There is an image gallery controller per case. It is created during the @@ -228,14 +231,16 @@ public final class ImageGalleryController { void startUp() throws TskCoreException { selectionModel = new FileIDSelectionModel(this); thumbnailCache = new ThumbnailCache(this); + + TagSet categoryTagSet = getCategoryTagSet(); /* * TODO (JIRA-5212): The next two lines need to be executed in this * order. Why? This suggests there is some inappropriate coupling * between the DrawableDB and GroupManager classes. */ groupManager = new GroupManager(this); - drawableDB = DrawableDB.getDrawableDB(this); - categoryManager = new CategoryManager(this); + drawableDB = DrawableDB.getDrawableDB(this, categoryTagSet); + categoryManager = new CategoryManager(this, categoryTagSet); tagsManager = new DrawableTagsManager(this); tagsManager.registerListener(groupManager); tagsManager.registerListener(categoryManager); @@ -720,6 +725,28 @@ public final class ImageGalleryController { private static boolean isDrawableAndNotKnown(AbstractFile abstractFile) throws FileTypeDetector.FileTypeDetectorInitException { return (abstractFile.getKnown() != TskData.FileKnown.KNOWN) && FileTypeUtils.isDrawable(abstractFile); } + + /** + * Returns the TagSet with the image gallery categories. + * + * @return Category TagSet. 
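+ * The set is located by matching the "Project VIC" tag set name prefix.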
+ * + * @throws TskCoreException + */ + private TagSet getCategoryTagSet() throws TskCoreException { + List tagSetList = getCaseDatabase().getTaggingManager().getTagSets(); + if (tagSetList != null && !tagSetList.isEmpty()) { + for (TagSet set : tagSetList) { + if (set.getName().startsWith(CATEGORY_TAG_SET_PREFIX)) { + return set; + } + } + // If we get to here the Project VIC Test set wasn't found; + throw new TskCoreException("Error loading Project VIC tag set: Tag set not found."); + } else { + throw new TskCoreException("Error loading Project VIC tag set: Tag set not found."); + } + } /** * A listener for ingest module application events. diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/UpdateDrawableFileTask.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/UpdateDrawableFileTask.java index 6ea1e91769..125f5489ca 100755 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/UpdateDrawableFileTask.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/UpdateDrawableFileTask.java @@ -27,7 +27,7 @@ import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.TskCoreException; /** - * A task that updates one drawable file in the drawables database. + * A task that updates one drawable file in the drawable database. */ class UpdateDrawableFileTask extends DrawableDbTask { @@ -60,5 +60,5 @@ class UpdateDrawableFileTask extends DrawableDbTask { Logger.getLogger(UpdateDrawableFileTask.class.getName()).log(Level.SEVERE, "Error in update file task", ex); //NON-NLS } } - + } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/CategorizeAction.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/CategorizeAction.java index d847a21a58..13703f417b 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/CategorizeAction.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/CategorizeAction.java @@ -19,6 +19,9 @@ package org.sleuthkit.autopsy.imagegallery.actions; import com.google.common.collect.ImmutableMap; +import java.awt.Color; +import java.awt.Graphics2D; +import java.awt.image.BufferedImage; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -27,9 +30,10 @@ import java.util.Set; import java.util.logging.Level; import java.util.stream.Collectors; import javafx.collections.ObservableSet; +import javafx.embed.swing.SwingFXUtils; +import javafx.scene.Node; import javafx.scene.control.Menu; import javafx.scene.control.MenuItem; -import javafx.scene.image.ImageView; import javafx.scene.input.KeyCode; import javafx.scene.input.KeyCodeCombination; import javax.annotation.Nonnull; @@ -41,9 +45,7 @@ import org.openide.util.NbBundle; import org.openide.windows.WindowManager; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.imagegallery.ImageGalleryController; -import org.sleuthkit.autopsy.datamodel.DhsImageCategory; import org.sleuthkit.autopsy.imagegallery.DrawableDbTask; -import org.sleuthkit.autopsy.imagegallery.datamodel.CategoryManager; import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableAttribute; import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableFile; import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableTagsManager; @@ -51,6 +53,7 @@ import org.sleuthkit.datamodel.ContentTag; import org.sleuthkit.datamodel.Tag; import org.sleuthkit.datamodel.TagName; import org.sleuthkit.datamodel.TskCoreException; +import javafx.scene.image.ImageView; /** * An action that associates a drawable file with a Project 
Vic category. @@ -62,24 +65,24 @@ public class CategorizeAction extends Action { private final ImageGalleryController controller; private final UndoRedoManager undoManager; - private final DhsImageCategory cat; private final Set selectedFileIDs; private final Boolean createUndo; + private final TagName tagName; - public CategorizeAction(ImageGalleryController controller, DhsImageCategory cat, Set selectedFileIDs) { - this(controller, cat, selectedFileIDs, true); + public CategorizeAction(ImageGalleryController controller, TagName tagName, Set selectedFileIDs) { + this(controller, tagName, selectedFileIDs, true); } - private CategorizeAction(ImageGalleryController controller, DhsImageCategory cat, Set selectedFileIDs, Boolean createUndo) { - super(cat.getDisplayName()); + private CategorizeAction(ImageGalleryController controller, TagName tagName, Set selectedFileIDs, Boolean createUndo) { + super(tagName.getDisplayName()); this.controller = controller; this.undoManager = controller.getUndoManager(); - this.cat = cat; this.selectedFileIDs = selectedFileIDs; this.createUndo = createUndo; - setGraphic(cat.getGraphic()); + this.tagName = tagName; + setGraphic(getGraphic(tagName)); setEventHandler(actionEvent -> addCatToFiles(selectedFileIDs)); - setAccelerator(new KeyCodeCombination(KeyCode.getKeyCode(Integer.toString(cat.getCategoryNumber())))); + setAccelerator(new KeyCodeCombination(KeyCode.getKeyCode(getCategoryNumberFromTagName(tagName)))); } static public Menu getCategoriesMenu(ImageGalleryController controller) { @@ -87,8 +90,18 @@ public class CategorizeAction extends Action { } final void addCatToFiles(Set ids) { - Logger.getAnonymousLogger().log(Level.INFO, "categorizing{0} as {1}", new Object[]{ids.toString(), cat.getDisplayName()}); //NON-NLS - controller.queueDBTask(new CategorizeDrawableFileTask(ids, cat, createUndo)); + Logger.getAnonymousLogger().log(Level.INFO, "categorizing{0} as {1}", new Object[]{ids.toString(), tagName.getDisplayName()}); //NON-NLS + controller.queueDBTask(new CategorizeDrawableFileTask(ids, tagName, createUndo)); + } + + private String getCategoryNumberFromTagName(TagName tagName) { + String displayName = tagName.getDisplayName(); + if (displayName.contains("CAT")) { + String[] split = displayName.split(":"); + split = split[0].split("-"); + return split[1]; + } + return ""; } /** @@ -104,8 +117,8 @@ public class CategorizeAction extends Action { // Each category get an item in the sub-menu. Selecting one of these menu items adds // a tag with the associated category. 
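+ // The categories are the TagNames of the case's category tag set, sorted by display name (see CategoryManager.getCategories()).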
- for (final DhsImageCategory cat : DhsImageCategory.values()) { - MenuItem categoryItem = ActionUtils.createMenuItem(new CategorizeAction(controller, cat, selected)); + for (TagName tagName : controller.getCategoryManager().getCategories()) { + MenuItem categoryItem = ActionUtils.createMenuItem(new CategorizeAction(controller, tagName, selected)); getItems().add(categoryItem); } } @@ -124,54 +137,39 @@ public class CategorizeAction extends Action { final Set fileIDs; final boolean createUndo; - final DhsImageCategory cat; + final TagName catTagName; - CategorizeDrawableFileTask(Set fileIDs, @Nonnull DhsImageCategory cat, boolean createUndo) { + CategorizeDrawableFileTask(Set fileIDs, @Nonnull TagName catTagName, boolean createUndo) { super(); this.fileIDs = fileIDs; - java.util.Objects.requireNonNull(cat); - this.cat = cat; + java.util.Objects.requireNonNull(catTagName); + this.catTagName = catTagName; this.createUndo = createUndo; } @Override public void run() { final DrawableTagsManager tagsManager = controller.getTagsManager(); - final CategoryManager categoryManager = controller.getCategoryManager(); - Map oldCats = new HashMap<>(); - TagName tagName = categoryManager.getTagName(cat); + Map oldCats = new HashMap<>(); for (long fileID : fileIDs) { try { DrawableFile file = controller.getFileFromID(fileID); //drawable db access if (createUndo) { - DhsImageCategory oldCat = file.getCategory(); //drawable db access - TagName oldCatTagName = categoryManager.getTagName(oldCat); - if (false == tagName.equals(oldCatTagName)) { - oldCats.put(fileID, oldCat); + TagName oldCatTagName = file.getCategory(); //drawable db access + if (false == catTagName.equals(oldCatTagName)) { + oldCats.put(fileID, oldCatTagName); } } final List fileTags = tagsManager.getContentTags(file); - if (tagName == categoryManager.getTagName(DhsImageCategory.ZERO)) { - // delete all cat tags for cat-0 - fileTags.stream() - .filter(tag -> CategoryManager.isCategoryTagName(tag.getName())) - .forEach((ct) -> { - try { - tagsManager.deleteContentTag(ct); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error removing old categories result", ex); //NON-NLS - } - }); - } else { - //add cat tag if no existing cat tag for that cat - if (fileTags.stream() - .map(Tag::getName) - .filter(tagName::equals) - .collect(Collectors.toList()).isEmpty()) { - tagsManager.addContentTag(file, tagName, ""); - } + + if (fileTags.stream() + .map(Tag::getName) + .filter(tagName::equals) + .collect(Collectors.toList()).isEmpty()) { + tagsManager.addContentTag(file, tagName, ""); } + } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error categorizing result", ex); //NON-NLS JOptionPane.showMessageDialog(WindowManager.getDefault().getMainWindow(), @@ -183,7 +181,7 @@ public class CategorizeAction extends Action { } if (createUndo && oldCats.isEmpty() == false) { - undoManager.addToUndo(new CategorizationChange(controller, cat, oldCats)); + undoManager.addToUndo(new CategorizationChange(controller, catTagName, oldCats)); } } } @@ -194,14 +192,14 @@ public class CategorizeAction extends Action { @Immutable private final class CategorizationChange implements UndoRedoManager.UndoableCommand { - private final DhsImageCategory newCategory; - private final ImmutableMap oldCategories; + private final TagName newTagNameCategory; + private final ImmutableMap oldTagNameCategories; private final ImageGalleryController controller; - CategorizationChange(ImageGalleryController controller, DhsImageCategory newCategory, Map oldCategories) { + 
CategorizationChange(ImageGalleryController controller, TagName newTagNameCategory, Map oldTagNameCategories) { this.controller = controller; - this.newCategory = newCategory; - this.oldCategories = ImmutableMap.copyOf(oldCategories); + this.newTagNameCategory = newTagNameCategory; + this.oldTagNameCategories = ImmutableMap.copyOf(oldTagNameCategories); } /** @@ -210,7 +208,7 @@ public class CategorizeAction extends Action { */ @Override public void run() { - new CategorizeAction(controller, newCategory, this.oldCategories.keySet(), false) + new CategorizeAction(controller, newTagNameCategory, this.oldTagNameCategories.keySet(), false) .handle(null); } @@ -221,10 +219,42 @@ public class CategorizeAction extends Action { @Override public void undo() { - for (Map.Entry entry : oldCategories.entrySet()) { + for (Map.Entry entry : oldTagNameCategories.entrySet()) { new CategorizeAction(controller, entry.getValue(), Collections.singleton(entry.getKey()), false) .handle(null); } } } + + /** + * Create an BufferedImage to use as the icon for the given TagName. + * + * @param tagName The category TagName. + * + * @return TagName Icon BufferedImage. + */ + private BufferedImage getImageForTagName(TagName tagName) { + BufferedImage off_image = new BufferedImage(16, 16, BufferedImage.TYPE_INT_ARGB); + Graphics2D g2 = off_image.createGraphics(); + + g2.setColor(java.awt.Color.decode(tagName.getColor().getRgbValue())); + g2.fillRect(0, 0, 16, 16); + + g2.setColor(Color.BLACK); + g2.drawRect(0, 0, 16, 16); + return off_image; + } + + /** + * Returns a Node which is a ImageView of the icon for the given TagName. + * + * @param tagname + * + * @return Node for use as the TagName menu item graphic. + */ + private Node getGraphic(TagName tagname) { + BufferedImage buff_image = getImageForTagName(tagname); + return new ImageView(SwingFXUtils.toFXImage(buff_image, null)); + } + } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/CategorizeGroupAction.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/CategorizeGroupAction.java index b49e1ca30c..20e8cb2081 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/CategorizeGroupAction.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/CategorizeGroupAction.java @@ -25,6 +25,7 @@ import java.util.logging.Level; import javafx.collections.ObservableList; import javafx.geometry.Orientation; import javafx.geometry.VPos; +import javafx.scene.Node; import javafx.scene.control.Alert; import javafx.scene.control.ButtonBar; import javafx.scene.control.ButtonType; @@ -37,9 +38,9 @@ import javafx.scene.layout.VBox; import static org.apache.commons.lang.ObjectUtils.notEqual; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.datamodel.DhsImageCategory; import org.sleuthkit.autopsy.imagegallery.ImageGalleryController; import org.sleuthkit.autopsy.imagegallery.ImageGalleryPreferences; +import org.sleuthkit.datamodel.TagName; import org.sleuthkit.datamodel.TskCoreException; /** @@ -50,7 +51,7 @@ public class CategorizeGroupAction extends CategorizeAction { private final static Logger LOGGER = Logger.getLogger(CategorizeGroupAction.class.getName()); - public CategorizeGroupAction(DhsImageCategory newCat, ImageGalleryController controller) { + public CategorizeGroupAction(TagName newCat, ImageGalleryController controller) { super(controller, newCat, null); setEventHandler(actionEvent -> { controller.getViewState().getGroup().ifPresent(group -> { 
@@ -60,12 +61,12 @@ public class CategorizeGroupAction extends CategorizeAction { //if they have preveiously disabled the warning, just go ahead and apply categories. addCatToFiles(ImmutableSet.copyOf(fileIDs)); } else { - final Map catCountMap = new HashMap<>(); + final Map catCountMap = new HashMap<>(); for (Long fileID : fileIDs) { try { - DhsImageCategory category = controller.getFileFromID(fileID).getCategory(); - if (false == DhsImageCategory.ZERO.equals(category) && newCat.equals(category) == false) { + TagName category = controller.getFileFromID(fileID).getCategory(); + if (category != null && newCat.equals(category) == false) { catCountMap.merge(category, 1L, Long::sum); } } catch (TskCoreException ex) { @@ -90,18 +91,19 @@ public class CategorizeGroupAction extends CategorizeAction { "CategorizeGroupAction.fileCountMessage={0} with {1}", "CategorizeGroupAction.dontShowAgain=Don't show this message again", "CategorizeGroupAction.fileCountHeader=Files in the following categories will have their categories overwritten: "}) - private void showConfirmationDialog(final Map catCountMap, DhsImageCategory newCat, ObservableList fileIDs) { + private void showConfirmationDialog(final Map catCountMap, TagName newCat, ObservableList fileIDs) { ButtonType categorizeButtonType = new ButtonType(Bundle.CategorizeGroupAction_OverwriteButton_text(), ButtonBar.ButtonData.APPLY); VBox textFlow = new VBox(); - for (Map.Entry entry : catCountMap.entrySet()) { - if (entry.getValue() > 0 - && notEqual(entry.getKey(), newCat)) { + for (Map.Entry entry : catCountMap.entrySet()) { + + if (entry != null && entry.getValue() > 0 + && notEqual(entry.getKey(), newCat)) { Label label = new Label(Bundle.CategorizeGroupAction_fileCountMessage(entry.getValue(), entry.getKey().getDisplayName()), - entry.getKey().getGraphic()); + getGraphic(entry.getKey())); label.setContentDisplay(ContentDisplay.RIGHT); textFlow.getChildren().add(label); } @@ -127,4 +129,8 @@ public class CategorizeGroupAction extends CategorizeAction { } }); } + + public Node getGraphic(TagName tagName) { + return null; + } } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/CategorizeSelectedFilesAction.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/CategorizeSelectedFilesAction.java index bb8cd9de96..742ac300c1 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/CategorizeSelectedFilesAction.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/CategorizeSelectedFilesAction.java @@ -19,14 +19,14 @@ package org.sleuthkit.autopsy.imagegallery.actions; import org.sleuthkit.autopsy.imagegallery.ImageGalleryController; -import org.sleuthkit.autopsy.datamodel.DhsImageCategory; +import org.sleuthkit.datamodel.TagName; /** * */ public class CategorizeSelectedFilesAction extends CategorizeAction { - public CategorizeSelectedFilesAction(DhsImageCategory cat, ImageGalleryController controller) { + public CategorizeSelectedFilesAction(TagName cat, ImageGalleryController controller) { super(controller, cat, null); setEventHandler(actionEvent -> addCatToFiles(controller.getSelectionModel().getSelected()) diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/CategoryManager.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/CategoryManager.java index 2fb97b6e95..5eaf8b4ad7 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/CategoryManager.java +++ 
b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/CategoryManager.java @@ -24,8 +24,11 @@ import com.google.common.collect.ImmutableSet; import com.google.common.eventbus.AsyncEventBus; import com.google.common.eventbus.EventBus; import com.google.common.eventbus.Subscribe; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.Comparator; +import java.util.List; import java.util.concurrent.Executors; import java.util.concurrent.atomic.LongAdder; import java.util.logging.Level; @@ -33,11 +36,13 @@ import javax.annotation.concurrent.Immutable; import org.apache.commons.lang3.concurrent.BasicThreadFactory; import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent; import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent; +import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent.DeletedContentTagInfo; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.datamodel.DhsImageCategory; import org.sleuthkit.autopsy.imagegallery.ImageGalleryController; import org.sleuthkit.datamodel.ContentTag; import org.sleuthkit.datamodel.TagName; +import org.sleuthkit.datamodel.TagSet; import org.sleuthkit.datamodel.TskCoreException; /** @@ -56,8 +61,6 @@ public class CategoryManager { private static final Logger LOGGER = Logger.getLogger(CategoryManager.class.getName()); - private final ImageGalleryController controller; - /** * the DrawableDB that backs the category counts cache. The counts are * initialized from this, and the counting of CAT-0 is always delegated to @@ -65,6 +68,8 @@ public class CategoryManager { */ private final DrawableDB drawableDb; + private final TagSet categoryTagSet; + /** * Used to distribute CategoryChangeEvents */ @@ -79,32 +84,20 @@ public class CategoryManager { * the count related methods go through this cache, which loads initial * values from the database if needed. */ - private final LoadingCache categoryCounts + private final LoadingCache categoryCounts = CacheBuilder.newBuilder().build(CacheLoader.from(this::getCategoryCountHelper)); - /** - * cached TagNames corresponding to Categories, looked up from - * autopsyTagManager at initial request or if invalidated by case change. 
- */ - private final LoadingCache catTagNameMap - = CacheBuilder.newBuilder().build(new CacheLoader() { - @Override - public TagName load(DhsImageCategory cat) throws TskCoreException { - return getController().getTagsManager().getTagName(cat); - } - }); - public CategoryManager(ImageGalleryController controller) { - this.controller = controller; + public CategoryManager(ImageGalleryController controller, TagSet categoryTagSet) throws TskCoreException { this.drawableDb = controller.getDrawablesDatabase(); + this.categoryTagSet = categoryTagSet; } - private ImageGalleryController getController() { - return controller; + public List getCategories() { + return Collections.unmodifiableList(getSortedTagNames(categoryTagSet.getTagNames())); } synchronized public void invalidateCaches() { categoryCounts.invalidateAll(); - catTagNameMap.invalidateAll(); fireChange(Collections.emptyList(), null); } @@ -115,16 +108,8 @@ public class CategoryManager { * * @return the number of files with the given Category */ - synchronized public long getCategoryCount(DhsImageCategory cat) { - if (cat == DhsImageCategory.ZERO) { - // Keeping track of the uncategorized files is a bit tricky while ingest - // is going on, so always use the list of file IDs we already have along with the - // other category counts instead of trying to track it separately. - long allOtherCatCount = getCategoryCount(DhsImageCategory.ONE) + getCategoryCount(DhsImageCategory.TWO) + getCategoryCount(DhsImageCategory.THREE) + getCategoryCount(DhsImageCategory.FOUR) + getCategoryCount(DhsImageCategory.FIVE); - return drawableDb.getNumberOfImageFilesInList() - allOtherCatCount; - } else { - return categoryCounts.getUnchecked(cat).sum(); - } + synchronized public long getCategoryCount(TagName tagName) { + return categoryCounts.getUnchecked(tagName).sum(); } /** @@ -133,10 +118,8 @@ public class CategoryManager { * * @param cat the Category to increment */ - synchronized public void incrementCategoryCount(DhsImageCategory cat) { - if (cat != DhsImageCategory.ZERO) { - categoryCounts.getUnchecked(cat).increment(); - } + synchronized public void incrementCategoryCount(TagName tagName) { + categoryCounts.getUnchecked(tagName).increment(); } /** @@ -145,10 +128,8 @@ public class CategoryManager { * * @param cat the Category to decrement */ - synchronized public void decrementCategoryCount(DhsImageCategory cat) { - if (cat != DhsImageCategory.ZERO) { - categoryCounts.getUnchecked(cat).decrement(); - } + synchronized public void decrementCategoryCount(TagName tagName) { + categoryCounts.getUnchecked(tagName).decrement(); } /** @@ -161,14 +142,14 @@ public class CategoryManager { * @return a LongAdder whose value is set to the number of file with the * given Category */ - synchronized private LongAdder getCategoryCountHelper(DhsImageCategory cat) { + synchronized private LongAdder getCategoryCountHelper(TagName cat) { LongAdder longAdder = new LongAdder(); longAdder.decrement(); try { longAdder.add(drawableDb.getCategoryCount(cat)); longAdder.increment(); } catch (IllegalStateException ex) { - LOGGER.log(Level.WARNING, "Case closed while getting files"); //NON-NLS + LOGGER.log(Level.WARNING, "Case closed while getting files", ex); //NON-NLS } return longAdder; } @@ -178,8 +159,8 @@ public class CategoryManager { * * @param fileIDs */ - public void fireChange(Collection fileIDs, DhsImageCategory newCategory) { - categoryEventBus.post(new CategoryChangeEvent(fileIDs, newCategory)); + public void fireChange(Collection fileIDs, TagName tagName) { + 
categoryEventBus.post(new CategoryChangeEvent(fileIDs, tagName)); } /** @@ -216,68 +197,66 @@ public class CategoryManager { } /** - * get the TagName used to store this Category in the main autopsy db. + * Returns true if the given TagName is a category tag. * - * @return the TagName used for this Category + * @param tName TagName + * + * @return True if tName is a category tag. */ - synchronized public TagName getTagName(DhsImageCategory cat) { - return catTagNameMap.getUnchecked(cat); + public boolean isCategoryTagName(TagName tName) { + return categoryTagSet.getTagNames().contains(tName); + } + + /** + * Returns true if the given TagName is not a category tag. + * + * Keep for use in location were a reference to this function is passed. + * + * @param tName TagName + * + * @return True if the given tName is not a category tag. + */ + public boolean isNotCategoryTagName(TagName tName) { + return !isCategoryTagName(tName); } - public static DhsImageCategory categoryFromTagName(TagName tagName) { - return DhsImageCategory.fromDisplayName(tagName.getDisplayName()); - } - - public static boolean isCategoryTagName(TagName tName) { - return DhsImageCategory.isCategoryName(tName.getDisplayName()); - } - - public static boolean isNotCategoryTagName(TagName tName) { - return DhsImageCategory.isNotCategoryName(tName.getDisplayName()); - + /** + * Returns the category tag set. + * + * @return + */ + TagSet getCategorySet() { + return categoryTagSet; } @Subscribe public void handleTagAdded(ContentTagAddedEvent event) { final ContentTag addedTag = event.getAddedTag(); - if (isCategoryTagName(addedTag.getName())) { - final DrawableTagsManager tagsManager = controller.getTagsManager(); - try { - //remove old category tag(s) if necessary - for (ContentTag ct : tagsManager.getContentTags(addedTag.getContent())) { - if (ct.getId() != addedTag.getId() - && CategoryManager.isCategoryTagName(ct.getName())) { - try { - tagsManager.deleteContentTag(ct); - } catch (TskCoreException tskException) { - LOGGER.log(Level.SEVERE, "Failed to delete content tag. Unable to maintain categories in a consistent state.", tskException); //NON-NLS - break; - } - } - } - } catch (TskCoreException tskException) { - LOGGER.log(Level.SEVERE, "Failed to get content tags for content. 
Unable to maintain category in a consistent state.", tskException); //NON-NLS - } - DhsImageCategory newCat = CategoryManager.categoryFromTagName(addedTag.getName()); - if (newCat != DhsImageCategory.ZERO) { - incrementCategoryCount(newCat); - } - fireChange(Collections.singleton(addedTag.getContent().getId()), newCat); + List removedTags = event.getDeletedTags(); + if (removedTags != null) { + for (DeletedContentTagInfo tagInfo : removedTags) { + handleDeletedInfo(tagInfo); + } + } + + if (isCategoryTagName(addedTag.getName())) { + incrementCategoryCount(addedTag.getName()); + fireChange(Collections.singleton(addedTag.getContent().getId()), addedTag.getName()); } } @Subscribe public void handleTagDeleted(ContentTagDeletedEvent event) { final ContentTagDeletedEvent.DeletedContentTagInfo deletedTagInfo = event.getDeletedTagInfo(); + handleDeletedInfo(deletedTagInfo); + } + + private void handleDeletedInfo(DeletedContentTagInfo deletedTagInfo) { TagName tagName = deletedTagInfo.getName(); if (isCategoryTagName(tagName)) { - - DhsImageCategory deletedCat = CategoryManager.categoryFromTagName(tagName); - if (deletedCat != DhsImageCategory.ZERO) { - decrementCategoryCount(deletedCat); - } + decrementCategoryCount(tagName); fireChange(Collections.singleton(deletedTagInfo.getContentID()), null); } } @@ -290,16 +269,16 @@ public class CategoryManager { public static class CategoryChangeEvent { private final ImmutableSet fileIDs; - private final DhsImageCategory newCategory; + private final TagName tagName; - public CategoryChangeEvent(Collection fileIDs, DhsImageCategory newCategory) { + public CategoryChangeEvent(Collection fileIDs, TagName tagName) { super(); this.fileIDs = ImmutableSet.copyOf(fileIDs); - this.newCategory = newCategory; + this.tagName = tagName; } - public DhsImageCategory getNewCategory() { - return newCategory; + public TagName getNewCategory() { + return tagName; } /** @@ -309,4 +288,18 @@ public class CategoryManager { return fileIDs; } } + + private List getSortedTagNames(List tagNames) { + Comparator compareByDisplayName = new Comparator() { + @Override + public int compare(TagName tagName1, TagName tagName2) { + return tagName1.getDisplayName().compareTo(tagName2.getDisplayName()); + } + }; + + List sortedTagNames = new ArrayList<>(tagNames); + sortedTagNames.sort(compareByDisplayName); + + return sortedTagNames; + } } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableAttribute.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableAttribute.java index 25b50811b0..03ea2e3292 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableAttribute.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableAttribute.java @@ -18,7 +18,6 @@ */ package org.sleuthkit.autopsy.imagegallery.datamodel; -import org.sleuthkit.autopsy.datamodel.DhsImageCategory; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -89,15 +88,17 @@ public class DrawableAttribute> { * //TODO: this has lead to awkward hard to maintain code, and little * advantage. move categories into DrawableDB? 
*/ - public final static DrawableAttribute CATEGORY - = new DrawableAttribute(AttributeName.CATEGORY, Bundle.DrawableAttribute_category(), + public final static DrawableAttribute CATEGORY + = new DrawableAttribute(AttributeName.CATEGORY, Bundle.DrawableAttribute_category(), false, "category-icon.png", //NON-NLS f -> Collections.singleton(f.getCategory())) { @Override - public Node getGraphicForValue(DhsImageCategory val) { - return val.getGraphic(); + public Node getGraphicForValue(TagName val) { + + return null; + //return val.getGraphic(); } }; @@ -235,9 +236,13 @@ public class DrawableAttribute> { .filter(value -> (value != null && value.toString().isEmpty() == false)) .collect(Collectors.toSet()); } catch (Exception ex) { - /* There is a catch-all here because the code in the try block executes third-party - library calls that throw unchecked exceptions. See JIRA-5144, where an IllegalStateException - was thrown because a file's MIME type was incorrectly identified as a picture type. */ + /* + * There is a catch-all here because the code in the try block + * executes third-party library calls that throw unchecked + * exceptions. See JIRA-5144, where an IllegalStateException was + * thrown because a file's MIME type was incorrectly identified as a + * picture type. + */ logger.log(Level.WARNING, "Exception while getting image attributes", ex); //NON-NLS return Collections.emptySet(); } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableDB.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableDB.java index f634e8387d..3aa39bd2fd 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableDB.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableDB.java @@ -52,11 +52,9 @@ import java.util.logging.Level; import javax.annotation.Nonnull; import javax.annotation.concurrent.GuardedBy; import javax.swing.SortOrder; -import static org.apache.commons.lang3.ObjectUtils.notEqual; import org.apache.commons.lang3.StringUtils; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.datamodel.DhsImageCategory; import org.sleuthkit.autopsy.imagegallery.FileTypeUtils; import org.sleuthkit.autopsy.imagegallery.ImageGalleryController; import org.sleuthkit.autopsy.imagegallery.ImageGalleryModule; @@ -80,6 +78,7 @@ import org.sleuthkit.datamodel.TskDataException; import org.sleuthkit.datamodel.VersionNumber; import org.sqlite.SQLiteJDBCLoader; import java.util.stream.Collectors; +import org.sleuthkit.datamodel.TagSet; /** * Provides access to the image gallery database and selected tables in the case @@ -250,7 +249,7 @@ public final class DrawableDB { * could not be correctly initialized for Image * Gallery use. 
*/ - private DrawableDB(Path dbPath, ImageGalleryController controller) throws IOException, SQLException, TskCoreException { + private DrawableDB(Path dbPath, ImageGalleryController controller, TagSet standardCategories) throws IOException, SQLException, TskCoreException { this.dbPath = dbPath; this.controller = controller; caseDb = this.controller.getCaseDatabase(); @@ -259,7 +258,7 @@ public final class DrawableDB { dbWriteLock(); try { con = DriverManager.getConnection("jdbc:sqlite:" + dbPath.toString()); //NON-NLS - if (!initializeDBSchema() || !upgradeDBSchema() || !prepareStatements() || !initializeStandardGroups() || !removeDeletedDataSources() || !initializeImageList()) { + if (!initializeDBSchema() || !upgradeDBSchema() || !prepareStatements() || !initializeStandardGroups(standardCategories) || !removeDeletedDataSources() || !initializeImageList()) { close(); throw new TskCoreException("Failed to initialize drawables database for Image Gallery use"); //NON-NLS } @@ -297,12 +296,13 @@ public final class DrawableDB { } } - private boolean initializeStandardGroups() { + private boolean initializeStandardGroups(TagSet standardCategories) { CaseDbTransaction caseDbTransaction = null; try { caseDbTransaction = caseDb.beginTransaction(); - for (DhsImageCategory cat : DhsImageCategory.values()) { - insertGroup(cat.getDisplayName(), DrawableAttribute.CATEGORY, caseDbTransaction); + + for(TagName tagName: standardCategories.getTagNames()) { + insertGroup(tagName.getDisplayName(), DrawableAttribute.CATEGORY, caseDbTransaction); } caseDbTransaction.commit(); return true; @@ -466,7 +466,7 @@ public final class DrawableDB { * * @throws org.sleuthkit.datamodel.TskCoreException */ - public static DrawableDB getDrawableDB(ImageGalleryController controller) throws TskCoreException { + public static DrawableDB getDrawableDB(ImageGalleryController controller, TagSet standardCategories) throws TskCoreException { Path dbPath = ImageGalleryModule.getModuleOutputDir(controller.getCase()).resolve("drawable.db"); try { deleteDatabaseIfOlderVersion(dbPath); @@ -477,14 +477,14 @@ public final class DrawableDB { } try { - return new DrawableDB(dbPath, controller); + return new DrawableDB(dbPath, controller, standardCategories); } catch (IOException ex) { throw new TskCoreException("Failed to create drawables database directory", ex); //NON-NLS } catch (SQLException ex) { throw new TskCoreException("Failed to create/open the drawables database", ex); //NON-NLS } } - + /** * Checks if the specified table exists in Drawable DB * @@ -2068,7 +2068,7 @@ public final class DrawableDB { case MIME_TYPE: return groupManager.getFileIDsWithMimeType((String) groupKey.getValue()); case CATEGORY: - return groupManager.getFileIDsWithCategory((DhsImageCategory) groupKey.getValue()); + return groupManager.getFileIDsWithCategory((TagName) groupKey.getValue()); case TAGS: return groupManager.getFileIDsWithTag((TagName) groupKey.getValue()); } @@ -2269,9 +2269,8 @@ public final class DrawableDB { * * @return the number of the with the given category */ - public long getCategoryCount(DhsImageCategory cat) { + public long getCategoryCount(TagName tagName) { try { - TagName tagName = controller.getTagsManager().getTagName(cat); if (nonNull(tagName)) { return caseDb.getContentTagsByTagName(tagName).stream() .map(ContentTag::getContent) @@ -2280,7 +2279,7 @@ public final class DrawableDB { .count(); } } catch (IllegalStateException ex) { - logger.log(Level.WARNING, "Case closed while getting files"); //NON-NLS + 
logger.log(Level.WARNING, "Case closed while getting files", ex); //NON-NLS } catch (TskCoreException ex1) { logger.log(Level.SEVERE, "Failed to get content tags by tag name.", ex1); //NON-NLS } @@ -2314,7 +2313,6 @@ public final class DrawableDB { DrawableTagsManager tagsManager = controller.getTagsManager(); String catTagNameIDs = tagsManager.getCategoryTagNames().stream() - .filter(tagName -> notEqual(tagName.getDisplayName(), DhsImageCategory.ZERO.getDisplayName())) .map(TagName::getId) .map(Object::toString) .collect(Collectors.joining(",", "(", ")")); diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableFile.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableFile.java index 9c84f6d3b2..112c7ab6ec 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableFile.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableFile.java @@ -40,8 +40,8 @@ import org.apache.commons.lang3.text.WordUtils; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.datamodel.DhsImageCategory; import org.sleuthkit.autopsy.imagegallery.FileTypeUtils; +import org.sleuthkit.autopsy.imagegallery.ImageGalleryController; import org.sleuthkit.autopsy.imagegallery.utils.TaskUtils; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -94,15 +94,19 @@ public abstract class DrawableFile { private final SimpleBooleanProperty analyzed; - private final SimpleObjectProperty category = new SimpleObjectProperty<>(null); + private final SimpleObjectProperty categoryTagName = new SimpleObjectProperty<>(null); private String make; private String model; + private final CategoryManager categoryManager; + protected DrawableFile(AbstractFile file, Boolean analyzed) { this.analyzed = new SimpleBooleanProperty(analyzed); this.file = file; + + categoryManager = ImageGalleryController.getController(Case.getCurrentCase()).getCategoryManager(); } public abstract boolean isVideo(); @@ -229,32 +233,30 @@ public abstract class DrawableFile { return ""; } - public void setCategory(DhsImageCategory category) { - categoryProperty().set(category); - - } - - public DhsImageCategory getCategory() { + public TagName getCategory() { updateCategory(); - return category.get(); + return categoryTagName.get(); } - public SimpleObjectProperty categoryProperty() { - return category; + public SimpleObjectProperty categoryProperty() { + return categoryTagName; } /** - * set the category property to the most severe one found + * Update the category property. 
*/ private void updateCategory() { try { - category.set(getContentTags().stream() - .map(Tag::getName).filter(CategoryManager::isCategoryTagName) - .map(TagName::getDisplayName) - .map(DhsImageCategory::fromDisplayName) - .sorted().findFirst() //sort by severity and take the first - .orElse(DhsImageCategory.ZERO) - ); + List contentTags = getContentTags(); + TagName tag = null; + for (ContentTag ct : contentTags) { + TagName tagName = ct.getName(); + if (categoryManager.isCategoryTagName(tagName)) { + tag = tagName; + break; + } + } + categoryTagName.set(tag); } catch (TskCoreException ex) { LOGGER.log(Level.WARNING, "problem looking up category for " + this.getContentPathSafe(), ex); //NON-NLS } catch (IllegalStateException ex) { diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableTagsManager.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableTagsManager.java index d2cdc484ce..ec06c6e343 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableTagsManager.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableTagsManager.java @@ -20,10 +20,11 @@ package org.sleuthkit.autopsy.imagegallery.datamodel; import com.google.common.eventbus.AsyncEventBus; import com.google.common.eventbus.EventBus; +import java.util.ArrayList; +import java.util.Comparator; import java.util.List; import java.util.concurrent.Executors; import java.util.logging.Level; -import java.util.stream.Collectors; import javafx.scene.Node; import javafx.scene.image.Image; import javafx.scene.image.ImageView; @@ -33,7 +34,6 @@ import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent; import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent; import org.sleuthkit.autopsy.casemodule.services.TagsManager; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.datamodel.DhsImageCategory; import org.sleuthkit.autopsy.imagegallery.ImageGalleryController; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.ContentTag; @@ -54,10 +54,16 @@ public final class DrawableTagsManager { private static final Image BOOKMARK_IMAGE = new Image("/org/sleuthkit/autopsy/images/star-bookmark-icon-16.png"); private final TagsManager autopsyTagsManager; - /** The tag name corresponding to the "built-in" tag "Follow Up" */ + /** + * The tag name corresponding to the "built-in" tag "Follow Up" + */ private final TagName followUpTagName; private final TagName bookmarkTagName; + private final ImageGalleryController controller; + + private final Comparator compareByDisplayName; + /** * Used to distribute TagsChangeEvents */ @@ -74,6 +80,14 @@ public final class DrawableTagsManager { this.autopsyTagsManager = controller.getCase().getServices().getTagsManager(); followUpTagName = getTagName(Bundle.DrawableTagsManager_followUp()); bookmarkTagName = getTagName(Bundle.DrawableTagsManager_bookMark()); + this.controller = controller; + + compareByDisplayName = new Comparator() { + @Override + public int compare(TagName tagName1, TagName tagName2) { + return tagName1.getDisplayName().compareTo(tagName2.getDisplayName()); + } + }; } /** @@ -129,25 +143,26 @@ public final class DrawableTagsManager { * @throws org.sleuthkit.datamodel.TskCoreException */ public List getNonCategoryTagNames() throws TskCoreException { - return autopsyTagsManager.getAllTagNames().stream() - .filter(CategoryManager::isNotCategoryTagName) - .distinct().sorted() - .collect(Collectors.toList()); + List nonCategoryTagNames = 
new ArrayList<>(); + List allTags = autopsyTagsManager.getAllTagNames(); + for (TagName tag : allTags) { + if (controller.getCategoryManager().isNotCategoryTagName(tag)) { + nonCategoryTagNames.add(tag); + } + } + nonCategoryTagNames.sort(compareByDisplayName); + return nonCategoryTagNames; } /** * Get all the TagNames that are categories * - * @return All the TagNames that are categories, in alphabetical order by - * displayName. + * @return All the TagNames that are categories. * * @throws org.sleuthkit.datamodel.TskCoreException */ public List getCategoryTagNames() throws TskCoreException { - return autopsyTagsManager.getAllTagNames().stream() - .filter(CategoryManager::isCategoryTagName) - .distinct().sorted() - .collect(Collectors.toList()); + return controller.getCategoryManager().getCategorySet().getTagNames(); } /** @@ -190,15 +205,11 @@ public final class DrawableTagsManager { returnTagName = autopsyTagsManager.getDisplayNamesToTagNamesMap().get(displayName); if (returnTagName != null) { return returnTagName; - } + } throw new TskCoreException("Tag name exists but an error occured in retrieving it", ex); } } - public TagName getTagName(DhsImageCategory cat) throws TskCoreException { - return getTagName(cat.getDisplayName()); - } - public ContentTag addContentTag(DrawableFile file, TagName tagName, String comment) throws TskCoreException { return autopsyTagsManager.addContentTag(file.getAbstractFile(), tagName, comment); } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/VideoFile.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/VideoFile.java index 5f6f72279e..f972e6cc9e 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/VideoFile.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/VideoFile.java @@ -55,7 +55,7 @@ public class VideoFile extends DrawableFile { } /** - * Get the genereric video thumbnail. + * Get the generic video thumbnail. * * @return The thumbnail. */ diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupKey.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupKey.java index 52dadf5ecd..e750c5ff2a 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupKey.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupKey.java @@ -59,6 +59,7 @@ public class GroupKey> implements Comparable public String getValueDisplayName() { return Objects.equals(attr, DrawableAttribute.TAGS) + || Objects.equals(attr, DrawableAttribute.CATEGORY) ? ((TagName) getValue()).getDisplayName() : Objects.toString(getValue(), "unknown"); } @@ -74,8 +75,9 @@ public class GroupKey> implements Comparable hash = 79 * hash + Objects.hashCode(this.val); hash = 79 * hash + Objects.hashCode(this.attr); - if (this.dataSource != null) - hash = 79 * hash + (int)this.dataSource.getId(); + if (this.dataSource != null) { + hash = 79 * hash + (int) this.dataSource.getId(); + } return hash; } @@ -99,20 +101,20 @@ public class GroupKey> implements Comparable if (!Objects.equals(this.attr, other.attr)) { return false; } - + // Data source is significant only for PATH based groups. 
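For reference, the anonymous compareByDisplayName Comparator added to DrawableTagsManager above imposes a plain alphabetical ordering on TagName display names; the same ordering can be expressed with the JDK's Comparator.comparing factory. A minimal equivalent sketch, illustrative only and not a hunk of this patch (it assumes non-null display names, just as the anonymous class does):

    import java.util.Comparator;
    import org.sleuthkit.datamodel.TagName;

    class TagNameOrdering {
        // Same ordering as the anonymous compareByDisplayName Comparator:
        // alphabetical by TagName display name.
        static final Comparator<TagName> BY_DISPLAY_NAME =
                Comparator.comparing(TagName::getDisplayName);
    }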
if (this.attr == DrawableAttribute.PATH) { if (this.dataSource != null && other.dataSource != null) { - return this.dataSource.getId() == other.dataSource.getId(); + return this.dataSource.getId() == other.dataSource.getId(); } else if (this.dataSource == null && other.dataSource == null) { // neither group has a datasource return true; } else { - // one group has a datasource, other doesn't + // one group has a datasource, other doesn't return false; } } - + return true; } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java index 59dae2af00..774f6ea3e5 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java @@ -62,7 +62,6 @@ import javax.annotation.concurrent.GuardedBy; import javax.swing.SortOrder; import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; import static org.apache.commons.lang3.ObjectUtils.notEqual; -import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.concurrent.BasicThreadFactory; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; @@ -70,9 +69,7 @@ import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent; import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent; import org.sleuthkit.autopsy.coreutils.LoggedTask; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.datamodel.DhsImageCategory; import org.sleuthkit.autopsy.imagegallery.ImageGalleryController; -import org.sleuthkit.autopsy.imagegallery.datamodel.CategoryManager; import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableAttribute; import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableDB; import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableFile; @@ -106,23 +103,23 @@ public class GroupManager { private final ImageGalleryController controller; /** - * Keeps track of the current path group - * - a change in path indicates the current path group is analyzed + * Keeps track of the current path group - a change in path indicates the + * current path group is analyzed */ @GuardedBy("this") //NOPMD private GroupKey currentPathGroup = null; - + /** - * list of all analyzed groups - i.e. groups that are ready to be shown to user. - * These are groups under the selected groupBy attribute. + * list of all analyzed groups - i.e. groups that are ready to be shown to + * user. These are groups under the selected groupBy attribute. */ @GuardedBy("this") //NOPMD private final ObservableList analyzedGroups = FXCollections.observableArrayList(); private final ObservableList unmodifiableAnalyzedGroups = FXCollections.unmodifiableObservableList(analyzedGroups); /** - * list of unseen groups - * These are groups under the selected groupBy attribute. + * list of unseen groups These are groups under the selected groupBy + * attribute. 
*/ @GuardedBy("this") //NOPMD private final ObservableList unSeenGroups = FXCollections.observableArrayList(); @@ -186,15 +183,15 @@ public class GroupManager { @SuppressWarnings({"rawtypes", "unchecked"}) synchronized public Set> getAllGroupKeysForFile(DrawableFile file) throws TskCoreException, TskDataException { Set> resultSet = new HashSet<>(); - - for (DrawableAttribute attr: DrawableAttribute.getGroupableAttrs()) { + + for (DrawableAttribute attr : DrawableAttribute.getGroupableAttrs()) { for (Comparable val : attr.getValue(file)) { if (attr == DrawableAttribute.PATH) { resultSet.add(new GroupKey(attr, val, file.getDataSource())); } else if (attr == DrawableAttribute.TAGS) { //don't show groups for the categories when grouped by tags. - if (CategoryManager.isNotCategoryTagName((TagName) val)) { + if (controller.getCategoryManager().isNotCategoryTagName((TagName) val)) { resultSet.add(new GroupKey(attr, val, null)); } } else { @@ -204,9 +201,8 @@ public class GroupManager { } return resultSet; } - - /** + /** * * Returns GroupKeys for all the Groups the given file is a part of. * @@ -225,7 +221,7 @@ public class GroupManager { } return Collections.emptySet(); } - + /** * @param groupKey * @@ -244,7 +240,7 @@ public class GroupManager { setGroupBy(DrawableAttribute.PATH); setSortOrder(SortOrder.ASCENDING); setDataSource(null); - + unSeenGroups.forEach(controller.getCategoryManager()::unregisterListener); unSeenGroups.clear(); analyzedGroups.forEach(controller.getCategoryManager()::unregisterListener); @@ -300,12 +296,12 @@ public class GroupManager { public ListenableFuture markGroupUnseen(DrawableGroup group) { return exec.submit(() -> { try { - + getDrawableDB().markGroupUnseen(group.getGroupKey()); // only update and reshuffle if its new results if (group.isSeen() == true) { group.setSeen(false); - } + } // The group may already be in 'unseen' state, e.g. when new files are added, // but not be on the unseenGroupsList yet. updateUnSeenGroups(group); @@ -314,7 +310,7 @@ public class GroupManager { } }); } - + /** * Update unseenGroups list accordingly based on the current status of * 'group'. Removes it if it is seen or adds it if it is unseen. @@ -322,13 +318,13 @@ public class GroupManager { * @param group */ synchronized private void updateUnSeenGroups(DrawableGroup group) { - if (group.isSeen()) { - unSeenGroups.removeAll(group); - } else if (unSeenGroups.contains(group) == false && - getGroupBy() == group.getGroupKey().getAttribute()) { - unSeenGroups.add(group); - } - sortUnseenGroups(); + if (group.isSeen()) { + unSeenGroups.removeAll(group); + } else if (unSeenGroups.contains(group) == false + && getGroupBy() == group.getGroupKey().getAttribute()) { + unSeenGroups.add(group); + } + sortUnseenGroups(); } /** @@ -390,7 +386,7 @@ public class GroupManager { switch (groupKey.getAttribute().attrName) { //these cases get special treatment case CATEGORY: - return getFileIDsWithCategory((DhsImageCategory) groupKey.getValue()); + return getFileIDsWithCategory((TagName) groupKey.getValue()); case TAGS: return getFileIDsWithTag((TagName) groupKey.getValue()); case MIME_TYPE: @@ -405,33 +401,18 @@ public class GroupManager { // @@@ This was kind of slow in the profiler. Maybe we should cache it. // Unless the list of file IDs is necessary, use countFilesWithCategory() to get the counts. 
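With this patch, CATEGORY group keys carry the category TagName itself rather than the removed DhsImageCategory enum, so they are constructed exactly like TAGS keys. A minimal sketch of enumerating the category group keys, illustrative only and not a hunk of this patch (the helper class is hypothetical; it assumes an initialized ImageGalleryController and that getCategories() returns the standard category TagNames without a checked exception, as its other uses in this patch suggest):

    import java.util.ArrayList;
    import java.util.List;
    import org.sleuthkit.autopsy.imagegallery.ImageGalleryController;
    import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableAttribute;
    import org.sleuthkit.autopsy.imagegallery.datamodel.grouping.GroupKey;
    import org.sleuthkit.datamodel.TagName;

    class CategoryGroupKeys {
        // Builds a TagName-based CATEGORY group key for every standard category.
        static List<GroupKey<TagName>> forAllCategories(ImageGalleryController controller) {
            List<GroupKey<TagName>> keys = new ArrayList<>();
            for (TagName category : controller.getCategoryManager().getCategories()) {
                // The data source is significant only for PATH groups, so pass null here.
                keys.add(new GroupKey<>(DrawableAttribute.CATEGORY, category, null));
            }
            return keys;
        }
    }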
- synchronized public Set getFileIDsWithCategory(DhsImageCategory category) throws TskCoreException { + synchronized public Set getFileIDsWithCategory(TagName category) throws TskCoreException { Set fileIDsToReturn = Collections.emptySet(); try { final DrawableTagsManager tagsManager = controller.getTagsManager(); - if (category == DhsImageCategory.ZERO) { - Set fileIDs = new HashSet<>(); - for (TagName catTagName : tagsManager.getCategoryTagNames()) { - if (notEqual(catTagName.getDisplayName(), DhsImageCategory.ZERO.getDisplayName())) { - tagsManager.getContentTagsByTagName(catTagName).stream() - .filter(ct -> ct.getContent() instanceof AbstractFile) - .map(ct -> ct.getContent().getId()) - .filter(getDrawableDB()::isInDB) - .forEach(fileIDs::add); - } - } - fileIDsToReturn = getDrawableDB().findAllFileIdsWhere("obj_id NOT IN (" + StringUtils.join(fileIDs, ',') + ")"); //NON-NLS - } else { - - List contentTags = tagsManager.getContentTagsByTagName(tagsManager.getTagName(category)); - fileIDsToReturn = contentTags.stream() - .filter(ct -> ct.getContent() instanceof AbstractFile) - .filter(ct -> getDrawableDB().isInDB(ct.getContent().getId())) - .map(ct -> ct.getContent().getId()) - .collect(Collectors.toSet()); - } + List contentTags = tagsManager.getContentTagsByTagName(category); + fileIDsToReturn = contentTags.stream() + .filter(ct -> ct.getContent() instanceof AbstractFile) + .filter(ct -> getDrawableDB().isInDB(ct.getContent().getId())) + .map(ct -> ct.getContent().getId()) + .collect(Collectors.toSet()); } catch (TskCoreException ex) { logger.log(Level.WARNING, "TSK error getting files in Category:" + category.getDisplayName(), ex); //NON-NLS throw ex; @@ -552,14 +533,14 @@ public class GroupManager { synchronized public void handleTagAdded(ContentTagAddedEvent evt) { GroupKey newGroupKey = null; final long fileID = evt.getAddedTag().getContent().getId(); - if (getGroupBy() == DrawableAttribute.CATEGORY && CategoryManager.isCategoryTagName(evt.getAddedTag().getName())) { - newGroupKey = new GroupKey<>(DrawableAttribute.CATEGORY, CategoryManager.categoryFromTagName(evt.getAddedTag().getName()), getDataSource()); + if (getGroupBy() == DrawableAttribute.CATEGORY && controller.getCategoryManager().isCategoryTagName(evt.getAddedTag().getName())) { + newGroupKey = new GroupKey<>(DrawableAttribute.CATEGORY, evt.getAddedTag().getName(), getDataSource()); for (GroupKey oldGroupKey : groupMap.keySet()) { if (oldGroupKey.equals(newGroupKey) == false) { removeFromGroup(oldGroupKey, fileID); } } - } else if (getGroupBy() == DrawableAttribute.TAGS && CategoryManager.isNotCategoryTagName(evt.getAddedTag().getName())) { + } else if (getGroupBy() == DrawableAttribute.TAGS && controller.getCategoryManager().isNotCategoryTagName(evt.getAddedTag().getName())) { newGroupKey = new GroupKey<>(DrawableAttribute.TAGS, evt.getAddedTag().getName(), getDataSource()); } if (newGroupKey != null) { @@ -569,7 +550,8 @@ public class GroupManager { } /** - * Adds an analyzed file to the in-memory group data structures. Marks the group as unseen. + * Adds an analyzed file to the in-memory group data structures. Marks the + * group as unseen. * * @param group Group being added to (will be null if a group has not yet * been created) @@ -584,16 +566,20 @@ public class GroupManager { //if there wasn't already a DrawableGroup, then check if this group is now // in an appropriate state to get one made. 
// Path group, for example, only gets a DrawableGroup created when all files are analyzed - /* NOTE: With the current (Jan 2019) behavior of how we detect a PATH group as being analyzed, the group - * is not marked as analyzed until we add a file for another folder. So, when the last picture in a folder - * is added to the group, the call to 'populateIfAnalyzed' will still not return a group and therefore this - * method will never mark the group as unseen. */ + /* + * NOTE: With the current (Jan 2019) behavior of how we detect a + * PATH group as being analyzed, the group is not marked as analyzed + * until we add a file for another folder. So, when the last picture + * in a folder is added to the group, the call to + * 'populateIfAnalyzed' will still not return a group and therefore + * this method will never mark the group as unseen. + */ group = popuplateIfAnalyzed(groupKey, null); } else { //if there is aleady a group that was previously deemed fully analyzed, then add this newly analyzed file to it. group.addFile(fileID); } - + // reset the seen status for the group (if it is currently considered analyzed) if (group != null) { markGroupUnseen(group); @@ -605,18 +591,14 @@ public class GroupManager { GroupKey groupKey = null; final ContentTagDeletedEvent.DeletedContentTagInfo deletedTagInfo = evt.getDeletedTagInfo(); final TagName deletedTagName = deletedTagInfo.getName(); - if (getGroupBy() == DrawableAttribute.CATEGORY && CategoryManager.isCategoryTagName(deletedTagName)) { - groupKey = new GroupKey<>(DrawableAttribute.CATEGORY, CategoryManager.categoryFromTagName(deletedTagName), null); - } else if (getGroupBy() == DrawableAttribute.TAGS && CategoryManager.isNotCategoryTagName(deletedTagName)) { + if (getGroupBy() == DrawableAttribute.CATEGORY && controller.getCategoryManager().isCategoryTagName(deletedTagName)) { + groupKey = new GroupKey<>(DrawableAttribute.CATEGORY, deletedTagName, null); + } else if (getGroupBy() == DrawableAttribute.TAGS && controller.getCategoryManager().isNotCategoryTagName(deletedTagName)) { groupKey = new GroupKey<>(DrawableAttribute.TAGS, deletedTagName, null); } if (groupKey != null) { final long fileID = deletedTagInfo.getContentID(); DrawableGroup g = removeFromGroup(groupKey, fileID); - - if (controller.getCategoryManager().getTagName(DhsImageCategory.ZERO).equals(deletedTagName) == false) { - addFileToGroup(null, new GroupKey<>(DrawableAttribute.CATEGORY, DhsImageCategory.ZERO, null), fileID); - } } } @@ -653,13 +635,13 @@ public class GroupManager { try { DrawableFile file = getDrawableDB().getFileFromID(fileId); String pathVal = file.getDrawablePath(); - GroupKey pathGroupKey = new GroupKey<>(DrawableAttribute.PATH,pathVal, file.getDataSource()); - + GroupKey pathGroupKey = new GroupKey<>(DrawableAttribute.PATH, pathVal, file.getDataSource()); + updateCurrentPathGroup(pathGroupKey); } catch (TskCoreException | TskDataException ex) { logger.log(Level.WARNING, "Error getting drawabledb for fileId " + fileId, ex); - } - + } + // Update all the groups that this file belongs to Set> groupsForFile = getAllGroupKeysForFile(fileId); for (GroupKey gk : groupsForFile) { @@ -672,45 +654,45 @@ public class GroupManager { //we fire this event for all files so that the category counts get updated during initial db population controller.getCategoryManager().fireChange(updatedFileIDs, null); } - + /** - * Checks if the given path is different from the current path group. 
- * If so, updates the current path group as analyzed, and sets current path - * group to the given path. - * - * The idea is that when the path of the files being processed changes, - * we have moved from one folder to the next, and the group for the - * previous PATH can be considered as analyzed and can be displayed. - * - * NOTE: this a close approximation for when all files in a folder have been processed, - * but there's some room for error - files may go down the ingest pipleline - * out of order or the events may not always arrive in the same order - * - * @param groupKey + * Checks if the given path is different from the current path group. If so, + * updates the current path group as analyzed, and sets current path group + * to the given path. + * + * The idea is that when the path of the files being processed changes, we + * have moved from one folder to the next, and the group for the previous + * PATH can be considered as analyzed and can be displayed. + * + * NOTE: this a close approximation for when all files in a folder have been + * processed, but there's some room for error - files may go down the ingest + * pipleline out of order or the events may not always arrive in the same + * order + * + * @param groupKey */ synchronized private void updateCurrentPathGroup(GroupKey groupKey) { try { if (groupKey.getAttribute() == DrawableAttribute.PATH) { - + if (this.currentPathGroup == null) { currentPathGroup = groupKey; - } - else if (groupKey.getValue().toString().equalsIgnoreCase(this.currentPathGroup.getValue().toString()) == false) { + } else if (groupKey.getValue().toString().equalsIgnoreCase(this.currentPathGroup.getValue().toString()) == false) { // mark the last path group as analyzed getDrawableDB().markGroupAnalyzed(currentPathGroup); popuplateIfAnalyzed(currentPathGroup, null); - + currentPathGroup = groupKey; } } - } - catch (TskCoreException ex) { + } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Error setting is_analyzed status for group: %s", groupKey.getValue().toString()), ex); //NON-NLS - } + } } /** - * Resets current path group, after marking the current path group as analyzed. + * Resets current path group, after marking the current path group as + * analyzed. 
*/ synchronized public void resetCurrentPathGroup() { try { @@ -719,11 +701,11 @@ public class GroupManager { popuplateIfAnalyzed(currentPathGroup, null); currentPathGroup = null; } - } - catch (TskCoreException ex) { + } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Error resetting last path group: %s", currentPathGroup.getValue().toString()), ex); //NON-NLS } } + /** * If the group is analyzed (or other criteria based on grouping) and should * be shown to the user, then add it to the appropriate data structures so @@ -768,12 +750,12 @@ public class GroupManager { controller.getCategoryManager().registerListener(group); groupMap.put(groupKey, group); } - + // Add to analyzedGroups only if it's the a group with the selected groupBy attribute - if ((analyzedGroups.contains(group) == false) && - (getGroupBy() == group.getGroupKey().getAttribute())) { - analyzedGroups.add(group); - sortAnalyzedGroups(); + if ((analyzedGroups.contains(group) == false) + && (getGroupBy() == group.getGroupKey().getAttribute())) { + analyzedGroups.add(group); + sortAnalyzedGroups(); } updateUnSeenGroups(group); @@ -944,11 +926,11 @@ public class GroupManager { switch (groupBy.attrName) { //these cases get special treatment case CATEGORY: - results.putAll(null, Arrays.asList(DhsImageCategory.values())); + results.putAll(null, controller.getCategoryManager().getCategories()); break; case TAGS: results.putAll(null, controller.getTagsManager().getTagNamesInUse().stream() - .filter(CategoryManager::isNotCategoryTagName) + .filter(controller.getCategoryManager()::isNotCategoryTagName) .collect(Collectors.toList())); break; diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupSortBy.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupSortBy.java index 97a75f0f5b..b187f57297 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupSortBy.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupSortBy.java @@ -63,9 +63,7 @@ public class GroupSortBy implements Comparator { */ public final static GroupSortBy PRIORITY = new GroupSortBy(Bundle.GroupSortBy_priority(), "hashset_hits.png", - Comparator.comparing(DrawableGroup::getHashHitDensity) - .thenComparing(Comparator.comparing(DrawableGroup::getUncategorizedCount)) - .reversed()); + Comparator.comparing(DrawableGroup::getHashHitDensity).reversed()); private final static ObservableList values = FXCollections.unmodifiableObservableList(FXCollections.observableArrayList(PRIORITY, NONE, GROUP_BY_VALUE, FILE_COUNT)); diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/GuiUtils.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/GuiUtils.java index 8288ee4abf..f032698578 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/GuiUtils.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/GuiUtils.java @@ -56,7 +56,7 @@ public final class GuiUtils { /** * Create a MenuItem that performs the given action and also set the Action - * as the action for the given Button. Usefull to have a SplitMenuButton + * as the action for the given Button. Useful to have a SplitMenuButton * remember the last chosen menu item as its action. 
* * @param button diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/SummaryTablePane.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/SummaryTablePane.java index 4884f580b7..f297d2b1af 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/SummaryTablePane.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/SummaryTablePane.java @@ -34,10 +34,10 @@ import javafx.scene.layout.VBox; import javafx.util.Pair; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.datamodel.DhsImageCategory; import org.sleuthkit.autopsy.imagegallery.FXMLConstructor; import org.sleuthkit.autopsy.imagegallery.ImageGalleryController; import org.sleuthkit.autopsy.imagegallery.datamodel.CategoryManager.CategoryChangeEvent; +import org.sleuthkit.datamodel.TagName; /** * Displays summary statistics (counts) for each group @@ -45,13 +45,13 @@ import org.sleuthkit.autopsy.imagegallery.datamodel.CategoryManager.CategoryChan public class SummaryTablePane extends AnchorPane { @FXML - private TableColumn, String> catColumn; + private TableColumn, String> catColumn; @FXML - private TableColumn, Long> countColumn; + private TableColumn, Long> countColumn; @FXML - private TableView> tableView; + private TableView> tableView; private final ImageGalleryController controller; @@ -97,9 +97,9 @@ public class SummaryTablePane extends AnchorPane { */ @Subscribe public void handleCategoryChanged(CategoryChangeEvent evt) { - final ObservableList> data = FXCollections.observableArrayList(); + final ObservableList> data = FXCollections.observableArrayList(); if (Case.isCaseOpen()) { - for (DhsImageCategory cat : DhsImageCategory.values()) { + for (TagName cat : controller.getCategoryManager().getCategories()) { data.add(new Pair<>(cat, controller.getCategoryManager().getCategoryCount(cat))); } } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/Toolbar.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/Toolbar.java index ebe8ae3698..f492b5f325 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/Toolbar.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/Toolbar.java @@ -20,12 +20,9 @@ package org.sleuthkit.autopsy.imagegallery.gui; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; -import com.google.common.util.concurrent.FutureCallback; -import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import java.util.ArrayList; -import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -64,7 +61,6 @@ import static org.sleuthkit.autopsy.casemodule.Case.Events.DATA_SOURCE_ADDED; import static org.sleuthkit.autopsy.casemodule.Case.Events.DATA_SOURCE_DELETED; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.ThreadConfined; -import org.sleuthkit.autopsy.datamodel.DhsImageCategory; import org.sleuthkit.autopsy.imagegallery.FXMLConstructor; import org.sleuthkit.autopsy.imagegallery.ImageGalleryController; import org.sleuthkit.autopsy.imagegallery.actions.CategorizeGroupAction; @@ -220,13 +216,13 @@ public class Toolbar extends ToolBar { }); initTagMenuButton(); - CategorizeGroupAction cat5GroupAction = new CategorizeGroupAction(DhsImageCategory.FIVE, controller); + CategorizeGroupAction cat5GroupAction = new 
CategorizeGroupAction(controller.getCategoryManager().getCategories().get(0), controller); catGroupMenuButton.setOnAction(cat5GroupAction); catGroupMenuButton.setText(cat5GroupAction.getText()); catGroupMenuButton.setGraphic(cat5GroupAction.getGraphic()); catGroupMenuButton.showingProperty().addListener(showing -> { if (catGroupMenuButton.isShowing()) { - List categoryMenues = Lists.transform(Arrays.asList(DhsImageCategory.values()), + List categoryMenues = Lists.transform(controller.getCategoryManager().getCategories(), cat -> GuiUtils.createAutoAssigningMenuItem(catGroupMenuButton, new CategorizeGroupAction(cat, controller))); catGroupMenuButton.getItems().setAll(categoryMenues); } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/DrawableView.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/DrawableView.java index caca379014..472f0b59e9 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/DrawableView.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/DrawableView.java @@ -20,6 +20,8 @@ package org.sleuthkit.autopsy.imagegallery.gui.drawableviews; import com.google.common.eventbus.Subscribe; import java.util.Collection; +import java.util.HashMap; +import java.util.Map; import java.util.Optional; import java.util.logging.Level; import javafx.application.Platform; @@ -34,10 +36,11 @@ import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent; import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.ThreadConfined; -import org.sleuthkit.autopsy.datamodel.DhsImageCategory; import org.sleuthkit.autopsy.imagegallery.ImageGalleryController; import org.sleuthkit.autopsy.imagegallery.datamodel.CategoryManager; import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableFile; +import org.sleuthkit.datamodel.TagName; +import org.sleuthkit.datamodel.TagName.HTML_COLOR; /** * Interface for classes that are views of a single DrawableFile. 
Implementation @@ -54,19 +57,9 @@ public interface DrawableView { static final CornerRadii CAT_CORNER_RADII = new CornerRadii(3); - static final Border HASH_BORDER = new Border(new BorderStroke(Color.PURPLE, BorderStrokeStyle.DASHED, CAT_CORNER_RADII, CAT_BORDER_WIDTHS)); + Border HASH_BORDER = new Border(new BorderStroke(Color.CYAN, BorderStrokeStyle.DASHED, CAT_CORNER_RADII, CAT_BORDER_WIDTHS)); - static final Border CAT1_BORDER = new Border(new BorderStroke(DhsImageCategory.ONE.getColor(), BorderStrokeStyle.SOLID, CAT_CORNER_RADII, CAT_BORDER_WIDTHS)); - - static final Border CAT2_BORDER = new Border(new BorderStroke(DhsImageCategory.TWO.getColor(), BorderStrokeStyle.SOLID, CAT_CORNER_RADII, CAT_BORDER_WIDTHS)); - - static final Border CAT3_BORDER = new Border(new BorderStroke(DhsImageCategory.THREE.getColor(), BorderStrokeStyle.SOLID, CAT_CORNER_RADII, CAT_BORDER_WIDTHS)); - - static final Border CAT4_BORDER = new Border(new BorderStroke(DhsImageCategory.FOUR.getColor(), BorderStrokeStyle.SOLID, CAT_CORNER_RADII, CAT_BORDER_WIDTHS)); - - static final Border CAT5_BORDER = new Border(new BorderStroke(DhsImageCategory.FIVE.getColor(), BorderStrokeStyle.SOLID, CAT_CORNER_RADII, CAT_BORDER_WIDTHS)); - - static final Border CAT0_BORDER = new Border(new BorderStroke(DhsImageCategory.ZERO.getColor(), BorderStrokeStyle.SOLID, CAT_CORNER_RADII, CAT_BORDER_WIDTHS)); + Map BORDER_MAP = new HashMap<>(); Region getCategoryBorderRegion(); @@ -115,38 +108,38 @@ public interface DrawableView { } - static Border getCategoryBorder(DhsImageCategory category) { - if (category != null) { - switch (category) { - case ONE: - return CAT1_BORDER; - case TWO: - return CAT2_BORDER; - case THREE: - return CAT3_BORDER; - case FOUR: - return CAT4_BORDER; - case FIVE: - return CAT5_BORDER; - case ZERO: - default: - return CAT0_BORDER; + /** + * Get the boarder for the given category. + * + * Static instances of the boarders will lazily constructed and stored in + * the BORDER_MAP. + * + * @param category + * + * @return + */ + static Border getCategoryBorder(TagName category) { + Border border = null; + if (category != null && category.getColor() != HTML_COLOR.NONE) { + border = BORDER_MAP.get(category.getDisplayName()); + if (border == null) { + border = new Border(new BorderStroke(Color.web(category.getColor().getRgbValue()), BorderStrokeStyle.SOLID, CAT_CORNER_RADII, CAT_BORDER_WIDTHS)); + BORDER_MAP.put(category.getDisplayName(), border); } - } else { - return CAT0_BORDER; } + return border; } @ThreadConfined(type = ThreadConfined.ThreadType.ANY) - default DhsImageCategory updateCategory() { + default TagName updateCategory() { if (getFile().isPresent()) { - final DhsImageCategory category = getFile().map(DrawableFile::getCategory).orElse(DhsImageCategory.ZERO); - final Border border = hasHashHit() && (category == DhsImageCategory.ZERO) ? HASH_BORDER : getCategoryBorder(category); + final TagName tagNameCat = getFile().map(DrawableFile::getCategory).orElse(null); + final Border border = hasHashHit() ? 
HASH_BORDER : getCategoryBorder(tagNameCat); Platform.runLater(() -> getCategoryBorderRegion().setBorder(border)); - return category; + return tagNameCat; } else { - return DhsImageCategory.ZERO; + return null; } } } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/GroupPane.fxml b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/GroupPane.fxml index 79134d0e4d..5dcd725381 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/GroupPane.fxml +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/GroupPane.fxml @@ -182,35 +182,6 @@ - - - - - - - - - - - - - - - - - - - - - diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/GroupPane.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/GroupPane.java index 559fe4b714..fed02ac6e4 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/GroupPane.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/GroupPane.java @@ -19,12 +19,10 @@ package org.sleuthkit.autopsy.imagegallery.gui.drawableviews; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSet; import static com.google.common.collect.Lists.transform; import com.google.common.util.concurrent.ListeningExecutorService; import java.util.ArrayList; import java.util.Arrays; -import static java.util.Arrays.asList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -50,7 +48,6 @@ import javafx.beans.property.ReadOnlyObjectWrapper; import javafx.beans.property.SimpleObjectProperty; import javafx.beans.value.ObservableValue; import javafx.collections.ObservableList; -import javafx.collections.ObservableSet; import javafx.event.ActionEvent; import javafx.event.EventHandler; import javafx.fxml.FXML; @@ -86,10 +83,7 @@ import static javafx.scene.input.KeyCode.UP; import javafx.scene.input.KeyEvent; import javafx.scene.input.MouseEvent; import javafx.scene.layout.AnchorPane; -import javafx.scene.layout.Border; import javafx.scene.layout.BorderPane; -import javafx.scene.layout.BorderStroke; -import javafx.scene.layout.BorderStrokeStyle; import javafx.scene.layout.BorderWidths; import javafx.scene.layout.CornerRadii; import javafx.scene.layout.HBox; @@ -111,7 +105,6 @@ import org.sleuthkit.autopsy.corecomponentinterfaces.ContextMenuActionsProvider; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.coreutils.ThreadConfined.ThreadType; -import org.sleuthkit.autopsy.datamodel.DhsImageCategory; import org.sleuthkit.autopsy.directorytree.ExtractAction; import org.sleuthkit.autopsy.imagegallery.FXMLConstructor; import org.sleuthkit.autopsy.imagegallery.FileIDSelectionModel; @@ -134,6 +127,7 @@ import org.sleuthkit.autopsy.imagegallery.datamodel.grouping.GroupViewState; import static org.sleuthkit.autopsy.imagegallery.gui.GuiUtils.createAutoAssigningMenuItem; import org.sleuthkit.autopsy.imagegallery.utils.TaskUtils; import static org.sleuthkit.autopsy.imagegallery.utils.TaskUtils.addFXCallback; +import org.sleuthkit.datamodel.TagName; import org.sleuthkit.datamodel.TskCoreException; /** @@ -176,19 +170,6 @@ public class GroupPane extends BorderPane { private SplitMenuButton tagSelectedSplitMenu; @FXML private ToolBar headerToolBar; - @FXML - private ToggleButton cat0Toggle; - @FXML - private ToggleButton cat1Toggle; - @FXML - private ToggleButton 
cat2Toggle; - @FXML - private ToggleButton cat3Toggle; - @FXML - private ToggleButton cat4Toggle; - @FXML - private ToggleButton cat5Toggle; - @FXML private SegmentedButton segButton; @@ -220,11 +201,6 @@ public class GroupPane extends BorderPane { @FXML private Label catContainerLabel; @FXML - private Label catHeadingLabel; - - @FXML - private HBox catSegmentedContainer; - @FXML private HBox catSplitMenuContainer; private final ListeningExecutorService exec = TaskUtils.getExecutorForClass(GroupPane.class); @@ -244,12 +220,18 @@ public class GroupPane extends BorderPane { private ContextMenu contextMenu; - /** the current GroupViewMode of this GroupPane */ + /** + * the current GroupViewMode of this GroupPane + */ private final SimpleObjectProperty groupViewMode = new SimpleObjectProperty<>(GroupViewMode.TILE); - /** the grouping this pane is currently the view for */ + /** + * the grouping this pane is currently the view for + */ private final ReadOnlyObjectWrapper grouping = new ReadOnlyObjectWrapper<>(); + private final Map toggleButtonMap = new HashMap<>(); + /** * Map from fileIDs to their assigned cells in the tile view. This is used * to determine whether fileIDs are visible or are offscreen. No entry @@ -278,7 +260,7 @@ public class GroupPane extends BorderPane { undoAction = new UndoAction(controller); redoAction = new RedoAction(controller); - FXMLConstructor.construct(this, "GroupPane.fxml"); //NON-NLS + FXMLConstructor.construct(this, "GroupPane.fxml"); //NON-NLS } GroupViewMode getGroupViewMode() { @@ -307,7 +289,35 @@ public class GroupPane extends BorderPane { } void syncCatToggle(DrawableFile file) { - getToggleForCategory(file.getCategory()).setSelected(true); + TagName tagName = file.getCategory(); + if (tagName != null) { + getToggleForCategory(tagName).setSelected(true); + } + } + + /** + * Returns a toggle button for the given TagName. + * + * @param tagName TagName to create a button for. + * + * @return A new instance of a ToggleButton. + */ + private ToggleButton getToggleForCategory(TagName tagName) { + + ToggleButton button = toggleButtonMap.get(tagName.getDisplayName()); + + if (button == null) { + String[] split = tagName.getDisplayName().split(":"); + split = split[0].split("-"); + + int category = Integer.parseInt(split[1]); + + button = new ToggleButton(); + button.setText(Integer.toString(category)); + + toggleButtonMap.put(tagName.getDisplayName(), button); + } + return button; } public void activateTileViewer() { @@ -353,25 +363,6 @@ public class GroupPane extends BorderPane { return grouping.getReadOnlyProperty(); } - private ToggleButton getToggleForCategory(DhsImageCategory category) { - switch (category) { - case ZERO: - return cat0Toggle; - case ONE: - return cat1Toggle; - case TWO: - return cat2Toggle; - case THREE: - return cat3Toggle; - case FOUR: - return cat4Toggle; - case FIVE: - return cat5Toggle; - default: - throw new UnsupportedOperationException("Unknown category: " + category.name()); - } - } - /** * called automatically during constructor by FXMLConstructor. 
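The new getToggleForCategory helper above derives the toggle label by splitting the display name on ":" and then "-", which works for names of the shape that parse implies (for example "CAT-1: ...") but throws at runtime for anything else. A more forgiving label extraction is sketched below; illustrative only, not a hunk of this patch, and the helper class and method names are hypothetical:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;
    import org.sleuthkit.datamodel.TagName;

    class CategoryToggleLabels {
        private static final Pattern CATEGORY_NUMBER = Pattern.compile("-(\\d+)");

        // Returns the category number ("1" for "CAT-1: ...") when the display name
        // matches the expected shape, otherwise falls back to the full display name
        // instead of throwing.
        static String labelFor(TagName tagName) {
            Matcher matcher = CATEGORY_NUMBER.matcher(tagName.getDisplayName());
            return matcher.find() ? matcher.group(1) : tagName.getDisplayName();
        }
    }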
* @@ -384,12 +375,6 @@ public class GroupPane extends BorderPane { "GroupPane.catContainerLabel.displayText=Categorize Selected File:", "GroupPane.catHeadingLabel.displayText=Category:"}) void initialize() { - assert cat0Toggle != null : "fx:id=\"cat0Toggle\" was not injected: check your FXML file 'GroupPane.fxml'."; - assert cat1Toggle != null : "fx:id=\"cat1Toggle\" was not injected: check your FXML file 'GroupPane.fxml'."; - assert cat2Toggle != null : "fx:id=\"cat2Toggle\" was not injected: check your FXML file 'GroupPane.fxml'."; - assert cat3Toggle != null : "fx:id=\"cat3Toggle\" was not injected: check your FXML file 'GroupPane.fxml'."; - assert cat4Toggle != null : "fx:id=\"cat4Toggle\" was not injected: check your FXML file 'GroupPane.fxml'."; - assert cat5Toggle != null : "fx:id=\"cat5Toggle\" was not injected: check your FXML file 'GroupPane.fxml'."; assert gridView != null : "fx:id=\"tilePane\" was not injected: check your FXML file 'GroupPane.fxml'."; assert catSelectedSplitMenu != null : "fx:id=\"grpCatSplitMenu\" was not injected: check your FXML file 'GroupPane.fxml'."; assert tagSelectedSplitMenu != null : "fx:id=\"grpTagSplitMenu\" was not injected: check your FXML file 'GroupPane.fxml'."; @@ -399,21 +384,6 @@ public class GroupPane extends BorderPane { assert tileToggle != null : "fx:id=\"tileToggle\" was not injected: check your FXML file 'GroupPane.fxml'."; assert seenByOtherExaminersCheckBox != null : "fx:id=\"seenByOtherExaminersCheckBox\" was not injected: check your FXML file 'GroupPane.fxml'."; - for (DhsImageCategory cat : DhsImageCategory.values()) { - ToggleButton toggleForCategory = getToggleForCategory(cat); - toggleForCategory.setBorder(new Border(new BorderStroke(cat.getColor(), BorderStrokeStyle.SOLID, CORNER_RADII_2, BORDER_WIDTHS_2))); - toggleForCategory.getStyleClass().remove("radio-button"); - toggleForCategory.getStyleClass().add("toggle-button"); - toggleForCategory.selectedProperty().addListener((ov, wasSelected, toggleSelected) -> { - if (toggleSelected && slideShowPane != null) { - slideShowPane.getFileID().ifPresent(fileID -> { - selectionModel.clearAndSelect(fileID); - new CategorizeAction(controller, cat, ImmutableSet.of(fileID)).handle(null); - }); - } - }); - } - //configure flashing glow animation on next unseen group button flashAnimation.setCycleCount(Timeline.INDEFINITE); flashAnimation.setAutoReverse(true); @@ -447,14 +417,14 @@ public class GroupPane extends BorderPane { }, throwable -> logger.log(Level.SEVERE, "Error getting tag names.", throwable)//NON-NLS ); - CategorizeSelectedFilesAction cat5SelectedAction = new CategorizeSelectedFilesAction(DhsImageCategory.FIVE, controller); + CategorizeSelectedFilesAction cat5SelectedAction = new CategorizeSelectedFilesAction(controller.getCategoryManager().getCategories().get(0), controller); catSelectedSplitMenu.setOnAction(cat5SelectedAction); catSelectedSplitMenu.setText(cat5SelectedAction.getText()); catSelectedSplitMenu.setGraphic(cat5SelectedAction.getGraphic()); - List categoryMenues = transform(asList(DhsImageCategory.values()), + List categoryMenues = transform(controller.getCategoryManager().getCategories(), cat -> createAutoAssigningMenuItem(catSelectedSplitMenu, new CategorizeSelectedFilesAction(cat, controller))); catSelectedSplitMenu.getItems().setAll(categoryMenues); @@ -466,16 +436,21 @@ public class GroupPane extends BorderPane { bottomLabel.setText(Bundle.GroupPane_bottomLabel_displayText()); headerLabel.setText(Bundle.GroupPane_hederLabel_displayText()); 
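Both Toolbar and GroupPane now seed their default categorize action with getCategories().get(0) where the code previously used DhsImageCategory.FIVE, which relies on the standard category TagSet keeping a fixed ordering. If that ordering is ever not guaranteed, selecting the default by display name is more explicit; a minimal sketch, illustrative only and not a hunk of this patch (the "CAT-5" prefix is an assumption about the standard display names, and the helper is hypothetical):

    import java.util.List;
    import org.sleuthkit.autopsy.imagegallery.ImageGalleryController;
    import org.sleuthkit.datamodel.TagName;

    class DefaultCategory {
        // Prefer the category whose display name starts with "CAT-5"; fall back to
        // the first entry, which is what the patch currently uses.
        static TagName choose(ImageGalleryController controller) {
            List<TagName> categories = controller.getCategoryManager().getCategories();
            return categories.stream()
                    .filter(tagName -> tagName.getDisplayName().startsWith("CAT-5"))
                    .findFirst()
                    .orElse(categories.get(0));
        }
    }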
catContainerLabel.setText(Bundle.GroupPane_catContainerLabel_displayText()); - catHeadingLabel.setText(Bundle.GroupPane_catHeadingLabel_displayText()); - //show categorization controls depending on group view mode - headerToolBar.getItems().remove(catSegmentedContainer); + + // This seems to be the only way to make sure the when the user switches + // to SLIDE_SHOW the first time that the undo\redo buttons are removed. + headerToolBar.getItems().remove(undoButton); + headerToolBar.getItems().remove(redoButton); + headerToolBar.getItems().add(undoButton); + headerToolBar.getItems().add(redoButton); + groupViewMode.addListener((ObservableValue observable, GroupViewMode oldValue, GroupViewMode newValue) -> { if (newValue == GroupViewMode.SLIDE_SHOW) { - headerToolBar.getItems().remove(catSplitMenuContainer); - headerToolBar.getItems().add(catSegmentedContainer); + headerToolBar.getItems().remove(undoButton); + headerToolBar.getItems().remove(redoButton); } else { - headerToolBar.getItems().remove(catSegmentedContainer); - headerToolBar.getItems().add(catSplitMenuContainer); + headerToolBar.getItems().add(undoButton); + headerToolBar.getItems().add(redoButton); } }); @@ -527,7 +502,7 @@ public class GroupPane extends BorderPane { //listen to tile selection and make sure it is visible in scroll area selectionModel.lastSelectedProperty().addListener((observable, oldFileID, newFileId) -> { if (groupViewMode.get() == GroupViewMode.SLIDE_SHOW - && slideShowPane != null) { + && slideShowPane != null) { slideShowPane.setFile(newFileId); } else { scrollToFileID(newFileId); @@ -775,42 +750,9 @@ public class GroupPane extends BorderPane { selectAllFiles(); t.consume(); } - ObservableSet selected = selectionModel.getSelected(); - if (selected.isEmpty() == false) { - DhsImageCategory cat = keyCodeToCat(t.getCode()); - if (cat != null) { - new CategorizeAction(controller, cat, selected).handle(null); - } - } } } - private DhsImageCategory keyCodeToCat(KeyCode t) { - if (t != null) { - switch (t) { - case NUMPAD0: - case DIGIT0: - return DhsImageCategory.ZERO; - case NUMPAD1: - case DIGIT1: - return DhsImageCategory.ONE; - case NUMPAD2: - case DIGIT2: - return DhsImageCategory.TWO; - case NUMPAD3: - case DIGIT3: - return DhsImageCategory.THREE; - case NUMPAD4: - case DIGIT4: - return DhsImageCategory.FOUR; - case NUMPAD5: - case DIGIT5: - return DhsImageCategory.FIVE; - } - } - return null; - } - private void handleArrows(KeyEvent t) { Long lastSelectFileId = selectionModel.lastSelectedProperty().get(); diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/MetaDataPane.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/MetaDataPane.java index 7766317995..e83f970d65 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/MetaDataPane.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/MetaDataPane.java @@ -19,9 +19,9 @@ package org.sleuthkit.autopsy.imagegallery.gui.drawableviews; import com.google.common.eventbus.Subscribe; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import static java.util.Collections.singletonMap; import java.util.List; import java.util.Objects; @@ -56,7 +56,6 @@ import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent; import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent; import org.sleuthkit.autopsy.coreutils.Logger; -import 
org.sleuthkit.autopsy.datamodel.DhsImageCategory; import org.sleuthkit.autopsy.imagegallery.FXMLConstructor; import org.sleuthkit.autopsy.imagegallery.ImageGalleryController; import org.sleuthkit.autopsy.imagegallery.datamodel.CategoryManager; @@ -165,20 +164,50 @@ public class MetaDataPane extends DrawableUIBase { titledPane.setText(Bundle.MetaDataPane_titledPane_displayName()); } + /** + * Returns the display string for the given pair. + * + * @param p A DrawableAttribute and its collection. + * + * @return The string to display. + */ @SuppressWarnings("unchecked") - static private String getValueDisplayString(Pair, Collection> p) { - if (p.getKey() == DrawableAttribute.TAGS) { - return ((Collection) p.getValue()).stream() - .map(TagName::getDisplayName) - .filter(DhsImageCategory::isNotCategoryName) - .collect(Collectors.joining(" ; ")); + private String getValueDisplayString(Pair, Collection> p) { + if (p.getKey() == DrawableAttribute.TAGS || p.getKey() == DrawableAttribute.CATEGORY) { + return getTagDisplayNames((Collection) p.getValue(), p.getKey()); } else { return p.getValue().stream() .map(value -> Objects.toString(value, "")) .collect(Collectors.joining(" ; ")); + } } + /** + * Create the list of TagName displayNames for either Tags or Categories. + * + * @param tagNameList List of TagName values + * @param attribute A DrawableAttribute value either CATEGORY or TAGS + * + * @return A list of TagNames separated by ; or an empty string. + */ + private String getTagDisplayNames(Collection tagNameList, DrawableAttribute attribute) { + String displayStr = ""; + CategoryManager controller = getController().getCategoryManager(); + List nameList = new ArrayList<>(); + if (tagNameList != null && !tagNameList.isEmpty()) { + for (TagName tagName : tagNameList) { + if ((attribute == DrawableAttribute.CATEGORY && controller.isCategoryTagName(tagName)) + || (attribute == DrawableAttribute.TAGS && !controller.isCategoryTagName(tagName))) { + nameList.add(tagName.getDisplayName()); + } + } + displayStr = String.join(";", nameList); + } + + return displayStr; + } + @Override synchronized protected void setFileHelper(Long newFileID) { setFileIDOpt(Optional.ofNullable(newFileID)); diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/SlideShowView.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/SlideShowView.java index e4d565d0ee..16796fb996 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/SlideShowView.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/drawableviews/SlideShowView.java @@ -50,12 +50,12 @@ import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.coreutils.ThreadConfined.ThreadType; import org.sleuthkit.autopsy.imagegallery.FXMLConstructor; import org.sleuthkit.autopsy.imagegallery.ImageGalleryController; -import org.sleuthkit.autopsy.datamodel.DhsImageCategory; import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableFile; import org.sleuthkit.autopsy.imagegallery.datamodel.VideoFile; import org.sleuthkit.autopsy.imagegallery.gui.VideoPlayer; import static org.sleuthkit.autopsy.imagegallery.gui.drawableviews.DrawableUIBase.exec; import static org.sleuthkit.autopsy.imagegallery.gui.drawableviews.DrawableView.CAT_BORDER_WIDTH; +import org.sleuthkit.datamodel.TagName; /** * Displays the files of a group one at a time. 
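For reference, the filtering loop in the new getTagDisplayNames above can also be written as a stream, in the style the old getValueDisplayString used. A minimal equivalent sketch, illustrative only and not a hunk of this patch (the helper class is hypothetical, and it assumes the attribute is either CATEGORY or TAGS, which the caller guarantees):

    import java.util.Collection;
    import java.util.stream.Collectors;
    import org.sleuthkit.autopsy.imagegallery.datamodel.CategoryManager;
    import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableAttribute;
    import org.sleuthkit.datamodel.TagName;

    class TagDisplayNames {
        // Joins the display names of category tags (for CATEGORY) or non-category
        // tags (for TAGS) with ";", mirroring the loop added in MetaDataPane.
        static String join(Collection<TagName> tagNames, DrawableAttribute<?> attribute,
                CategoryManager categoryManager) {
            if (tagNames == null) {
                return "";
            }
            return tagNames.stream()
                    .filter(tagName -> (attribute == DrawableAttribute.CATEGORY)
                            == categoryManager.isCategoryTagName(tagName))
                    .map(TagName::getDisplayName)
                    .collect(Collectors.joining(";"));
        }
    }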
Designed to be embedded in a @@ -297,14 +297,14 @@ public class SlideShowView extends DrawableTileBase { @Override @ThreadConfined(type = ThreadType.ANY) - public DhsImageCategory updateCategory() { + public TagName updateCategory() { Optional file = getFile(); if (file.isPresent()) { - DhsImageCategory updateCategory = super.updateCategory(); + TagName updateCategory = super.updateCategory(); Platform.runLater(() -> getGroupPane().syncCatToggle(file.get())); return updateCategory; } else { - return DhsImageCategory.ZERO; + return null; } } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/navpanel/GroupCellFactory.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/navpanel/GroupCellFactory.java index c5497d31af..bc0567d85a 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/navpanel/GroupCellFactory.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/navpanel/GroupCellFactory.java @@ -117,6 +117,7 @@ class GroupCellFactory { final Node graphic = (group.getGroupByAttribute() == DrawableAttribute.TAGS) ? controller.getTagsManager().getGraphic((TagName) group.getGroupByValue()) : group.getGroupKey().getGraphic(); + final String text = getCellText(cell); final String style = getSeenStyleClass(cell); @@ -157,10 +158,10 @@ class GroupCellFactory { */ private String getCountsText(GroupCell cell) { return cell.getGroup() - .map(group -> - " (" + (sortOrder.get() == GroupComparators.ALPHABETICAL - ? group.getSize() - : sortOrder.get().getFormattedValueOfGroup(group)) + ")" + .map(group + -> " (" + (sortOrder.get() == GroupComparators.ALPHABETICAL + ? group.getSize() + : sortOrder.get().getFormattedValueOfGroup(group)) + ")" ).orElse(""); //if item is null or group is null } diff --git a/RecentActivity/build.xml b/RecentActivity/build.xml index d298fa519a..4b3d8e3347 100644 --- a/RecentActivity/build.xml +++ b/RecentActivity/build.xml @@ -22,6 +22,9 @@ + + + diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED index 35666d6973..310602b0f9 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED @@ -55,6 +55,13 @@ ExtractSafari_Error_Getting_History=An error occurred while processing Safari hi ExtractSafari_Error_Parsing_Bookmark=An error occured while processing Safari Bookmark files ExtractSafari_Error_Parsing_Cookies=An error occured while processing Safari Cookies files ExtractSafari_Module_Name=Safari +ExtractSru_error_finding_export_srudb_program=Error finding export_srudb program +ExtractSru_module_name=System Resource Usage Extractor +ExtractSru_process_error_executing_export_srudb_program=Error running export_srudb program +ExtractSru_process_errormsg_find_software_hive=Unable to find SOFTWARE HIVE file +ExtractSru_process_errormsg_find_srudb_dat=Unable to find srudb.dat file +ExtractSru_process_errormsg_write_software_hive=Unable to write SOFTWARE HIVE file +ExtractSru_process_errormsg_write_srudb_dat=Unable to write srudb.dat file ExtractZone_Internet=Internet Zone ExtractZone_Local_Intranet=Local Intranet Zone ExtractZone_Local_Machine=Local Machine Zone diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSru.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSru.java new file mode 100644 index 0000000000..77dac6f22c --- /dev/null +++ 
b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSru.java @@ -0,0 +1,481 @@ + /* + * + * Autopsy Forensic Browser + * + * Copyright 2020 Basis Technology Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.recentactivity; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.logging.Level; +import java.util.Map; +import org.apache.commons.io.FilenameUtils; +import org.openide.modules.InstalledFileLocator; +import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.services.FileManager; +import org.sleuthkit.autopsy.coreutils.ExecUtil; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.coreutils.PlatformUtil; +import org.sleuthkit.autopsy.coreutils.SQLiteDBConnect; +import org.sleuthkit.autopsy.datamodel.ContentUtils; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; +import org.sleuthkit.autopsy.ingest.IngestJobContext; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.Blackboard; +import org.sleuthkit.datamodel.BlackboardArtifact; +import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT; +import org.sleuthkit.datamodel.BlackboardAttribute; +import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.TskCoreException; + + +/** + * Extract the System Resource Usage database to a temp directory so it can be parsed into a SQLite db + * and then brought into extracted content + */ +final class ExtractSru extends Extract { + + private static final Logger logger = Logger.getLogger(ExtractSru.class.getName()); + + private IngestJobContext context; + + private static final String APPLICATION_USAGE_SOURCE_NAME = "System Resource Usage - Application Usage"; //NON-NLS + private static final String NETWORK_USAGE_SOURCE_NAME = "System Resource Usage - Network Usage"; + +// private static final String ARTIFACT_ATTRIBUTE_NAME = "TSK_ARTIFACT_NAME"; //NON-NLS + + private static final String MODULE_NAME = "extractSRU"; //NON-NLS + + private static final String SRU_TOOL_FOLDER = "markmckinnon"; //NON-NLS + private static final String SRU_TOOL_NAME_WINDOWS_32 = "Export_Srudb_32.exe"; //NON-NLS + private static final String SRU_TOOL_NAME_WINDOWS_64 = "Export_Srudb_64.exe"; //NON-NLS + private static final String SRU_TOOL_NAME_LINUX = "Export_Srudb_Linux.exe"; //NON-NLS + private static final String SRU_TOOL_NAME_MAC = "Export_srudb_macos"; //NON-NLS + private static final String 
SRU_OUTPUT_FILE_NAME = "Output.txt"; //NON-NLS + private static final String SRU_ERROR_FILE_NAME = "Error.txt"; //NON-NLS + + private static final Map applicationFilesFound = new HashMap<>(); + + @Messages({ + "ExtractSru_module_name=System Resource Usage Extractor" + }) + ExtractSru() { + this.moduleName = Bundle.ExtractSru_module_name(); + } + + @Messages({ + "ExtractSru_error_finding_export_srudb_program=Error finding export_srudb program", + "ExtractSru_process_error_executing_export_srudb_program=Error running export_srudb program" + }) + + @Override + void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { + + this.context = context; + + String modOutPath = Case.getCurrentCase().getModuleDirectory() + File.separator + "sru"; + File dir = new File(modOutPath); + if (dir.exists() == false) { + dir.mkdirs(); + } + + String tempDirPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), "sru"); //NON-NLS + String softwareHiveFileName = getSoftwareHiveFile(dataSource, tempDirPath); + + if (softwareHiveFileName == null) { + return; + } + + AbstractFile sruAbstractFile = getSruFile(dataSource, tempDirPath); + + if (sruAbstractFile == null) { + return; //If we cannot find the srudb.dat file we cannot proceed, which is ok + } + + final String sruDumper = getPathForSruDumper(); + if (sruDumper == null) { + this.addErrorMessage(Bundle.ExtractSru_error_finding_export_srudb_program()); + logger.log(Level.SEVERE, "Error finding export_srudb program"); //NON-NLS + return; //If we cannot find the export_srudb program we cannot proceed + } + + if (context.dataSourceIngestIsCancelled()) { + return; + } + + try { + String modOutFile = modOutPath + File.separator + sruAbstractFile.getId() + "_srudb.db3"; + String sruFileName = tempDirPath + File.separator + sruAbstractFile.getId() + "_" + sruAbstractFile.getName(); + + extractSruFiles(sruDumper, sruFileName, modOutFile, tempDirPath, softwareHiveFileName); + + findSruExecutedFiles(modOutFile, dataSource); + + createNetUsageArtifacts(modOutFile, sruAbstractFile); + createAppUsageArtifacts(modOutFile, sruAbstractFile); + } catch (IOException ex) { + this.addErrorMessage(Bundle.ExtractSru_process_error_executing_export_srudb_program()); + logger.log(Level.SEVERE, "Error running the export_srudb program against the SRUDB.DAT file", ex); //NON-NLS + } + } + + @Messages({ + "ExtractSru_process_errormsg_find_software_hive=Unable to find SOFTWARE HIVE file", + "ExtractSru_process_errormsg_write_software_hive=Unable to write SOFTWARE HIVE file" + }) + + /** + * Extract the SOFTWARE hive file to the temp directory + * + * @param dataSource datasource where the SOFTWARE hive is + * @param tempDirPath temp directory to write the file to + * + * @return The SOFTWARE hive file location, or null if it could not be extracted + */ + String getSoftwareHiveFile(Content dataSource, String tempDirPath) { + FileManager fileManager = Case.getCurrentCase().getServices().getFileManager(); + + List softwareHiveFiles; + + try { + softwareHiveFiles = fileManager.findFiles(dataSource, "SOFTWARE"); //NON-NLS + } catch (TskCoreException ex) { + this.addErrorMessage(Bundle.ExtractSru_process_errormsg_find_software_hive()); + logger.log(Level.WARNING, "Unable to find SOFTWARE HIVE file.", ex); //NON-NLS + return null; // No need to continue + } + + String softwareHiveFileName = null; + + for (AbstractFile softwareFile : softwareHiveFiles) { + + if (softwareFile.getParentPath().endsWith("/config/")) { + softwareHiveFileName = tempDirPath + File.separator + softwareFile.getId() + "_" + softwareFile.getName(); + + try { +
ContentUtils.writeToFile(softwareFile, new File(softwareHiveFileName)); + } catch (IOException ex) { + this.addErrorMessage(Bundle.ExtractSru_process_errormsg_write_software_hive()); + logger.log(Level.WARNING, String.format("Unable to write %s to temp directory. File name: %s", softwareFile.getName(), softwareFile), ex); //NON-NLS + return null; + } + } + } + return softwareHiveFileName; + } + + @Messages({ + "ExtractSru_process_errormsg_find_srudb_dat=Unable to find srudb.dat file", + "ExtractSru_process_errormsg_write_srudb_dat=Unable to write srudb.dat file" + }) + /** + * Extract the SRUDB.DAT file to the temp directory + * + * @param dataSource datasource where the SRUDB.DAT file is + * @param tempDirPath temp directory to write the file to + * + * @return The SRUDB.DAT AbstractFile, or null if it could not be extracted + */ + AbstractFile getSruFile(Content dataSource, String tempDirPath) { + FileManager fileManager = Case.getCurrentCase().getServices().getFileManager(); + + List sruFiles; + + try { + sruFiles = fileManager.findFiles(dataSource, "SRUDB.DAT"); //NON-NLS + } catch (TskCoreException ex) { + this.addErrorMessage(Bundle.ExtractSru_process_errormsg_find_srudb_dat()); + logger.log(Level.WARNING, "Unable to find SRUDB.DAT file.", ex); //NON-NLS + return null; // No need to continue + } + + AbstractFile sruAbstractFile = null; + + for (AbstractFile sruFile : sruFiles) { + + String sruFileName = tempDirPath + File.separator + sruFile.getId() + "_" + sruFile.getName(); + sruAbstractFile = sruFile; + + try { + ContentUtils.writeToFile(sruFile, new File(sruFileName)); + } catch (IOException ex) { + this.addErrorMessage(Bundle.ExtractSru_process_errormsg_write_srudb_dat()); + logger.log(Level.WARNING, String.format("Unable to write %s to temp directory. File name: %s", sruFile.getName(), sruFile), ex); //NON-NLS + return null; + } + + } + return sruAbstractFile; + } + + /** + * Run the export_srudb program against the srudb.dat file + * + * @param sruExePath path to the export_srudb executable + * @param sruFile path of the extracted srudb.dat file + * @param tempOutFile path of the SQLite database the program will create + * @param tempOutPath temp directory for the program output and error files + * @param softwareHiveFile path of the extracted SOFTWARE hive + * + * @throws IOException + */ + void extractSruFiles(String sruExePath, String sruFile, String tempOutFile, String tempOutPath, String softwareHiveFile) throws IOException { + final Path outputFilePath = Paths.get(tempOutPath, SRU_OUTPUT_FILE_NAME); + final Path errFilePath = Paths.get(tempOutPath, SRU_ERROR_FILE_NAME); + + List commandLine = new ArrayList<>(); + commandLine.add(sruExePath); + commandLine.add(sruFile); //NON-NLS + commandLine.add(softwareHiveFile); + commandLine.add(tempOutFile); + + ProcessBuilder processBuilder = new ProcessBuilder(commandLine); + processBuilder.redirectOutput(outputFilePath.toFile()); + processBuilder.redirectError(errFilePath.toFile()); + + ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context)); + } + + private String getPathForSruDumper() { + Path path = null; + if (PlatformUtil.isWindowsOS()) { + if (PlatformUtil.is64BitOS()) { + path = Paths.get(SRU_TOOL_FOLDER, SRU_TOOL_NAME_WINDOWS_64); + } else { + path = Paths.get(SRU_TOOL_FOLDER, SRU_TOOL_NAME_WINDOWS_32); + } + } else { + if ("Linux".equals(PlatformUtil.getOSName())) { + path = Paths.get(SRU_TOOL_FOLDER, SRU_TOOL_NAME_LINUX); + } else { + path = Paths.get(SRU_TOOL_FOLDER, SRU_TOOL_NAME_MAC); + } + } + File sruToolFile = InstalledFileLocator.getDefault().locate(path.toString(), + ExtractSru.class.getPackage().getName(), false); + if (sruToolFile != null) { + return sruToolFile.getAbsolutePath(); + } + + return null; + } + + private void
findSruExecutedFiles(String sruDb, Content dataSource) { + + org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); + + String sqlStatement = "SELECT DISTINCT SUBSTR(LTRIM(IdBlob, '\\Device\\HarddiskVolume'), INSTR(LTRIM(IdBlob, '\\Device\\HarddiskVolume'), '\\')) " + + " application_name, idBlob source_name FROM SruDbIdMapTable WHERE idType = 0 AND idBlob NOT LIKE '!!%'"; //NON-NLS + + try (SQLiteDBConnect tempdbconnect = new SQLiteDBConnect("org.sqlite.JDBC", "jdbc:sqlite:" + sruDb); //NON-NLS + ResultSet resultSet = tempdbconnect.executeQry(sqlStatement)) { + + while (resultSet.next()) { + + if (context.dataSourceIngestIsCancelled()) { + logger.log(Level.INFO, "Cancelled SRU Artifact Creation."); //NON-NLS + return; + } + + String applicationName = resultSet.getString("application_name"); //NON-NLS + String sourceName = resultSet.getString("source_name"); //NON-NLS + + String normalizePathName = FilenameUtils.normalize(applicationName, true); + String fileName = FilenameUtils.getName(normalizePathName); + String filePath = FilenameUtils.getPath(normalizePathName); + if (fileName.contains(" [")) { + fileName = fileName.substring(0, fileName.indexOf(" [")); + } + List sourceFiles; + try { + sourceFiles = fileManager.findFiles(dataSource, fileName, filePath); //NON-NLS + for (AbstractFile sourceFile : sourceFiles) { + if (sourceFile.getParentPath().endsWith(filePath)) { + applicationFilesFound.put(sourceName.toLowerCase(), sourceFile); + } + } + + } catch (TskCoreException ex) { + logger.log(Level.WARNING, String.format("Error finding actual file %s. File may not exist.", normalizePathName), ex); //NON-NLS + } + } + } catch (SQLException ex) { + logger.log(Level.WARNING, "Error while trying to read the SRU SQLite db.", ex);//NON-NLS + } + + } + + private void createNetUsageArtifacts(String sruDb, AbstractFile sruAbstractFile) { + List bba = new ArrayList<>(); + + String sqlStatement = "SELECT STRFTIME('%s', timestamp) ExecutionTime, Application_Name, User_Name, " + + " bytesSent, BytesRecvd FROM network_Usage , SruDbIdMapTable " + + " where appId = IdIndex and IdType = 0 order by ExecutionTime;"; //NON-NLS + + try (SQLiteDBConnect tempdbconnect = new SQLiteDBConnect("org.sqlite.JDBC", "jdbc:sqlite:" + sruDb); //NON-NLS + ResultSet resultSet = tempdbconnect.executeQry(sqlStatement)) { + + while (resultSet.next()) { + + if (context.dataSourceIngestIsCancelled()) { + logger.log(Level.INFO, "Cancelled SRU Net Usage Artifact Creation."); //NON-NLS + return; + } + + String applicationName = resultSet.getString("Application_Name"); //NON-NLS + Long executionTime = Long.valueOf(resultSet.getInt("ExecutionTime")); //NON-NLS + Long bytesSent = Long.valueOf(resultSet.getInt("bytesSent")); //NON-NLS + Long bytesRecvd = Long.valueOf(resultSet.getInt("BytesRecvd")); //NON-NLS + String userName = resultSet.getString("User_Name"); //NON-NLS + + Collection bbattributes = Arrays.asList( + new BlackboardAttribute( + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, getName(), + applicationName),//NON-NLS + new BlackboardAttribute( + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME, getName(), + userName), + new BlackboardAttribute( + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, getName(), + executionTime), + new BlackboardAttribute( + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_BYTES_SENT, getName(), bytesSent), + new BlackboardAttribute( + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_BYTES_RECEIVED, getName(), bytesRecvd), + new BlackboardAttribute( +
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, getName(), NETWORK_USAGE_SOURCE_NAME)); + + try { + BlackboardArtifact bbart = sruAbstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_PROG_RUN); + bbart.addAttributes(bbattributes); + bba.add(bbart); + BlackboardArtifact associateBbArtifact = createAssociatedArtifact(applicationName.toLowerCase(), bbart); + if (associateBbArtifact != null) { + bba.add(associateBbArtifact); + } + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Exception Adding Artifact.", ex);//NON-NLS + } + } + + } catch (SQLException ex) { + logger.log(Level.SEVERE, "Error while trying to read the SRU SQLite db.", ex);//NON-NLS + } + + try { + blackboard.postArtifacts(bba, MODULE_NAME); + } catch (Blackboard.BlackboardException ex) { + logger.log(Level.SEVERE, "Error Posting Artifact.", ex);//NON-NLS + } + } + + private void createAppUsageArtifacts(String sruDb, AbstractFile sruAbstractFile) { + List bba = new ArrayList<>(); + + String sqlStatement = "SELECT STRFTIME('%s', timestamp) ExecutionTime, Application_Name, User_Name " + + " FROM Application_Resource_Usage, SruDbIdMapTable WHERE " + + " idType = 0 and idIndex = appId order by ExecutionTime;"; //NON-NLS + + try (SQLiteDBConnect tempdbconnect = new SQLiteDBConnect("org.sqlite.JDBC", "jdbc:sqlite:" + sruDb); //NON-NLS + ResultSet resultSet = tempdbconnect.executeQry(sqlStatement)) { + + while (resultSet.next()) { + + if (context.dataSourceIngestIsCancelled()) { + logger.log(Level.INFO, "Cancelled SRU App Usage Artifact Creation."); //NON-NLS + return; + } + + String applicationName = resultSet.getString("Application_Name"); //NON-NLS + Long executionTime = Long.valueOf(resultSet.getInt("ExecutionTime")); //NON-NLS + String userName = resultSet.getString("User_Name"); + + Collection bbattributes = Arrays.asList( + new BlackboardAttribute( + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, getName(), + applicationName),//NON-NLS + new BlackboardAttribute( + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME, getName(), + userName), + new BlackboardAttribute( + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, getName(), + executionTime), + new BlackboardAttribute( + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, getName(), APPLICATION_USAGE_SOURCE_NAME)); + + try { + BlackboardArtifact bbart = sruAbstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_PROG_RUN); + bbart.addAttributes(bbattributes); + bba.add(bbart); + BlackboardArtifact associateBbArtifact = createAssociatedArtifact(applicationName.toLowerCase(), bbart); + if (associateBbArtifact != null) { + bba.add(associateBbArtifact); + } + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Exception Adding Artifact.", ex);//NON-NLS + } + } + + } catch (SQLException ex) { + logger.log(Level.SEVERE, "Error while trying to read the SRU SQLite db.", ex);//NON-NLS + } + + try { + blackboard.postArtifacts(bba, MODULE_NAME); + } catch (Blackboard.BlackboardException ex) { + logger.log(Level.SEVERE, "Error Posting Artifact.", ex);//NON-NLS + } + } + + /** + * Create an associated object artifact using the file path name and the artifact it is associated with + * + * @param filePathName file and path of object being associated with + * + * @param bba blackboard artifact to associate with + * + * @return The associated BlackboardArtifact, or a null value + */ + private BlackboardArtifact createAssociatedArtifact(String filePathName, BlackboardArtifact bba) { + if (applicationFilesFound.containsKey(filePathName)) { + AbstractFile sourceFile =
applicationFilesFound.get(filePathName); + Collection bbattributes2 = new ArrayList<>(); + bbattributes2.addAll(Arrays.asList( + new BlackboardAttribute(TSK_ASSOCIATED_ARTIFACT, this.getName(), + bba.getArtifactID()))); + + BlackboardArtifact associatedObjectBba = createArtifactWithAttributes(TSK_ASSOCIATED_OBJECT, sourceFile, bbattributes2); + if (associatedObjectBba != null) { + return associatedObjectBba; + } + } + + return null; + } + +} diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java index 444a6d638d..c5cb62a779 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java @@ -78,6 +78,7 @@ public final class RAImageIngestModule implements DataSourceIngestModule { Extract safari = new ExtractSafari(); Extract zoneInfo = new ExtractZoneIdentifier(); Extract recycleBin = new ExtractRecycleBin(); + Extract sru = new ExtractSru(); extractors.add(chrome); extractors.add(firefox); @@ -91,7 +92,8 @@ public final class RAImageIngestModule implements DataSourceIngestModule { extractors.add(dataSourceAnalyzer); //this needs to run after ExtractRegistry and ExtractOs extractors.add(zoneInfo); // this needs to run after the web browser modules extractors.add(recycleBin); // this needs to run after ExtractRegistry and ExtractOS - + extractors.add(sru); + browserExtractors.add(chrome); browserExtractors.add(firefox); browserExtractors.add(iexplore); diff --git a/appveyor.yml b/appveyor.yml index d7cba33584..b3cbb4032c 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -19,7 +19,6 @@ environment: LIBVHDI_HOME: "C:\\libvhdi_64bit" LIBVMDK_HOME: "C:\\libvmdk_64bit\\libvmdk" LIBEWF_HOME: "C:\\libewf_64bit" - POSTGRESQL_HOME_64: "C:\\Program Files\\PostgreSQL\\9.5" JDK_HOME: C:\Program Files\Java\jdk1.8.0 PYTHON: "C:\\Python36-x64" @@ -41,7 +40,7 @@ build_script: - python win32\updateAndBuildAll.py -m - ps: pushd bindings/java - ps: ant -version - - cmd: ant dist-PostgreSQL + - cmd: ant dist - ps: popd - cd %APPVEYOR_BUILD_FOLDER% - cmd: ant -q build diff --git a/release_scripts/update_sleuthkit_version.pl b/release_scripts/update_sleuthkit_version.pl index 89b0c42de5..26d8be5073 100755 --- a/release_scripts/update_sleuthkit_version.pl +++ b/release_scripts/update_sleuthkit_version.pl @@ -131,8 +131,8 @@ sub update_core_project_properties { my $found = 0; while () { - if (/^file\.reference\.sleuthkit\-postgresql-/) { - print CONF_OUT "file.reference.sleuthkit-postgresql-${VER}.jar=release/modules/ext/sleuthkit-postgresql-${VER}.jar\n"; + if (/^file\.reference\.sleuthkit\-/) { + print CONF_OUT "file.reference.sleuthkit-${VER}.jar=release/modules/ext/sleuthkit-${VER}.jar\n"; $found++; } @@ -167,12 +167,12 @@ sub update_core_project_xml { my $found = 0; while () { - if (/ext\/sleuthkit-postgresql/) { - print CONF_OUT " ext/sleuthkit-postgresql-${VER}.jar\n"; + if (/ext\/sleuthkit-/) { + print CONF_OUT " ext/sleuthkit-${VER}.jar\n"; $found++; } - elsif (/release\/modules\/ext\/sleuthkit-postgresql/) { - print CONF_OUT " release/modules/ext/sleuthkit-postgresql-${VER}.jar\n"; + elsif (/release\/modules\/ext\/sleuthkit-/) { + print CONF_OUT " release/modules/ext/sleuthkit-${VER}.jar\n"; $found++; } else { diff --git a/thirdparty/markmckinnon/Export_Srudb_Linux b/thirdparty/markmckinnon/Export_Srudb_Linux new file mode 100644 index 0000000000..0af32da85b Binary files 
/dev/null and b/thirdparty/markmckinnon/Export_Srudb_Linux differ diff --git a/thirdparty/markmckinnon/Export_srudb_macos b/thirdparty/markmckinnon/Export_srudb_macos new file mode 100644 index 0000000000..eeff65b668 Binary files /dev/null and b/thirdparty/markmckinnon/Export_srudb_macos differ diff --git a/thirdparty/markmckinnon/LICENSE-2.0.txt b/thirdparty/markmckinnon/LICENSE-2.0.txt new file mode 100644 index 0000000000..d645695673 --- /dev/null +++ b/thirdparty/markmckinnon/LICENSE-2.0.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/thirdparty/markmckinnon/export_srudb_32.exe b/thirdparty/markmckinnon/export_srudb_32.exe new file mode 100644 index 0000000000..e642c1d8df Binary files /dev/null and b/thirdparty/markmckinnon/export_srudb_32.exe differ diff --git a/thirdparty/markmckinnon/export_srudb_64.exe b/thirdparty/markmckinnon/export_srudb_64.exe new file mode 100644 index 0000000000..7b07367739 Binary files /dev/null and b/thirdparty/markmckinnon/export_srudb_64.exe differ diff --git a/travis_build.sh b/travis_build.sh index f7b7cfb72d..5f574d5726 100755 --- a/travis_build.sh +++ b/travis_build.sh @@ -4,7 +4,7 @@ set -e echo "Building TSK..." cd sleuthkit/sleuthkit ./bootstrap && ./configure --prefix=/usr && make -pushd bindings/java && ant -q dist-PostgreSQL && popd +pushd bindings/java && ant -q dist && popd echo "Building Autopsy..." && echo -en 'travis_fold:start:script.build\\r' cd $TRAVIS_BUILD_DIR/ diff --git a/unix_setup.sh b/unix_setup.sh index dc92f38236..edd6c36632 100644 --- a/unix_setup.sh +++ b/unix_setup.sh @@ -55,15 +55,15 @@ else exit 1 fi -ext_jar_filepath=$PWD/autopsy/modules/ext/sleuthkit-postgresql-$TSK_VERSION.jar; +ext_jar_filepath=$PWD/autopsy/modules/ext/sleuthkit-$TSK_VERSION.jar; echo -n "Copying sleuthkit-$TSK_VERSION.jar into the Autopsy directory..." -rm -f $ext_jar_filepath; +rm -f "$ext_jar_filepath"; if [ "$?" -gt 0 ]; then #checking if remove operation failed echo "ERROR: Deleting $ext_jar_filepath failed." echo "Please check your permissions." exit 1 else - cp $sleuthkit_jar_filepath $ext_jar_filepath + cp $sleuthkit_jar_filepath "$ext_jar_filepath" if [ "$?" -ne 0 ]; then # checking copy operation was successful echo "ERROR: Copying $sleuthkit_jar_filepath to $ext_jar_filepath failed." echo "Please check your permissions."
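Note on the external-tool contract introduced by ExtractSru: the module passes the extracted SRUDB.DAT, the extracted SOFTWARE hive, and the output SQLite path to the bundled Export_Srudb executable in that order, and redirects the program's stdout and stderr to Output.txt and Error.txt. The minimal sketch below mirrors that invocation outside of ingest so the exporter can be verified by hand; the class name and all file paths are hypothetical, and it assumes the tool accepts exactly the three positional arguments that ExtractSru.extractSruFiles() builds.

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Paths;
    import java.util.Arrays;
    import java.util.List;

    /*
     * Minimal sketch (not part of the patch): runs the bundled export_srudb tool
     * with the same positional arguments ExtractSru.extractSruFiles() passes:
     * <exporter> <srudb.dat> <SOFTWARE hive> <output SQLite db>.
     * All paths below are hypothetical examples.
     */
    public class SruExportSmokeTest {

        public static void main(String[] args) throws IOException, InterruptedException {
            String exporter = Paths.get("thirdparty", "markmckinnon", "Export_Srudb_64.exe").toString();
            String sruDb = "C:\\temp\\sru\\12345_SRUDB.DAT";     // SRUDB.DAT extracted from the image
            String softwareHive = "C:\\temp\\sru\\678_SOFTWARE"; // SOFTWARE hive extracted from the image
            String outputDb = "C:\\temp\\sru\\12345_srudb.db3";  // SQLite database the tool will create

            List<String> commandLine = Arrays.asList(exporter, sruDb, softwareHive, outputDb);
            ProcessBuilder processBuilder = new ProcessBuilder(commandLine);
            processBuilder.redirectOutput(new File("C:\\temp\\sru\\Output.txt"));
            processBuilder.redirectError(new File("C:\\temp\\sru\\Error.txt"));

            int exitCode = processBuilder.start().waitFor();
            System.out.println("export_srudb exited with " + exitCode);
        }
    }

If the run succeeds, the resulting .db3 file can be opened with any SQLite client to confirm that the SruDbIdMapTable, network_Usage, and Application_Resource_Usage tables queried by findSruExecutedFiles(), createNetUsageArtifacts(), and createAppUsageArtifacts() are present.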