merge from new_table_load

This commit is contained in:
Greg DiCristofaro 2021-11-02 15:13:25 -04:00
commit 45fb8c35bb
11 changed files with 857 additions and 137 deletions

View File

@ -128,29 +128,26 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
}
// TODO We can probably combine all the caches at some point
private final Cache<SearchParams<AnalysisResultSearchParam>, AnalysisResultTableSearchResultsDTO> analysisResultCache = CacheBuilder.newBuilder().maximumSize(1000).build();
private final Cache<SearchParams<BlackboardArtifactSearchParam>, AnalysisResultTableSearchResultsDTO> analysisResultCache = CacheBuilder.newBuilder().maximumSize(1000).build();
private final Cache<SearchParams<AnalysisResultSetSearchParam>, AnalysisResultTableSearchResultsDTO> setHitCache = CacheBuilder.newBuilder().maximumSize(1000).build();
private final Cache<SearchParams<KeywordHitSearchParam>, AnalysisResultTableSearchResultsDTO> keywordHitCache = CacheBuilder.newBuilder().maximumSize(1000).build();
private AnalysisResultTableSearchResultsDTO fetchAnalysisResultsForTable(SearchParams<AnalysisResultSearchParam> cacheKey) throws NoCurrentCaseException, TskCoreException {
private AnalysisResultTableSearchResultsDTO fetchAnalysisResultsForTable(SearchParams<BlackboardArtifactSearchParam> cacheKey) throws NoCurrentCaseException, TskCoreException {
SleuthkitCase skCase = getCase();
Blackboard blackboard = skCase.getBlackboard();
Long dataSourceId = cacheKey.getParamData().getDataSourceId();
BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType();
// get analysis results
List<BlackboardArtifact> arts = new ArrayList<>();
if (dataSourceId != null) {
arts.addAll(blackboard.getAnalysisResultsByType(artType.getTypeID(), dataSourceId));
} else {
arts.addAll(blackboard.getAnalysisResultsByType(artType.getTypeID()));
}
String pagedWhereClause = getWhereClause(cacheKey);
arts.addAll(blackboard.getAnalysisResultsWhere(pagedWhereClause));
blackboard.loadBlackboardAttributes(arts);
List<BlackboardArtifact> pagedArtifacts = getPaged(arts, cacheKey);
TableData tableData = createTableData(artType, pagedArtifacts);
return new AnalysisResultTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), arts.size());
// Get total number of results
long totalResultsCount = getTotalResultsCount(cacheKey, arts.size());
TableData tableData = createTableData(artType, arts);
return new AnalysisResultTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), totalResultsCount);
}
private AnalysisResultTableSearchResultsDTO fetchSetNameHitsForTable(SearchParams<? extends AnalysisResultSetSearchParam> cacheKey) throws NoCurrentCaseException, TskCoreException {
@ -161,28 +158,39 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
Long dataSourceId = cacheKey.getParamData().getDataSourceId();
BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType();
// Get all hash set hits
List<AnalysisResult> allHashHits;
// We currently can't make a query on the set name field because need to use a prepared statement
String originalWhereClause = " artifacts.artifact_type_id = " + artType.getTypeID() + " ";
if (dataSourceId != null) {
allHashHits = blackboard.getAnalysisResultsByType(artType.getTypeID(), dataSourceId);
} else {
allHashHits = blackboard.getAnalysisResultsByType(artType.getTypeID());
originalWhereClause += " AND artifacts.data_source_obj_id = " + dataSourceId + " ";
}
<<<<<<< HEAD
String expectedSetName = cacheKey.getParamData().getSetName();
=======
List<BlackboardArtifact> allHashHits = new ArrayList<>();
allHashHits.addAll(blackboard.getAnalysisResultsWhere(originalWhereClause));
blackboard.loadBlackboardAttributes(allHashHits);
>>>>>>> 8124wAnalysisResults
// Filter for the selected set
List<BlackboardArtifact> arts = new ArrayList<>();
for (AnalysisResult art : allHashHits) {
List<BlackboardArtifact> hashHits = new ArrayList<>();
for (BlackboardArtifact art : allHashHits) {
BlackboardAttribute setNameAttr = art.getAttribute(BlackboardAttribute.Type.TSK_SET_NAME);
<<<<<<< HEAD
if ((expectedSetName == null && setNameAttr == null)
|| (expectedSetName != null && setNameAttr != null && expectedSetName.equals(setNameAttr.getValueString()))) {
arts.add(art);
=======
if ((setNameAttr != null) && cacheKey.getParamData().getSetName().equals(setNameAttr.getValueString())) {
hashHits.add(art);
>>>>>>> 8124wAnalysisResults
}
}
List<BlackboardArtifact> pagedArtifacts = getPaged(arts, cacheKey);
List<BlackboardArtifact> pagedArtifacts = getPaged(hashHits, cacheKey);
TableData tableData = createTableData(artType, pagedArtifacts);
return new AnalysisResultTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), arts.size());
return new AnalysisResultTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), hashHits.size());
}
@Override
@ -260,7 +268,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
+ "Received artifact type: {0}; data source id: {1}", artType, artifactKey.getDataSourceId() == null ? "<null>" : artifactKey.getDataSourceId()));
}
SearchParams<AnalysisResultSearchParam> searchParams = new SearchParams<>(artifactKey, startItem, maxCount);
SearchParams<BlackboardArtifactSearchParam> searchParams = new SearchParams<>(artifactKey, startItem, maxCount);
if (hardRefresh) {
analysisResultCache.invalidate(searchParams);
}
@ -287,6 +295,8 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
return setHitCache.get(searchParams, () -> fetchSetNameHitsForTable(searchParams));
}
// TODO - JIRA-8117
// This needs to use more than just the set name
public AnalysisResultTableSearchResultsDTO getKeywordHitsForTable(KeywordHitSearchParam artifactKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
if (artifactKey.getDataSourceId() != null && artifactKey.getDataSourceId() < 0) {
throw new IllegalArgumentException(MessageFormat.format("Illegal data. "

View File

@ -18,57 +18,14 @@
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import java.util.Objects;
import org.sleuthkit.datamodel.BlackboardArtifact;
/**
* Key for analysis result in order to retrieve data from DAO.
*/
public class AnalysisResultSearchParam {
private final BlackboardArtifact.Type artifactType;
private final Long dataSourceId;
public class AnalysisResultSearchParam extends BlackboardArtifactSearchParam {
public AnalysisResultSearchParam(BlackboardArtifact.Type artifactType, Long dataSourceId) {
this.artifactType = artifactType;
this.dataSourceId = dataSourceId;
super(artifactType, dataSourceId);
}
public BlackboardArtifact.Type getArtifactType() {
return artifactType;
}
public Long getDataSourceId() {
return dataSourceId;
}
@Override
public int hashCode() {
int hash = 7;
hash = 79 * hash + Objects.hashCode(this.artifactType);
hash = 79 * hash + Objects.hashCode(this.dataSourceId);
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final AnalysisResultSearchParam other = (AnalysisResultSearchParam) obj;
if (!Objects.equals(this.artifactType, other.artifactType)) {
return false;
}
if (!Objects.equals(this.dataSourceId, other.dataSourceId)) {
return false;
}
return true;
}
}

View File

@ -21,6 +21,7 @@ import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_DOWNLOAD_SOURCE;
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_ASSOCIATED_OBJECT;
@ -256,6 +257,40 @@ abstract class BlackboardArtifactDAO {
.anyMatch(tp -> BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME.equals(tp.getValueType()));
}
/**
 * Builds the artifact-table WHERE clause (including ORDER BY / LIMIT / OFFSET
 * paging terms) used to fetch one page of artifacts for the given search key.
 *
 * @param cacheKey The search parameters (artifact type, optional data source
 *                 id) plus paging information (start item, max count).
 *
 * @return The WHERE clause fragment, filtered to the requested artifact type
 *         and (optionally) data source, ordered by object id with paging
 *         applied.
 */
String getWhereClause(SearchParams<BlackboardArtifactSearchParam> cacheKey) {
Long dataSourceId = cacheKey.getParamData().getDataSourceId();
BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType();
// filter to the requested artifact type; type id is an int, so no quoting needed
String originalWhereClause = " artifacts.artifact_type_id = " + artType.getTypeID() + " ";
if (dataSourceId != null) {
// optionally restrict the query to a single data source
originalWhereClause += " AND artifacts.data_source_obj_id = " + dataSourceId + " ";
}
// stable ORDER BY plus LIMIT/OFFSET so pages are deterministic across calls
String pagedWhereClause = originalWhereClause
+ " ORDER BY artifacts.obj_id ASC"
+ (cacheKey.getMaxResultsCount() != null && cacheKey.getMaxResultsCount() > 0 ? " LIMIT " + cacheKey.getMaxResultsCount() : "")
+ (cacheKey.getStartItem() > 0 ? " OFFSET " + cacheKey.getStartItem() : "");
return pagedWhereClause;
}
/**
 * Determines the total (un-paged) number of results for the given search.
 *
 * If the current page starts at item 0 and did not fill the requested page
 * size (or no page size was requested), the page already contains every
 * result, so its size is the total and no extra database query is needed.
 * Otherwise the database is asked for the full artifact count.
 *
 * @param cacheKey        The search parameters plus paging information.
 * @param currentPageSize The number of results in the currently fetched page.
 *
 * @return The total number of results for the search.
 *
 * @throws TskCoreException
 * @throws NoCurrentCaseException
 */
long getTotalResultsCount(SearchParams<BlackboardArtifactSearchParam> cacheKey, long currentPageSize) throws TskCoreException, NoCurrentCaseException {
Blackboard blackboard = getCase().getBlackboard();
Long dataSourceId = cacheKey.getParamData().getDataSourceId();
BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType();
if ( (cacheKey.getStartItem() == 0) // offset is zero AND
&& ( (cacheKey.getMaxResultsCount() != null && currentPageSize < cacheKey.getMaxResultsCount()) // number of results is less than max
|| (cacheKey.getMaxResultsCount() == null)) ) { // OR max number of results was not specified
// the single page holds every result, so its size is the total
return currentPageSize;
} else {
// fall back to a database count, scoped to the data source when one was given
if (dataSourceId != null) {
return blackboard.getArtifactsCount(artType.getTypeID(), dataSourceId);
} else {
return blackboard.getArtifactsCount(artType.getTypeID());
}
}
}
String getDataSourceName(Content srcContent) throws TskCoreException {
Content dataSource = srcContent.getDataSource();
if (dataSource != null) {

View File

@ -0,0 +1,72 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import java.util.Objects;
import org.sleuthkit.datamodel.BlackboardArtifact;
/**
* Key for data artifact in order to retrieve data from DAO.
*/
public class BlackboardArtifactSearchParam {

    private final BlackboardArtifact.Type artifactType;
    private final Long dataSourceId;

    /**
     * Main constructor.
     *
     * @param artifactType The artifact type to search for.
     * @param dataSourceId The data source to scope the search to, or null to
     *                     search all data sources.
     */
    public BlackboardArtifactSearchParam(BlackboardArtifact.Type artifactType, Long dataSourceId) {
        this.artifactType = artifactType;
        this.dataSourceId = dataSourceId;
    }

    /**
     * @return The artifact type to search for.
     */
    public BlackboardArtifact.Type getArtifactType() {
        return artifactType;
    }

    /**
     * @return The data source id to scope the search to, or null for all.
     */
    public Long getDataSourceId() {
        return dataSourceId;
    }

    @Override
    public int hashCode() {
        // Seed 7 / factor 67 accumulation, matching the original implementation.
        int result = 7;
        result = 67 * result + Objects.hashCode(this.artifactType);
        result = 67 * result + Objects.hashCode(this.dataSourceId);
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final BlackboardArtifactSearchParam other = (BlackboardArtifactSearchParam) obj;
        return Objects.equals(this.artifactType, other.artifactType)
                && Objects.equals(this.dataSourceId, other.dataSourceId);
    }
}

View File

@ -46,7 +46,26 @@ FileExtRootFilter_documents_displayName=Documents
FileExtRootFilter_executable_displayName=Executable
FileExtRootFilter_image_displayName=Images
FileExtRootFilter_video_displayName=Video
FileTag.name.text=File Tag
FileTypesByMimeType.name.text=By MIME Type
TagsDAO.fileColumns.accessTimeColLbl=Accessed Time
TagsDAO.fileColumns.changeTimeColLbl=Changed Time
TagsDAO.fileColumns.commentColLbl=Comment
TagsDAO.fileColumns.createdTimeColLbl=Created Time
TagsDAO.fileColumns.filePathColLbl=File Path
TagsDAO.fileColumns.md5HashColLbl=MD5 Hash
TagsDAO.fileColumns.modifiedTimeColLbl=Modified Time
TagsDAO.fileColumns.nameColLbl=Name
TagsDAO.fileColumns.noDescription=No Description
TagsDAO.fileColumns.originalName=Original Name
TagsDAO.fileColumns.sizeColLbl=Size
TagsDAO.fileColumns.userNameColLbl=User Name
TagsDAO.tagColumns.commentColLbl=Comment
TagsDAO.tagColumns.origNameColLbl=Original Name
TagsDAO.tagColumns.sourceNameColLbl=Source Name
TagsDAO.tagColumns.sourcePathColLbl=Source File Path
TagsDAO.tagColumns.typeColLbl=Result Type
TagsDAO.tagColumns.userNameColLbl=User Name
ThreePanelViewsDAO.fileColumns.accessTimeColLbl=Access Time
ThreePanelViewsDAO.fileColumns.attrAddrColLbl=Attr. Addr.
ThreePanelViewsDAO.fileColumns.changeTimeColLbl=Change Time

View File

@ -37,6 +37,7 @@ import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
/**
@ -63,25 +64,24 @@ public class DataArtifactDAO extends BlackboardArtifactDAO {
return BlackboardArtifactDAO.getIgnoredTreeTypes();
}
private final Cache<SearchParams<DataArtifactSearchParam>, DataArtifactTableSearchResultsDTO> dataArtifactCache = CacheBuilder.newBuilder().maximumSize(1000).build();
private final Cache<SearchParams<BlackboardArtifactSearchParam>, DataArtifactTableSearchResultsDTO> dataArtifactCache = CacheBuilder.newBuilder().maximumSize(1000).build();
private DataArtifactTableSearchResultsDTO fetchDataArtifactsForTable(SearchParams<DataArtifactSearchParam> cacheKey) throws NoCurrentCaseException, TskCoreException {
Blackboard blackboard = getCase().getBlackboard();
private DataArtifactTableSearchResultsDTO fetchDataArtifactsForTable(SearchParams<BlackboardArtifactSearchParam> cacheKey) throws NoCurrentCaseException, TskCoreException {
Long dataSourceId = cacheKey.getParamData().getDataSourceId();
SleuthkitCase skCase = getCase();
Blackboard blackboard = skCase.getBlackboard();
BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType();
// get analysis results
List<BlackboardArtifact> arts = new ArrayList<>();
if (dataSourceId != null) {
arts.addAll(blackboard.getDataArtifacts(artType.getTypeID(), dataSourceId));
} else {
arts.addAll(blackboard.getDataArtifacts(artType.getTypeID()));
}
String pagedWhereClause = getWhereClause(cacheKey);
List<BlackboardArtifact> pagedArtifacts = getPaged(arts, cacheKey);
TableData tableData = createTableData(artType, pagedArtifacts);
return new DataArtifactTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), arts.size());
List<BlackboardArtifact> arts = new ArrayList<>();
arts.addAll(blackboard.getDataArtifactsWhere(pagedWhereClause));
blackboard.loadBlackboardAttributes(arts);
long totalResultsCount = getTotalResultsCount(cacheKey, arts.size());
TableData tableData = createTableData(artType, arts);
return new DataArtifactTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), totalResultsCount);
}
@Override
@ -102,7 +102,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO {
+ "Received artifact type: {0}; data source id: {1}", artType, artifactKey.getDataSourceId() == null ? "<null>" : artifactKey.getDataSourceId()));
}
SearchParams<DataArtifactSearchParam> searchParams = new SearchParams<>(artifactKey, startItem, maxCount);
SearchParams<BlackboardArtifactSearchParam> searchParams = new SearchParams<>(artifactKey, startItem, maxCount);
if (hardRefresh) {
this.dataArtifactCache.invalidate(searchParams);
}

View File

@ -18,58 +18,14 @@
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import java.util.Objects;
import org.sleuthkit.datamodel.BlackboardArtifact;
/**
* Key for data artifact in order to retrieve data from DAO.
*/
public class DataArtifactSearchParam {
private final BlackboardArtifact.Type artifactType;
private final Long dataSourceId;
public class DataArtifactSearchParam extends BlackboardArtifactSearchParam {
public DataArtifactSearchParam(BlackboardArtifact.Type artifactType, Long dataSourceId) {
this.artifactType = artifactType;
this.dataSourceId = dataSourceId;
super (artifactType, dataSourceId);
}
public BlackboardArtifact.Type getArtifactType() {
return artifactType;
}
public Long getDataSourceId() {
return dataSourceId;
}
@Override
public int hashCode() {
int hash = 7;
hash = 67 * hash + Objects.hashCode(this.artifactType);
hash = 67 * hash + Objects.hashCode(this.dataSourceId);
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final DataArtifactSearchParam other = (DataArtifactSearchParam) obj;
if (!Objects.equals(this.artifactType, other.artifactType)) {
return false;
}
if (!Objects.equals(this.dataSourceId, other.dataSourceId)) {
return false;
}
return true;
}
}

View File

@ -37,6 +37,7 @@ public class MainDAO {
private final DataArtifactDAO dataArtifactDAO = DataArtifactDAO.getInstance();
private final AnalysisResultDAO analysisResultDAO = AnalysisResultDAO.getInstance();
private final ViewsDAO viewsDAO = ViewsDAO.getInstance();
private final TagsDAO tagsDAO = TagsDAO.getInstance();
public DataArtifactDAO getDataArtifactsDAO() {
return dataArtifactDAO;
@ -49,4 +50,8 @@ public class MainDAO {
public ViewsDAO getViewsDAO() {
return viewsDAO;
}
public TagsDAO getTagsDAO() {
return tagsDAO;
}
}

View File

@ -0,0 +1,371 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.beans.PropertyChangeEvent;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent;
import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;
import org.sleuthkit.autopsy.core.UserPreferences;
import org.sleuthkit.autopsy.coreutils.TimeZoneUtils;
import org.sleuthkit.autopsy.events.AutopsyEvent;
import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.Tag;
import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Provides information to populate the results viewer for data in the allTags
section.
*/
@Messages({"TagsDAO.fileColumns.nameColLbl=Name",
"TagsDAO.fileColumns.originalName=Original Name",
"TagsDAO.fileColumns.filePathColLbl=File Path",
"TagsDAO.fileColumns.commentColLbl=Comment",
"TagsDAO.fileColumns.modifiedTimeColLbl=Modified Time",
"TagsDAO.fileColumns.changeTimeColLbl=Changed Time",
"TagsDAO.fileColumns.accessTimeColLbl=Accessed Time",
"TagsDAO.fileColumns.createdTimeColLbl=Created Time",
"TagsDAO.fileColumns.sizeColLbl=Size",
"TagsDAO.fileColumns.md5HashColLbl=MD5 Hash",
"TagsDAO.fileColumns.userNameColLbl=User Name",
"TagsDAO.fileColumns.noDescription=No Description",
"TagsDAO.tagColumns.sourceNameColLbl=Source Name",
"TagsDAO.tagColumns.origNameColLbl=Original Name",
"TagsDAO.tagColumns.sourcePathColLbl=Source File Path",
"TagsDAO.tagColumns.typeColLbl=Result Type",
"TagsDAO.tagColumns.commentColLbl=Comment",
"TagsDAO.tagColumns.userNameColLbl=User Name"})
public class TagsDAO {

    // Rule of thumb: 5 entries times number of cached SearchParams sub-types.
    private static final int CACHE_SIZE = 5;
    private static final long CACHE_DURATION = 2;
    private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES;

    // Caches paged tag search results so repeated UI requests don't re-query the case database.
    private final Cache<SearchParams<?>, SearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build();

    private static final String USER_NAME_PROPERTY = "user.name"; //NON-NLS

    private static final String FILE_TAG_TYPE_ID = "FILE_TAG";
    private static final String RESULT_TAG_TYPE_ID = "RESULT_TAG";

    // Column headers for file (content) tag rows, in display order; must match
    // the cell order produced by fetchFileTags().
    private static final List<ColumnKey> FILE_TAG_COLUMNS = Arrays.asList(
            getFileColumnKey(Bundle.TagsDAO_fileColumns_nameColLbl()),
            getFileColumnKey(Bundle.TagsDAO_fileColumns_originalName()), // GVDTODO handle translation
            getFileColumnKey(Bundle.TagsDAO_fileColumns_filePathColLbl()),
            getFileColumnKey(Bundle.TagsDAO_fileColumns_commentColLbl()),
            getFileColumnKey(Bundle.TagsDAO_fileColumns_modifiedTimeColLbl()),
            getFileColumnKey(Bundle.TagsDAO_fileColumns_changeTimeColLbl()),
            getFileColumnKey(Bundle.TagsDAO_fileColumns_accessTimeColLbl()),
            getFileColumnKey(Bundle.TagsDAO_fileColumns_createdTimeColLbl()),
            getFileColumnKey(Bundle.TagsDAO_fileColumns_sizeColLbl()),
            getFileColumnKey(Bundle.TagsDAO_fileColumns_md5HashColLbl()),
            getFileColumnKey(Bundle.TagsDAO_fileColumns_userNameColLbl()));

    // Column headers for result (artifact) tag rows, in display order; must
    // match the cell order produced by fetchResultTags().
    private static final List<ColumnKey> RESULT_TAG_COLUMNS = Arrays.asList(
            getFileColumnKey(Bundle.TagsDAO_tagColumns_sourceNameColLbl()),
            getFileColumnKey(Bundle.TagsDAO_tagColumns_origNameColLbl()),
            getFileColumnKey(Bundle.TagsDAO_tagColumns_sourcePathColLbl()),
            getFileColumnKey(Bundle.TagsDAO_tagColumns_typeColLbl()),
            getFileColumnKey(Bundle.TagsDAO_tagColumns_commentColLbl()),
            getFileColumnKey(Bundle.TagsDAO_tagColumns_userNameColLbl()));

    private static TagsDAO instance = null;

    /**
     * @return The singleton instance of this DAO.
     */
    synchronized static TagsDAO getInstance() {
        if (instance == null) {
            instance = new TagsDAO();
        }
        return instance;
    }

    /**
     * Creates a column key whose field id and display name are both the given
     * name and whose description is the shared "no description" message.
     *
     * @param name The column name.
     *
     * @return The column key.
     */
    private static ColumnKey getFileColumnKey(String name) {
        return new ColumnKey(name, name, Bundle.TagsDAO_fileColumns_noDescription());
    }

    /**
     * Returns a page of tag results for the given tag name / tag type key,
     * served from the cache when possible.
     *
     * @param key         The tag search key (tag name, tag type, optional data
     *                    source id).
     * @param startItem   The first result to include (0-based).
     * @param maxCount    The maximum number of results, or null for no limit.
     * @param hardRefresh If true, any cached results for this key are
     *                    invalidated before fetching.
     *
     * @return The search results for this page.
     *
     * @throws ExecutionException
     * @throws IllegalArgumentException If the key is missing a tag name or tag
     *                                  type, or has a non-positive data source
     *                                  id.
     */
    public SearchResultsDTO getTags(TagsSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
        if (key.getTagName() == null) {
            throw new IllegalArgumentException("Must have non-null tag name");
        } else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) {
            throw new IllegalArgumentException("Data source id must be greater than 0 or null");
        } else if (key.getTagType() == null) {
            throw new IllegalArgumentException("Must have non-null tag type");
        }

        SearchParams<TagsSearchParams> searchParams = new SearchParams<>(key, startItem, maxCount);
        if (hardRefresh) {
            this.searchParamsCache.invalidate(searchParams);
        }

        return searchParamsCache.get(searchParams, () -> fetchTagsDTOs(searchParams));
    }

    @NbBundle.Messages({"FileTag.name.text=File Tag",
        "ResultTag.name.text=Result Tag"})
    /**
     * Dispatches a tag fetch to the file- or result-tag implementation based
     * on the tag type in the key.
     */
    private SearchResultsDTO fetchTagsDTOs(SearchParams<TagsSearchParams> cacheKey) throws NoCurrentCaseException, TskCoreException {
        switch (cacheKey.getParamData().getTagType()) {
            case FILE:
                return fetchFileTags(cacheKey);
            case RESULT:
                return fetchResultTags(cacheKey);
            default:
                throw new IllegalArgumentException("Unsupported tag type");
        }
    }

    /**
     * Returns a list of paged tag results.
     *
     * @param tags         The tag results.
     * @param searchParams The search parameters including the paging.
     *
     * @return The list of paged tag results, sorted by tag id.
     */
    List<? extends Tag> getPaged(List<? extends Tag> tags, SearchParams<?> searchParams) {
        Stream<? extends Tag> pagedTagsStream = tags.stream()
                .sorted(Comparator.comparing((tag) -> tag.getId()))
                .skip(searchParams.getStartItem());

        if (searchParams.getMaxResultsCount() != null) {
            pagedTagsStream = pagedTagsStream.limit(searchParams.getMaxResultsCount());
        }

        return pagedTagsStream.collect(Collectors.toList());
    }

    /**
     * Fetches one page of result (artifact) tags for the tag name in the key,
     * optionally filtered to the current user's tags.
     */
    private SearchResultsDTO fetchResultTags(SearchParams<TagsSearchParams> cacheKey) throws NoCurrentCaseException, TskCoreException {

        Long dataSourceId = cacheKey.getParamData().getDataSourceId();
        TagName tagName = cacheKey.getParamData().getTagName();

        // get all tag results
        List<BlackboardArtifactTag> allTags = new ArrayList<>();
        List<BlackboardArtifactTag> artifactTags = (dataSourceId != null && dataSourceId > 0)
                ? Case.getCurrentCaseThrows().getServices().getTagsManager().getBlackboardArtifactTagsByTagName(tagName, dataSourceId)
                : Case.getCurrentCaseThrows().getServices().getTagsManager().getBlackboardArtifactTagsByTagName(tagName);

        if (UserPreferences.showOnlyCurrentUserTags()) {
            // keep only tags created by the current OS user
            String userName = System.getProperty(USER_NAME_PROPERTY);
            for (BlackboardArtifactTag tag : artifactTags) {
                if (userName.equals(tag.getUserName())) {
                    allTags.add(tag);
                }
            }
        } else {
            allTags.addAll(artifactTags);
        }

        // get current page of tag results
        List<? extends Tag> pagedTags = getPaged(allTags, cacheKey);

        List<RowDTO> fileRows = new ArrayList<>();
        for (Tag tag : pagedTags) {
            BlackboardArtifactTag blackboardTag = (BlackboardArtifactTag) tag;

            String name = blackboardTag.getContent().getName(); // As a backup.
            try {
                name = blackboardTag.getArtifact().getShortDescription();
            } catch (TskCoreException ignore) {
                // it's a WARNING, skip
            }

            String contentPath;
            try {
                contentPath = blackboardTag.getContent().getUniquePath();
            } catch (TskCoreException ex) {
                contentPath = NbBundle.getMessage(this.getClass(), "BlackboardArtifactTagNode.createSheet.unavail.text");
            }

            // cell order must match RESULT_TAG_COLUMNS
            List<Object> cellValues = Arrays.asList(name,
                    null, // GVDTODO translation column
                    contentPath,
                    blackboardTag.getArtifact().getDisplayName(),
                    blackboardTag.getComment(),
                    blackboardTag.getUserName());

            fileRows.add(new BaseRowDTO(
                    cellValues,
                    RESULT_TAG_TYPE_ID,
                    blackboardTag.getId()));
        }

        return new BaseSearchResultsDTO(RESULT_TAG_TYPE_ID, Bundle.ResultTag_name_text(), RESULT_TAG_COLUMNS, fileRows, 0, allTags.size());
    }

    /**
     * Fetches one page of file (content) tags for the tag name in the key,
     * optionally filtered to the current user's tags.
     */
    private SearchResultsDTO fetchFileTags(SearchParams<TagsSearchParams> cacheKey) throws NoCurrentCaseException, TskCoreException {

        Long dataSourceId = cacheKey.getParamData().getDataSourceId();
        TagName tagName = cacheKey.getParamData().getTagName();

        // get all tag results
        List<ContentTag> allTags = new ArrayList<>();
        List<ContentTag> contentTags = (dataSourceId != null && dataSourceId > 0)
                ? Case.getCurrentCaseThrows().getServices().getTagsManager().getContentTagsByTagName(tagName, dataSourceId)
                : Case.getCurrentCaseThrows().getServices().getTagsManager().getContentTagsByTagName(tagName);

        if (UserPreferences.showOnlyCurrentUserTags()) {
            // keep only tags created by the current OS user
            String userName = System.getProperty(USER_NAME_PROPERTY);
            for (ContentTag tag : contentTags) {
                if (userName.equals(tag.getUserName())) {
                    allTags.add(tag);
                }
            }
        } else {
            allTags.addAll(contentTags);
        }

        // get current page of tag results
        List<? extends Tag> pagedTags = getPaged(allTags, cacheKey);

        List<RowDTO> fileRows = new ArrayList<>();
        for (Tag tag : pagedTags) {
            ContentTag contentTag = (ContentTag) tag;
            Content content = contentTag.getContent();
            String contentPath = content.getUniquePath();
            AbstractFile file = content instanceof AbstractFile ? (AbstractFile) content : null;

            // cell order must match FILE_TAG_COLUMNS; time/hash cells are blank
            // when the tagged content is not a file
            List<Object> cellValues = Arrays.asList(
                    content.getName(),
                    null, // GVDTODO translation column
                    contentPath,
                    contentTag.getComment(),
                    file != null ? TimeZoneUtils.getFormattedTime(file.getMtime()) : "",
                    file != null ? TimeZoneUtils.getFormattedTime(file.getCtime()) : "",
                    file != null ? TimeZoneUtils.getFormattedTime(file.getAtime()) : "",
                    file != null ? TimeZoneUtils.getFormattedTime(file.getCrtime()) : "",
                    content.getSize(),
                    file != null ? StringUtils.defaultString(file.getMd5Hash()) : "",
                    contentTag.getUserName());

            fileRows.add(new BaseRowDTO(
                    cellValues,
                    FILE_TAG_TYPE_ID,
                    // BUGFIX: use the content id; 'file' is null when the tagged
                    // content is not an AbstractFile, so file.getId() would NPE here
                    content.getId()));
        }

        return new BaseSearchResultsDTO(FILE_TAG_TYPE_ID, Bundle.FileTag_name_text(), FILE_TAG_COLUMNS, fileRows, 0, allTags.size());
    }

    /**
     * Handles fetching and paging of data for allTags.
     */
    public static class TagFetcher extends DAOFetcher<TagsSearchParams> {

        /**
         * Main constructor.
         *
         * @param params Parameters to handle fetching of data.
         */
        public TagFetcher(TagsSearchParams params) {
            super(params);
        }

        @Override
        public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
            return MainDAO.getInstance().getTagsDAO().getTags(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
        }

        @Override
        public boolean isRefreshRequired(PropertyChangeEvent evt) {
            TagsSearchParams params = this.getParameters();
            String eventType = evt.getPropertyName();

            // handle artifact/result tag changes
            if (eventType.equals(Case.Events.BLACKBOARD_ARTIFACT_TAG_ADDED.toString())
                    || eventType.equals(Case.Events.BLACKBOARD_ARTIFACT_TAG_DELETED.toString())) {

                // ignore non-artifact/result tag changes
                if (params.getTagType() != TagsSearchParams.TagType.RESULT) {
                    return false;
                }

                if (evt instanceof AutopsyEvent) {
                    if (evt instanceof BlackBoardArtifactTagAddedEvent) {
                        // An artifact associated with the current case has been tagged.
                        BlackBoardArtifactTagAddedEvent event = (BlackBoardArtifactTagAddedEvent) evt;
                        // ensure tag added event has a valid content id
                        if (event.getAddedTag() == null || event.getAddedTag().getContent() == null || event.getAddedTag().getArtifact() == null) {
                            return false;
                        }
                        // BUGFIX: compare tag NAME ids; the added tag's own id is a
                        // tag-instance id in a different id space than the tag name id
                        return params.getTagName().getId() == event.getAddedTag().getName().getId();
                    } else if (evt instanceof BlackBoardArtifactTagDeletedEvent) {
                        // A tag has been removed from an artifact associated with the current case.
                        BlackBoardArtifactTagDeletedEvent event = (BlackBoardArtifactTagDeletedEvent) evt;
                        // ensure tag deleted event has a valid content id
                        BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo deletedTagInfo = event.getDeletedTagInfo();
                        if (deletedTagInfo == null) {
                            return false;
                        }
                        // NOTE(review): getTagID() appears to be the tag-instance id,
                        // not the tag name id — confirm this comparison is intended
                        return params.getTagName().getId() == deletedTagInfo.getTagID();
                    }
                }
            }

            // handle file/content tag changes
            if (eventType.equals(Case.Events.CONTENT_TAG_ADDED.toString())
                    || eventType.equals(Case.Events.CONTENT_TAG_DELETED.toString())) {

                // ignore non-file/content tag changes
                if (params.getTagType() != TagsSearchParams.TagType.FILE) {
                    return false;
                }

                if (evt instanceof AutopsyEvent) {
                    if (evt instanceof ContentTagAddedEvent) {
                        // Content associated with the current case has been tagged.
                        ContentTagAddedEvent event = (ContentTagAddedEvent) evt;
                        // ensure tag added event has a valid content id
                        if (event.getAddedTag() == null || event.getAddedTag().getContent() == null) {
                            return false;
                        }
                        // BUGFIX: compare tag NAME ids; the added tag's own id is a
                        // tag-instance id in a different id space than the tag name id
                        return params.getTagName().getId() == event.getAddedTag().getName().getId();
                    } else if (evt instanceof ContentTagDeletedEvent) {
                        // A tag has been removed from content associated with the current case.
                        ContentTagDeletedEvent event = (ContentTagDeletedEvent) evt;
                        // ensure tag deleted event has a valid content id
                        ContentTagDeletedEvent.DeletedContentTagInfo deletedTagInfo = event.getDeletedTagInfo();
                        if (deletedTagInfo == null) {
                            return false;
                        }
                        // NOTE(review): getTagID() appears to be the tag-instance id,
                        // not the tag name id — confirm this comparison is intended
                        return params.getTagName().getId() == deletedTagInfo.getTagID();
                    }
                }
            }

            return false;
        }
    }
}

View File

@ -0,0 +1,89 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import java.util.Objects;
import org.sleuthkit.datamodel.TagName;
/**
* Key for accessing data about tags from the DAO.
*/
/**
 * Immutable key describing a tag lookup performed through the DAO layer:
 * which tag name, whether file (content) or result (artifact) tags are
 * wanted, and an optional data source restriction.
 */
public class TagsSearchParams {

    /**
     * The kind of tagged object being searched for: tagged file content or
     * tagged blackboard results.
     */
    public enum TagType {
        FILE,
        RESULT;
    }

    // the kind of tagged object (file vs. result) this search targets
    private final TagType tagType;
    // the tag name being searched for
    private final TagName tagName;
    // restrict results to this data source object id; null means all sources
    private final Long dataSourceId;

    /**
     * Main constructor.
     *
     * @param tagName      The tag name to search for.
     * @param type         Whether file or result tags are requested.
     * @param dataSourceId The data source object id to restrict to, or null
     *                     for all data sources.
     */
    public TagsSearchParams(TagName tagName, TagType type, Long dataSourceId) {
        this.tagName = tagName;
        this.tagType = type;
        this.dataSourceId = dataSourceId;
    }

    /**
     * @return The tag name to search for.
     */
    public TagName getTagName() {
        return tagName;
    }

    /**
     * @return Whether file or result tags are requested.
     */
    public TagType getTagType() {
        return tagType;
    }

    /**
     * @return The data source object id to restrict to, or null for all.
     */
    public Long getDataSourceId() {
        return dataSourceId;
    }

    @Override
    public int hashCode() {
        // same arithmetic as the conventional 23 * hash + field form,
        // folded into a single expression
        return ((7 * 23 + Objects.hashCode(this.tagName)) * 23
                + Objects.hashCode(this.tagType)) * 23
                + Objects.hashCode(this.dataSourceId);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final TagsSearchParams other = (TagsSearchParams) obj;
        return Objects.equals(this.tagName, other.tagName)
                && Objects.equals(this.dataSourceId, other.dataSourceId)
                && Objects.equals(this.tagType, other.tagType);
    }
}

View File

@ -32,6 +32,7 @@ import org.netbeans.junit.NbModuleSuite;
import org.netbeans.junit.NbTestCase;
import org.openide.util.Exceptions;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.TagsManager;
import org.sleuthkit.autopsy.testutils.CaseUtils;
import org.sleuthkit.autopsy.testutils.TestUtilsException;
import org.sleuthkit.datamodel.AbstractFile;
@ -45,6 +46,7 @@ import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.TskCoreException;
@ -72,6 +74,8 @@ public class TableSearchTest extends NbTestCase {
private static final String ARTIFACT_CONFIGURATION = "Test configuration";
private static final String ARTIFACT_JUSTIFICATION = "Test justification";
private static final Score ARTIFACT_SCORE = Score.SCORE_LIKELY_NOTABLE;
private static final long ARTIFACT_COUNT_WEB_BOOKMARK = 125;
private static final long ARTIFACT_COUNT_YARA = 150;
// Values for the hash set hit tests
private static final String HASH_SET_1 = "Hash Set 1";
@ -85,12 +89,22 @@ public class TableSearchTest extends NbTestCase {
private static final String KEYWORD_PREVIEW = "There is a bomb.";
// Extension and MIME type test
private static AbstractFile customFile;
private static final String CUSTOM_MIME_TYPE = "fake/type";
private static final String CUSTOM_MIME_TYPE_FILE_NAME = "test.fake";
private static final String CUSTOM_EXTENSION = "fake";
private static final Set<String> CUSTOM_EXTENSIONS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList("." + CUSTOM_EXTENSION))); //NON-NLS
private static final Set<String> EMPTY_RESULT_SET_EXTENSIONS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(".blah", ".blah2", ".crazy"))); //NON-NLS
// Tag test
private static final String TAG_COMMENT = "Tag comment";
private static final String TAG_DESCRIPTION = "Tag description";
private static final String MD5_COLUMN = "MD5 Hash";
private static final String FILE_PATH_COLUMN = "File Path";
private static final String MODIFIED_TIME_COLUMN = "Modified Time";
private static final String SOURCE_NAME_COLUMN = "Source Name";
private static final String SOURCE_FILE_PATH_COLUMN = "Source File Path";
/////////////////////////////////////////////////
// Data to be used across the test methods.
// These are initialized in setUpCaseDatabase().
@ -98,6 +112,7 @@ public class TableSearchTest extends NbTestCase {
Case openCase = null; // The case for testing
SleuthkitCase db = null; // The case database
Blackboard blackboard = null; // The blackboard
TagsManager tagsManager = null;// Tags manager
DataSource dataSource1 = null; // A local files data source
DataSource dataSource2 = null; // A local files data source
@ -124,6 +139,10 @@ public class TableSearchTest extends NbTestCase {
AnalysisResult keywordHitAnalysisResult = null; // A keyword hit
Content keywordHitSource = null; // The source of the keyword hit above
// Tags test
TagName knownTag1 = null;
TagName tag2 = null;
public static Test suite() {
NbModuleSuite.Configuration conf = NbModuleSuite.createConfiguration(TableSearchTest.class).
clusters(".*").
@ -148,6 +167,7 @@ public class TableSearchTest extends NbTestCase {
mimeSearchTest();
extensionSearchTest();
sizeSearchTest();
tagsTest();
}
/**
@ -159,6 +179,7 @@ public class TableSearchTest extends NbTestCase {
openCase = CaseUtils.createAsCurrentCase("testTableSearchCase");
db = openCase.getSleuthkitCase();
blackboard = db.getBlackboard();
tagsManager = openCase.getServices().getTagsManager();
// Add two logical files data sources
SleuthkitCase.CaseDbTransaction trans = db.beginTransaction();
@ -189,7 +210,7 @@ public class TableSearchTest extends NbTestCase {
fileB1.setMIMEType("text/plain");
fileB1.save();
AbstractFile customFile = db.addLocalFile(CUSTOM_MIME_TYPE_FILE_NAME, "", 67000000, 0, 0, 0, 0, true, TskData.EncodingType.NONE, folderB1);
customFile = db.addLocalFile(CUSTOM_MIME_TYPE_FILE_NAME, "", 67000000, 0, 0, 0, 0, true, TskData.EncodingType.NONE, folderB1);
customFile.setMIMEType(CUSTOM_MIME_TYPE);
customFile.save();
@ -226,6 +247,13 @@ public class TableSearchTest extends NbTestCase {
customDataArtifactSourceFile = fileA3;
customDataArtifactLinkedFile = fileA2;
// Add a lot of web bookmark data artifacts
for (int i = 0;i < ARTIFACT_COUNT_WEB_BOOKMARK;i++) {
attrs.clear();
attrs.add(new BlackboardAttribute(BlackboardAttribute.Type.TSK_COMMENT, MODULE_NAME, Integer.toString(i)));
fileA1.newDataArtifact(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, attrs);
}
// Add analysis results
// Data source 1: Encryption detected (2), custom type
// Data source 2: Encryption detected
@ -250,6 +278,13 @@ public class TableSearchTest extends NbTestCase {
customAnalysisResult = customDataArtifact.newAnalysisResult(customAnalysisResultType, ARTIFACT_SCORE, ARTIFACT_CONCLUSION, ARTIFACT_CONFIGURATION, ARTIFACT_JUSTIFICATION, attrs).getAnalysisResult();
customAnalysisResultSource = customDataArtifact;
// Add a lot of YARA hit analysis results
for (int i = 0;i < ARTIFACT_COUNT_YARA;i++) {
attrs.clear();
attrs.add(new BlackboardAttribute(BlackboardAttribute.Type.TSK_COMMENT, MODULE_NAME, Integer.toString(i)));
fileA1.newAnalysisResult(BlackboardArtifact.Type.TSK_YARA_HIT, Score.SCORE_NOTABLE, "conclusion", "configuration", "justification", attrs);
}
// Add hash hits
attrs.clear();
attrs.add(new BlackboardAttribute(BlackboardAttribute.Type.TSK_SET_NAME, MODULE_NAME, HASH_SET_1));
@ -306,7 +341,25 @@ public class TableSearchTest extends NbTestCase {
null, KEYWORD_SET_1, null, attrs).getAnalysisResult();
keywordHitSource = hashHitAnalysisResult;
} catch (TestUtilsException | TskCoreException | BlackboardException ex) {
// Add tags ----
knownTag1 = tagsManager.addTagName("Tag 1", TAG_DESCRIPTION, TagName.HTML_COLOR.RED, TskData.FileKnown.KNOWN);
tag2 = tagsManager.addTagName("Tag 2", "Descrition");
// Tag the custom artifacts in data source 1
openCase.getServices().getTagsManager().addBlackboardArtifactTag(customDataArtifact, knownTag1, TAG_COMMENT);
openCase.getServices().getTagsManager().addBlackboardArtifactTag(customAnalysisResult, tag2, "Comment 2");
// Tag file in data source 1
openCase.getServices().getTagsManager().addContentTag(fileA2, tag2);
openCase.getServices().getTagsManager().addContentTag(fileA3, tag2);
// Tag file in data source 2
openCase.getServices().getTagsManager().addContentTag(fileB1, tag2);
// Tag the custom file in data source 2
openCase.getServices().getTagsManager().addContentTag(customFile, knownTag1);
} catch (TestUtilsException | TskCoreException | BlackboardException | TagsManager.TagNameAlreadyExistsException ex) {
Exceptions.printStackTrace(ex);
Assert.fail(ex.getMessage());
}
@ -373,6 +426,41 @@ public class TableSearchTest extends NbTestCase {
assertTrue(dataArtifactRowDTO.getCellValues().contains(ARTIFACT_INT));
assertTrue(dataArtifactRowDTO.getCellValues().contains(ARTIFACT_DOUBLE));
// Test paging
Long pageSize = new Long(100);
assertTrue(ARTIFACT_COUNT_WEB_BOOKMARK > pageSize);
// Get the first page
param = new DataArtifactSearchParam(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, null);
results = dataArtifactDAO.getDataArtifactsForTable(param, 0, pageSize, false);
assertEquals(ARTIFACT_COUNT_WEB_BOOKMARK, results.getTotalResultsCount());
assertEquals(pageSize.longValue(), results.getItems().size());
// Save all artifact IDs from the first page
Set<Long> firstPageObjIds = new HashSet<>();
for (RowDTO row : results.getItems()) {
assertTrue(row instanceof DataArtifactRowDTO);
DataArtifactRowDTO dataRow = (DataArtifactRowDTO) row;
assertTrue(dataRow.getDataArtifact() != null);
firstPageObjIds.add(dataRow.getDataArtifact().getId());
}
assertEquals(pageSize.longValue(), firstPageObjIds.size());
// Get the second page
param = new DataArtifactSearchParam(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, null);
results = dataArtifactDAO.getDataArtifactsForTable(param, pageSize, pageSize, false);
assertEquals(ARTIFACT_COUNT_WEB_BOOKMARK, results.getTotalResultsCount());
assertEquals(ARTIFACT_COUNT_WEB_BOOKMARK - pageSize, results.getItems().size());
// Make sure no artifacts from the second page appeared on the first
for (RowDTO row : results.getItems()) {
assertTrue(row instanceof DataArtifactRowDTO);
DataArtifactRowDTO dataRow = (DataArtifactRowDTO) row;
assertTrue(dataRow.getDataArtifact() != null);
assertFalse("Data artifact ID: " + dataRow.getDataArtifact().getId() + " appeared on both page 1 and page 2",
firstPageObjIds.contains(dataRow.getDataArtifact().getId()));
}
} catch (ExecutionException ex) {
Exceptions.printStackTrace(ex);
Assert.fail(ex.getMessage());
@ -476,6 +564,87 @@ public class TableSearchTest extends NbTestCase {
}
}
/**
 * Exercises TagsDAO file-tag and result-tag searches against the tags
 * created in setUpCaseDatabase(), including data-source filtering and
 * column/cell spot checks.
 */
public void tagsTest() {
    // Sanity check that case setup ran
    assertTrue(db != null);

    try {
        TagsDAO tagsDAO = MainDAO.getInstance().getTagsDAO();

        // "Tag 1" file tags restricted to data source 1 — expect none
        TagsSearchParams searchParams = new TagsSearchParams(knownTag1, TagsSearchParams.TagType.FILE, dataSource1.getId());
        SearchResultsDTO searchResults = tagsDAO.getTags(searchParams, 0, null, false);
        assertEquals(0, searchResults.getTotalResultsCount());
        assertEquals(0, searchResults.getItems().size());

        // "Tag 2" file tags restricted to data source 1 — expect two
        searchParams = new TagsSearchParams(tag2, TagsSearchParams.TagType.FILE, dataSource1.getId());
        searchResults = tagsDAO.getTags(searchParams, 0, null, false);
        assertEquals(2, searchResults.getTotalResultsCount());
        assertEquals(2, searchResults.getItems().size());

        // "Tag 2" file tags across every data source — expect three
        searchParams = new TagsSearchParams(tag2, TagsSearchParams.TagType.FILE, null);
        searchResults = tagsDAO.getTags(searchParams, 0, null, false);
        assertEquals(3, searchResults.getTotalResultsCount());
        assertEquals(3, searchResults.getItems().size());

        // "Tag 1" file tags restricted to data source 2 — expect one
        searchParams = new TagsSearchParams(knownTag1, TagsSearchParams.TagType.FILE, dataSource2.getId());
        searchResults = tagsDAO.getTags(searchParams, 0, null, false);
        assertEquals(1, searchResults.getTotalResultsCount());
        assertEquals(1, searchResults.getItems().size());

        // Inspect the single row returned above
        RowDTO row = searchResults.getItems().get(0);
        assertTrue(row instanceof BaseRowDTO);
        BaseRowDTO tagRow = (BaseRowDTO) row;

        // It should be the tag on the custom file
        assertTrue(tagRow.getCellValues().contains(customFile.getName()));

        // Spot-check expected file-tag columns are present...
        List<String> displayNames = searchResults.getColumns().stream().map(p -> p.getDisplayName()).collect(Collectors.toList());
        assertTrue(displayNames.contains(MD5_COLUMN));
        assertTrue(displayNames.contains(FILE_PATH_COLUMN));
        assertTrue(displayNames.contains(MODIFIED_TIME_COLUMN));

        // ...and that result-tag-only columns are absent
        assertFalse(displayNames.contains(SOURCE_NAME_COLUMN));
        assertFalse(displayNames.contains(SOURCE_FILE_PATH_COLUMN));

        // "Tag 1" result tags restricted to data source 2 — expect none
        searchParams = new TagsSearchParams(knownTag1, TagsSearchParams.TagType.RESULT, dataSource2.getId());
        searchResults = tagsDAO.getTags(searchParams, 0, null, false);
        assertEquals(0, searchResults.getTotalResultsCount());
        assertEquals(0, searchResults.getItems().size());

        // "Tag 2" result tags restricted to data source 1 — expect one
        searchParams = new TagsSearchParams(tag2, TagsSearchParams.TagType.RESULT, dataSource1.getId());
        searchResults = tagsDAO.getTags(searchParams, 0, null, false);
        assertEquals(1, searchResults.getTotalResultsCount());
        assertEquals(1, searchResults.getItems().size());

        // "Tag 1" result tags restricted to data source 1 — expect one
        searchParams = new TagsSearchParams(knownTag1, TagsSearchParams.TagType.RESULT, dataSource1.getId());
        searchResults = tagsDAO.getTags(searchParams, 0, null, false);
        assertEquals(1, searchResults.getTotalResultsCount());
        assertEquals(1, searchResults.getItems().size());

        // Inspect the single result-tag row
        row = searchResults.getItems().get(0);
        assertTrue(row instanceof BaseRowDTO);
        tagRow = (BaseRowDTO) row;

        // The tag comment used during setup should appear among the cells
        assertTrue(tagRow.getCellValues().contains(TAG_COMMENT));

    } catch (ExecutionException ex) {
        Exceptions.printStackTrace(ex);
        Assert.fail(ex.getMessage());
    }
}
public void analysisResultSearchTest() {
// Quick test that everything is initialized
assertTrue(db != null);
@ -528,6 +697,41 @@ public class TableSearchTest extends NbTestCase {
assertTrue(analysisResultRowDTO.getCellValues().contains(ARTIFACT_CONFIGURATION));
assertTrue(analysisResultRowDTO.getCellValues().contains(ARTIFACT_CONCLUSION));
// Test paging
Long pageSize = new Long(100);
assertTrue(ARTIFACT_COUNT_YARA > pageSize);
// Get the first page
param = new AnalysisResultSearchParam(BlackboardArtifact.Type.TSK_YARA_HIT, null);
results = analysisResultDAO.getAnalysisResultsForTable(param, 0, pageSize, false);
assertEquals(ARTIFACT_COUNT_YARA, results.getTotalResultsCount());
assertEquals(pageSize.longValue(), results.getItems().size());
// Save all artifact IDs from the first page
Set<Long> firstPageObjIds = new HashSet<>();
for (RowDTO row : results.getItems()) {
assertTrue(row instanceof AnalysisResultRowDTO);
AnalysisResultRowDTO analysisRow = (AnalysisResultRowDTO) row;
assertTrue(analysisRow.getAnalysisResult() != null);
firstPageObjIds.add(analysisRow.getAnalysisResult().getId());
}
assertEquals(pageSize.longValue(), firstPageObjIds.size());
// Get the second page
param = new AnalysisResultSearchParam(BlackboardArtifact.Type.TSK_YARA_HIT, null);
results = analysisResultDAO.getAnalysisResultsForTable(param, pageSize, pageSize, false);
assertEquals(ARTIFACT_COUNT_YARA, results.getTotalResultsCount());
assertEquals(ARTIFACT_COUNT_YARA - pageSize, results.getItems().size());
// Make sure no artifacts from the second page appeared on the first
for (RowDTO row : results.getItems()) {
assertTrue(row instanceof AnalysisResultRowDTO);
AnalysisResultRowDTO analysisRow = (AnalysisResultRowDTO) row;
assertTrue(analysisRow.getAnalysisResult() != null);
assertFalse("Analysis result ID: " + analysisRow.getAnalysisResult().getId() + " appeared on both page 1 and page 2",
firstPageObjIds.contains(analysisRow.getAnalysisResult().getId()));
}
} catch (ExecutionException ex) {
Exceptions.printStackTrace(ex);
Assert.fail(ex.getMessage());
@ -721,5 +925,7 @@ public class TableSearchTest extends NbTestCase {
}
openCase = null;
db = null;
blackboard = null;
tagsManager = null;
}
}