Clean up from diff review, added in PageWorker and SearchWorker changes

U-BASIS\dsmyda 2020-09-08 10:18:46 -04:00
parent 62c97436d1
commit bcce8133d9
5 changed files with 20 additions and 16 deletions

View File

@@ -29,7 +29,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.logging.Level;
-import org.openide.util.Exceptions;
 import org.openide.util.NbBundle;
 import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoDbUtil;
 import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;

View File

@@ -30,8 +30,6 @@ import java.util.Map;
 import java.util.Optional;
 import java.util.StringJoiner;
 import org.apache.commons.lang3.tuple.Pair;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
 import org.sleuthkit.autopsy.discovery.search.DiscoveryAttributes.AttributeType;
 import org.sleuthkit.autopsy.discovery.search.DiscoveryAttributes.DataSourceAttribute;
 import org.sleuthkit.autopsy.discovery.search.DiscoveryKeyUtils.GroupKey;
@@ -56,8 +54,7 @@ import org.sleuthkit.datamodel.TskCoreException;
 class DomainSearchCacheLoader extends CacheLoader<SearchKey, Map<GroupKey, List<Result>>> {
 @Override
-public Map<GroupKey, List<Result>> load(SearchKey key) throws DiscoveryException, SQLException, TskCoreException,
-CentralRepoException, CorrelationAttributeNormalizationException {
+public Map<GroupKey, List<Result>> load(SearchKey key) throws DiscoveryException, SQLException, TskCoreException {
 List<Result> domainResults = getResultDomainsFromDatabase(key);
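
For context on the narrowed throws clause above: DomainSearchCacheLoader is a Guava CacheLoader, and Guava surfaces any checked exception thrown by load() to callers of LoadingCache.get() wrapped in an ExecutionException. The minimal sketch below shows that plumbing with generic key/value types; only the Guava calls themselves are taken as given, everything else is a stand-in rather than the Autopsy code.

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.concurrent.ExecutionException;

public class LoaderSketch {
    public static void main(String[] args) {
        // Build a cache backed by a loader whose load() may throw checked exceptions.
        LoadingCache<String, Integer> cache = CacheBuilder.newBuilder()
                .maximumSize(100)
                .build(new CacheLoader<String, Integer>() {
                    @Override
                    public Integer load(String key) throws Exception {
                        // Stand-in for the real work (querying the case database).
                        return key.length();
                    }
                });
        try {
            System.out.println(cache.get("example.com"));
        } catch (ExecutionException ex) {
            // Checked exceptions thrown by load() (e.g. SQLException) arrive here, wrapped.
            ex.printStackTrace();
        }
    }
}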
@@ -134,9 +131,15 @@ class DomainSearchCacheLoader extends CacheLoader<SearchKey, Map<GroupKey, List<
 /*SELECT */" domain," +
 " MIN(date) AS activity_start," +
 " MAX(date) AS activity_end," +
-" SUM(CASE WHEN artifact_type_id = " + TSK_WEB_DOWNLOAD.getTypeID() + " THEN 1 ELSE 0 END) AS fileDownloads," +
-" SUM(CASE WHEN artifact_type_id = " + TSK_WEB_HISTORY.getTypeID() + " AND" +
-" date BETWEEN " + sixtyDaysAgo.getEpochSecond() + " AND " + currentTime.getEpochSecond() + " THEN 1 ELSE 0 END) AS last60," +
+" SUM(CASE " +
+" WHEN artifact_type_id = " + TSK_WEB_DOWNLOAD.getTypeID() + " THEN 1 " +
+" ELSE 0 " +
+" END) AS fileDownloads," +
+" SUM(CASE " +
+" WHEN artifact_type_id = " + TSK_WEB_HISTORY.getTypeID() + " AND" +
+" date BETWEEN " + sixtyDaysAgo.getEpochSecond() + " AND " + currentTime.getEpochSecond() + " THEN 1 " +
+" ELSE 0 " +
+" END) AS last60," +
 " data_source_obj_id AS dataSource " +
 "FROM blackboard_artifacts" +
@@ -164,18 +167,17 @@ class DomainSearchCacheLoader extends CacheLoader<SearchKey, Map<GroupKey, List<
 return domainCallback.getResultDomains();
 }
 /**
 * A utility method to transform filters into the necessary SQL statements
 * for the domainsTable query. The complexity of that query requires this
 * transformation process to be conditional. The date time filter is a good
 * example of the type of conditional handling that follows in the method
 * below. If no dateTime filter is supplied, then in order for the query to
-* be correct, an special clause needs to be added in.
+* be correct, an additional clause needs to be added in.
 *
-* @return The whereClause and havingClause as a pair. These methods were
-* combined into one in order to stress that these clauses are tightly
-* coupled.
+* @return The whereClause and havingClause as a pair. These methods are one
+* to stress that these clauses are tightly coupled.
 */
 Pair<String, String> createWhereAndHavingClause(List<AbstractFilter> filters) {
 final StringJoiner whereClause = new StringJoiner(" OR ");
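
The updated Javadoc stresses that the WHERE and HAVING fragments are returned together because they are tightly coupled. A simplified sketch of that shape follows; it operates on plain strings and a boolean rather than the real AbstractFilter list, and the HAVING fragments are hypothetical forms, not the actual clauses built by createWhereAndHavingClause().

import java.util.List;
import java.util.StringJoiner;
import org.apache.commons.lang3.tuple.Pair;

public class ClauseSketch {
    // Returns the WHERE and HAVING fragments as one pair so a caller cannot use one without the other.
    static Pair<String, String> createWhereAndHavingClause(List<String> artifactTypeConditions,
            boolean hasDateTimeFilter, long startEpoch, long endEpoch) {
        final StringJoiner whereClause = new StringJoiner(" OR ");
        artifactTypeConditions.forEach(whereClause::add);

        // If no date/time filter is supplied, an extra clause still has to be added
        // for the query to stay correct (hypothetical form of that clause).
        final String havingClause = hasDateTimeFilter
                ? "HAVING activity_start >= " + startEpoch + " AND activity_end <= " + endEpoch
                : "HAVING activity_start > 0";

        return Pair.of(whereClause.toString(), havingClause);
    }

    public static void main(String[] args) {
        System.out.println(createWhereAndHavingClause(
                List.of("artifact_type_id = 1", "artifact_type_id = 2"), false, 0, 0));
    }
}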

View File

@@ -126,7 +126,8 @@ public class SearchFiltering {
 }
 /**
-* A filter to specify date range for artifacts.
+* A filter to specify date range for artifacts, start and end times should
+* be in epoch seconds.
 */
 public static class ArtifactDateRangeFilter extends AbstractFilter {
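
The clarified Javadoc above pins the filter's units to epoch seconds. A short usage sketch follows; the java.time conversion is standard, but the ArtifactDateRangeFilter constructor on the last line is an assumed signature, since it is not part of this hunk.

import java.time.LocalDate;
import java.time.ZoneOffset;

public class DateRangeSketch {
    public static void main(String[] args) {
        // Convert calendar dates to the epoch-second bounds the filter expects.
        long startEpochSeconds = LocalDate.of(2020, 9, 1).atStartOfDay(ZoneOffset.UTC).toEpochSecond();
        long endEpochSeconds = LocalDate.of(2020, 9, 8).atStartOfDay(ZoneOffset.UTC).toEpochSecond();
        System.out.println(startEpochSeconds + " .. " + endEpochSeconds);
        // new SearchFiltering.ArtifactDateRangeFilter(startEpochSeconds, endEpochSeconds); // assumed constructor
    }
}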

View File

@@ -91,7 +91,8 @@ final class PageWorker extends SwingWorker<Void, Void> {
 try {
 // Run the search
 if (resultType == SearchData.Type.DOMAIN) {
-results.addAll(DomainSearch.getDomainsInGroup(System.getProperty(USER_NAME_PROPERTY), searchfilters,
+DomainSearch domainSearch = new DomainSearch();
+results.addAll(domainSearch.getDomainsInGroup(System.getProperty(USER_NAME_PROPERTY), searchfilters,
 groupingAttribute,
 groupSort,
 fileSortMethod, groupKey, startingEntry, pageSize,
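
This PageWorker hunk, like the SearchWorker hunk below, switches from a static DomainSearch call to a call on a freshly constructed DomainSearch instance. One common motivation for that kind of change is that an instance collaborator can later be injected and stubbed in tests; the sketch below illustrates the idea with hypothetical stand-in types, not the actual DomainSearch or worker APIs.

public class WorkerSketch {
    interface Searcher {                     // hypothetical stand-in for the DomainSearch role
        int getGroupSizes(String userName);
    }

    static final class Worker {
        private final Searcher searcher;

        Worker(Searcher searcher) {          // the collaborator is injectable...
            this.searcher = searcher;
        }

        int run(String userName) {
            return searcher.getGroupSizes(userName);
        }
    }

    public static void main(String[] args) {
        Worker worker = new Worker(user -> 42);  // ...so a test can pass a stub
        System.out.println(worker.run("dsmyda"));
    }
}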

View File

@@ -76,7 +76,8 @@ final class SearchWorker extends SwingWorker<Void, Void> {
 try {
 // Run the search
 if (searchType == SearchData.Type.DOMAIN) {
-results.putAll(DomainSearch.getGroupSizes(System.getProperty(USER_NAME_PROPERTY), filters,
+DomainSearch domainSearch = new DomainSearch();
+results.putAll(domainSearch.getGroupSizes(System.getProperty(USER_NAME_PROPERTY), filters,
 groupingAttr,
 groupSortAlgorithm,
 fileSort,