merge in refactoring of top programs

This commit is contained in:
Greg DiCristofaro 2020-09-30 16:11:56 -04:00
commit 2f85ac7310
87 changed files with 820 additions and 424 deletions

View File

@ -173,7 +173,7 @@ class UnpackagePortableCaseProgressDialog extends javax.swing.JDialog implements
throw new TskCoreException("Error finding 7-Zip executable"); // NON-NLS
}
String outputFolderSwitch = "-o" + String.format("\"%s\"",outputFolder); // NON-NLS
String outputFolderSwitch = String.format("\"-o%s\"",outputFolder); // NON-NLS
ProcessBuilder procBuilder = new ProcessBuilder();
procBuilder.command(
String.format("\"%s\"",sevenZipExe.getAbsolutePath()),

View File

@ -119,6 +119,8 @@ public final class IconsUtil {
imageFile = "validationFailed.png"; //NON-NLS
} else if (typeID == ARTIFACT_TYPE.TSK_WEB_ACCOUNT_TYPE.getTypeID()) {
imageFile = "web-account-type.png"; //NON-NLS
} else if (typeID == ARTIFACT_TYPE.TSK_WEB_FORM_ADDRESS.getTypeID()) {
imageFile = "web-form-address.png"; //NON-NLS
} else {
imageFile = "artifact-icon.png"; //NON-NLS
}

View File

@ -398,6 +398,20 @@ final class DataSourceInfoUtilities {
BlackboardAttribute attr = getAttributeOrNull(artifact, attributeType);
return (attr == null) ? null : attr.getValueLong();
}
/**
 * Retrieves the int value of a certain attribute type from an artifact.
 *
 * @param artifact      The artifact.
 * @param attributeType The attribute type.
 *
 * @return The 'getValueInt()' value or null if the attribute could not be
 *         retrieved.
 */
static Integer getIntOrNull(BlackboardArtifact artifact, Type attributeType) {
    BlackboardAttribute attr = getAttributeOrNull(artifact, attributeType);
    if (attr == null) {
        return null;
    }
    return attr.getValueInt();
}
/**
* Retrieves the long value of a certain attribute type from an artifact and

View File

@ -1,370 +0,0 @@
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2020 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.datasourcesummary.datamodel;

import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultArtifactUpdateGovernor;
import java.io.File;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.commons.lang.StringUtils;
import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;

/**
 * Provides information to populate Top Programs Summary queries.
 */
public class TopProgramsSummary implements DefaultArtifactUpdateGovernor {

    // Artifact type ids whose ingest events should trigger a refresh of this
    // summary (see getArtifactTypeIdsForRefresh below).
    private static final Set<Integer> ARTIFACT_UPDATE_TYPE_IDS = new HashSet<>(Arrays.asList(
            ARTIFACT_TYPE.TSK_PROG_RUN.getTypeID()
    ));

    /**
     * A SQL join type.
     */
    private enum JoinType {
        LEFT,
        RIGHT,
        INNER,
        OUTER
    }

    /**
     * A blackboard attribute value column.
     *
     * NOTE: the lowercase constant names intentionally match the database
     * column names so the enum can be concatenated directly into SQL (see
     * getAttributeJoin).
     */
    private enum AttributeColumn {
        value_text,
        value_int32,
        value_int64
    }

    /**
     * The suffix joined to a key name for use as an identifier of a query.
     */
    private static final String QUERY_SUFFIX = "_query";

    /**
     * Functions that determine the folder name of a list of path elements. If
     * not matched, function returns null.
     */
    private static final List<Function<List<String>, String>> SHORT_FOLDER_MATCHERS = Arrays.asList(
            // handle Program Files and Program Files (x86) - if true, return the next folder
            (pathList) -> {
                if (pathList.size() < 2) {
                    return null;
                }

                String rootParent = pathList.get(0).toUpperCase();
                if ("PROGRAM FILES".equals(rootParent) || "PROGRAM FILES (X86)".equals(rootParent)) {
                    return pathList.get(1);
                } else {
                    return null;
                }
            },
            // if there is a folder named "APPLICATION DATA" or "APPDATA"
            (pathList) -> {
                for (String pathEl : pathList) {
                    String uppered = pathEl.toUpperCase();
                    if ("APPLICATION DATA".equals(uppered) || "APPDATA".equals(uppered)) {
                        return "AppData";
                    }
                }
                return null;
            }
    );

    /**
     * Creates a sql statement querying the blackboard attributes table for a
     * particular attribute type and returning a specified value. That query
     * also joins with the blackboard artifact table.
     *
     * @param joinType        The type of join statement to create.
     * @param attributeColumn The blackboard attribute column that should be
     *                        returned.
     * @param attrType        The attribute type to query for.
     * @param keyName         The aliased name of the attribute to return. This
     *                        is also used to calculate the alias of the query
     *                        same as getFullKey.
     * @param bbaName         The blackboard artifact table alias.
     *
     * @return The generated sql statement.
     */
    private static String getAttributeJoin(JoinType joinType, AttributeColumn attributeColumn, BlackboardAttribute.ATTRIBUTE_TYPE attrType, String keyName, String bbaName) {
        String queryName = keyName + QUERY_SUFFIX;
        String innerQueryName = "inner_attribute_" + queryName;

        // The attribute type name is emitted as a trailing SQL comment to make
        // the generated query easier to read when debugging.
        return "\n" + joinType + " JOIN (\n"
                + "    SELECT \n"
                + "        " + innerQueryName + ".artifact_id,\n"
                + "        " + innerQueryName + "." + attributeColumn + " AS " + keyName + "\n"
                + "    FROM blackboard_attributes " + innerQueryName + "\n"
                + "    WHERE " + innerQueryName + ".attribute_type_id = " + attrType.getTypeID() + " -- " + attrType.name() + "\n"
                + ") " + queryName + " ON " + queryName + ".artifact_id = " + bbaName + ".artifact_id\n";
    }

    /**
     * Given a column key, creates the full name for the column key.
     *
     * @param key The column key.
     *
     * @return The full identifier for the column key.
     */
    private static String getFullKey(String key) {
        return key + QUERY_SUFFIX + "." + key;
    }

    /**
     * Constructs a SQL 'where' statement from a list of clauses and puts
     * parenthesis around each clause.
     *
     * @param clauses The clauses
     *
     * @return The generated 'where' statement.
     */
    private static String getWhereString(List<String> clauses) {
        if (clauses.isEmpty()) {
            return "";
        }

        List<String> parenthesized = clauses.stream()
                .map(c -> "(" + c + ")")
                .collect(Collectors.toList());

        return "\nWHERE " + String.join("\n    AND ", parenthesized) + "\n";
    }

    /**
     * Generates a [column] LIKE sql clause.
     *
     * NOTE: likeString is concatenated directly into the SQL without escaping;
     * callers in this class only pass internal constants, never user input.
     *
     * @param column     The column identifier.
     * @param likeString The string that will be used as column comparison.
     * @param isLike     if false, the statement becomes NOT LIKE.
     *
     * @return The generated statement.
     */
    private static String getLikeClause(String column, String likeString, boolean isLike) {
        return column + (isLike ? "" : " NOT") + " LIKE '" + likeString + "'";
    }

    // Provides access to the current SleuthkitCase; injectable for testing.
    private final SleuthkitCaseProvider provider;

    public TopProgramsSummary() {
        this(SleuthkitCaseProvider.DEFAULT);
    }

    public TopProgramsSummary(SleuthkitCaseProvider provider) {
        this.provider = provider;
    }

    @Override
    public Set<Integer> getArtifactTypeIdsForRefresh() {
        return ARTIFACT_UPDATE_TYPE_IDS;
    }

    /**
     * Retrieves a list of the top programs used on the data source. Currently
     * determines this based off of which prefetch results return the highest
     * count.
     *
     * @param dataSource The data source.
     * @param count      The number of programs to return.
     *
     * @return The top results objects found.
     *
     * @throws SleuthkitCaseProviderException
     * @throws TskCoreException
     * @throws SQLException
     */
    public List<TopProgramsResult> getTopPrograms(DataSource dataSource, int count)
            throws SleuthkitCaseProviderException, TskCoreException, SQLException {
        if (dataSource == null || count <= 0) {
            return Collections.emptyList();
        }

        // ntosboot should be ignored
        final String ntosBootIdentifier = "NTOSBOOT";
        // programs in windows directory to be ignored
        final String windowsDir = "/WINDOWS%";

        final String nameParam = "name";
        final String pathParam = "path";
        final String runCountParam = "run_count";
        final String lastRunParam = "last_run";

        String bbaQuery = "bba";

        // One TSK_PROG_RUN artifact row per program name/path pair; the name
        // attribute is required (INNER join) while path, run count and last
        // run date are optional (LEFT joins).
        final String query = "SELECT\n"
                + "    " + getFullKey(nameParam) + " AS " + nameParam + ",\n"
                + "    " + getFullKey(pathParam) + " AS " + pathParam + ",\n"
                + "    MAX(" + getFullKey(runCountParam) + ") AS " + runCountParam + ",\n"
                + "    MAX(" + getFullKey(lastRunParam) + ") AS " + lastRunParam + "\n"
                + "FROM blackboard_artifacts " + bbaQuery + "\n"
                + getAttributeJoin(JoinType.INNER, AttributeColumn.value_text, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, nameParam, bbaQuery)
                + getAttributeJoin(JoinType.LEFT, AttributeColumn.value_text, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, pathParam, bbaQuery)
                + getAttributeJoin(JoinType.LEFT, AttributeColumn.value_int32, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT, runCountParam, bbaQuery)
                + getAttributeJoin(JoinType.LEFT, AttributeColumn.value_int64, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, lastRunParam, bbaQuery)
                + getWhereString(Arrays.asList(
                        bbaQuery + ".artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_PROG_RUN.getTypeID(),
                        bbaQuery + ".data_source_obj_id = " + dataSource.getId(),
                        // exclude ntosBootIdentifier from results
                        getLikeClause(getFullKey(nameParam), ntosBootIdentifier, false),
                        // exclude windows directory items from results
                        // (getWhereString parenthesizes the whole clause, so the
                        // OR binds as intended: NULL path rows are kept)
                        getFullKey(pathParam) + " IS NULL OR " + getLikeClause(getFullKey(pathParam), windowsDir, false)
                ))
                + "GROUP BY " + getFullKey(nameParam) + ", " + getFullKey(pathParam) + "\n"
                + "ORDER BY \n"
                + "    MAX(" + getFullKey(runCountParam) + ") DESC,\n"
                + "    MAX(" + getFullKey(lastRunParam) + ") DESC,\n"
                + "    " + getFullKey(nameParam) + " ASC";

        DataSourceInfoUtilities.ResultSetHandler<List<TopProgramsResult>> handler = (resultSet) -> {
            List<TopProgramsResult> progResults = new ArrayList<>();

            // Only apply the 'count' limit once a row with a run count or last
            // run date has been seen; if no row has either, all rows are
            // returned so the alphabetized list is complete.
            boolean quitAtCount = false;

            while (resultSet.next() && (!quitAtCount || progResults.size() < count)) {
                // SQL NULL values are mapped to Java null rather than 0.
                long lastRunEpoch = resultSet.getLong(lastRunParam);
                Date lastRun = (resultSet.wasNull()) ? null : new Date(lastRunEpoch * 1000);

                Long runCount = resultSet.getLong(runCountParam);
                if (resultSet.wasNull()) {
                    runCount = null;
                }

                if (lastRun != null || runCount != null) {
                    quitAtCount = true;
                }

                progResults.add(new TopProgramsResult(
                        resultSet.getString(nameParam),
                        resultSet.getString(pathParam),
                        runCount,
                        lastRun));
            }

            return progResults;
        };

        try (SleuthkitCase.CaseDbQuery dbQuery = provider.get().executeQuery(query);
                ResultSet resultSet = dbQuery.getResultSet()) {

            return handler.process(resultSet);
        }
    }

    /**
     * Determines a short folder name if any. Otherwise, returns empty string.
     *
     * @param strPath         The string path.
     * @param applicationName The application name.
     *
     * @return The short folder name or empty string if not found.
     */
    public String getShortFolderName(String strPath, String applicationName) {
        if (strPath == null) {
            return "";
        }

        // Build a root-first list of [path components..., applicationName] by
        // walking from the leaf to the root and reversing.
        List<String> pathEls = new ArrayList<>(Arrays.asList(applicationName));

        File file = new File(strPath);
        while (file != null && StringUtils.isNotBlank(file.getName())) {
            pathEls.add(file.getName());
            file = file.getParentFile();
        }

        Collections.reverse(pathEls);

        // First matcher producing a non-blank result wins.
        for (Function<List<String>, String> matchEntry : SHORT_FOLDER_MATCHERS) {
            String result = matchEntry.apply(pathEls);
            if (StringUtils.isNotBlank(result)) {
                return result;
            }
        }

        return "";
    }

    /**
     * Describes a result of a program run on a datasource.
     */
    public static class TopProgramsResult {

        private final String programName;
        private final String programPath;
        private final Long runTimes;
        private final Date lastRun;

        /**
         * Main constructor.
         *
         * @param programName The name of the program.
         * @param programPath The path of the program.
         * @param runTimes    The number of runs.
         * @param lastRun     The last time the program was run (may be null).
         */
        TopProgramsResult(String programName, String programPath, Long runTimes, Date lastRun) {
            this.programName = programName;
            this.programPath = programPath;
            this.runTimes = runTimes;
            this.lastRun = lastRun;
        }

        /**
         * @return The name of the program
         */
        public String getProgramName() {
            return programName;
        }

        /**
         * @return The path of the program.
         */
        public String getProgramPath() {
            return programPath;
        }

        /**
         * @return The number of run times or null if not present.
         */
        public Long getRunTimes() {
            return runTimes;
        }

        /**
         * @return The last time the program was run or null if not present.
         */
        public Date getLastRun() {
            return lastRun;
        }
    }
}

View File

@ -18,6 +18,7 @@
*/
package org.sleuthkit.autopsy.datasourcesummary.datamodel;
import java.io.File;
import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultArtifactUpdateGovernor;
import java.util.ArrayList;
import java.util.Arrays;
@ -30,6 +31,7 @@ import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.logging.Level;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@ -54,6 +56,36 @@ import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
*/
public class UserActivitySummary implements DefaultArtifactUpdateGovernor {
/**
* Functions that determine the folder name of a list of path elements. If
* not matched, function returns null.
*/
private static final List<Function<List<String>, String>> SHORT_FOLDER_MATCHERS = Arrays.asList(
// handle Program Files and Program Files (x86) - if true, return the next folder
(pathList) -> {
if (pathList.size() < 2) {
return null;
}
String rootParent = pathList.get(0).toUpperCase();
if ("PROGRAM FILES".equals(rootParent) || "PROGRAM FILES (X86)".equals(rootParent)) {
return pathList.get(1);
} else {
return null;
}
},
// if there is a folder named "APPLICATION DATA" or "APPDATA"
(pathList) -> {
for (String pathEl : pathList) {
String uppered = pathEl.toUpperCase();
if ("APPLICATION DATA".equals(uppered) || "APPDATA".equals(uppered)) {
return "AppData";
}
}
return null;
}
);
private static final BlackboardArtifact.Type TYPE_DEVICE_ATTACHED = new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_DEVICE_ATTACHED);
private static final BlackboardArtifact.Type TYPE_WEB_HISTORY = new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_WEB_HISTORY);
@ -69,17 +101,51 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor {
private static final BlackboardAttribute.Type TYPE_DATETIME_START = new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DATETIME_START);
private static final BlackboardAttribute.Type TYPE_DATETIME_END = new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DATETIME_END);
private static final BlackboardAttribute.Type TYPE_DOMAIN = new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DOMAIN);
private static final BlackboardAttribute.Type TYPE_PROG_NAME = new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_PROG_NAME);
private static final BlackboardAttribute.Type TYPE_PATH = new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_PATH);
private static final BlackboardAttribute.Type TYPE_COUNT = new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_COUNT);
private static final String NTOS_BOOT_IDENTIFIER = "NTOSBOOT";
private static final String WINDOWS_PREFIX = "/WINDOWS";
private static final Comparator<TopAccountResult> TOP_ACCOUNT_RESULT_DATE_COMPARE = (a, b) -> a.getLastAccess().compareTo(b.getLastAccess());
private static final Comparator<TopWebSearchResult> TOP_WEBSEARCH_RESULT_DATE_COMPARE = (a, b) -> a.getDateAccessed().compareTo(b.getDateAccessed());
/**
* Sorts TopProgramsResults pushing highest run time count then most recent
* run and then the program name that comes earliest in the alphabet.
*/
private static final Comparator<TopProgramsResult> TOP_PROGRAMS_RESULT_COMPARE = (a, b) -> {
// first priority for sorting is the run times
// if non-0, this is the return value for the comparator
int runTimesCompare = nullableCompare(a.getRunTimes(), b.getRunTimes());
if (runTimesCompare != 0) {
return -runTimesCompare;
}
// second priority for sorting is the last run date
// if non-0, this is the return value for the comparator
int lastRunCompare = nullableCompare(
a.getLastRun() == null ? null : a.getLastRun().getTime(),
b.getLastRun() == null ? null : b.getLastRun().getTime());
if (lastRunCompare != 0) {
return -lastRunCompare;
}
// otherwise sort alphabetically
return (a.getProgramName() == null ? "" : a.getProgramName())
.compareToIgnoreCase((b.getProgramName() == null ? "" : b.getProgramName()));
};
private static final Set<Integer> ARTIFACT_UPDATE_TYPE_IDS = new HashSet<>(Arrays.asList(
ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY.getTypeID(),
ARTIFACT_TYPE.TSK_MESSAGE.getTypeID(),
ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID(),
ARTIFACT_TYPE.TSK_CALLLOG.getTypeID(),
ARTIFACT_TYPE.TSK_DEVICE_ATTACHED.getTypeID(),
ARTIFACT_TYPE.TSK_WEB_HISTORY.getTypeID()
ARTIFACT_TYPE.TSK_WEB_HISTORY.getTypeID(),
ARTIFACT_TYPE.TSK_PROG_RUN.getTypeID()
));
private static final Set<String> DEVICE_EXCLUDE_LIST = new HashSet<>(Arrays.asList("ROOT_HUB", "ROOT_HUB20"));
@ -539,6 +605,188 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor {
.collect(Collectors.toList());
}
/**
 * Determines a short folder name if any. Otherwise, returns empty string.
 *
 * @param strPath         The string path.
 * @param applicationName The application name.
 *
 * @return The short folder name or empty string if not found.
 */
public String getShortFolderName(String strPath, String applicationName) {
    if (strPath == null) {
        return "";
    }

    // Collect the application name followed by every path component from
    // leaf to root, then reverse so the list reads root-first.
    List<String> elements = new ArrayList<>(Arrays.asList(applicationName));
    for (File current = new File(strPath);
            current != null && org.apache.commons.lang.StringUtils.isNotBlank(current.getName());
            current = current.getParentFile()) {
        elements.add(current.getName());
    }
    Collections.reverse(elements);

    // The first matcher that yields a non-blank short name wins.
    for (Function<List<String>, String> matcher : SHORT_FOLDER_MATCHERS) {
        String shortName = matcher.apply(elements);
        if (org.apache.commons.lang.StringUtils.isNotBlank(shortName)) {
            return shortName;
        }
    }
    return "";
}
/**
 * Creates a TopProgramsResult from a TSK_PROG_RUN blackboard artifact.
 *
 * @param artifact The TSK_PROG_RUN blackboard artifact.
 *
 * @return The generated TopProgramsResult, or null if the artifact lacks a
 *         program name, refers to the ntos boot identifier, or is located
 *         in the windows directory.
 */
private TopProgramsResult getTopProgramsResult(BlackboardArtifact artifact) {
    String programName = DataSourceInfoUtilities.getStringOrNull(artifact, TYPE_PROG_NAME);
    String path = DataSourceInfoUtilities.getStringOrNull(artifact, TYPE_PATH);

    // Skip entries with no usable name, the ntos boot identifier, or
    // programs under the windows directory.
    boolean blankOrNtosBoot = StringUtils.isBlank(programName)
            || NTOS_BOOT_IDENTIFIER.equalsIgnoreCase(programName);
    boolean underWindowsDir = StringUtils.startsWithIgnoreCase(path, WINDOWS_PREFIX);
    if (blankOrNtosBoot || underWindowsDir) {
        return null;
    }

    Integer runCount = DataSourceInfoUtilities.getIntOrNull(artifact, TYPE_COUNT);
    Date lastRun = DataSourceInfoUtilities.getDateOrNull(artifact, TYPE_DATETIME);

    return new TopProgramsResult(
            programName,
            path,
            (runCount == null) ? null : Long.valueOf(runCount),
            lastRun
    );
}
/**
 * Retrieves the maximum date given two (possibly null) dates.
 *
 * @param date1 First date.
 * @param date2 Second date.
 *
 * @return The maximum non-null date or null if both items are null.
 */
private static Date getMax(Date date1, Date date2) {
    // When both are present, pick the later one; otherwise fall back to
    // whichever is non-null (or null when both are).
    if (date1 != null && date2 != null) {
        return (date1.compareTo(date2) > 0) ? date1 : date2;
    }
    return (date1 == null) ? date2 : date1;
}
/**
 * Returns the compare value favoring the higher non-null number.
 *
 * @param long1 First possibly null long.
 * @param long2 Second possibly null long.
 *
 * @return Returns the compare value: 1,0,-1 favoring the higher non-null
 *         value.
 */
private static int nullableCompare(Long long1, Long long2) {
    // null sorts below any non-null value; two nulls are equal.
    if (long1 == null) {
        return (long2 == null) ? 0 : -1;
    }
    if (long2 == null) {
        return 1;
    }
    return Long.compare(long1, long2);
}
/**
 * Returns true if number is non-null and higher than 0.
 *
 * @param longNum The number.
 *
 * @return True if non-null and higher than 0.
 */
private static boolean isPositiveNum(Long longNum) {
    if (longNum == null) {
        return false;
    }
    return longNum > 0;
}
/**
 * Retrieves the top programs results for the given data source limited to
 * the count provided as a parameter. The highest run times are at the top
 * of the list. If that information isn't available the last run date is
 * used. If both, the last run date and the number of run times are
 * unavailable, the programs will be sorted alphabetically, the count will
 * be ignored and all items will be returned.
 *
 * @param dataSource The datasource. If the datasource is null, an empty
 *                   list will be returned.
 * @param count      The number of results to return. This value must be > 0
 *                   or an IllegalArgumentException will be thrown.
 *
 * @return The sorted list and limited to the count if last run or run count
 *         information is available on any item.
 *
 * @throws SleuthkitCaseProviderException
 * @throws TskCoreException
 */
public List<TopProgramsResult> getTopPrograms(DataSource dataSource, int count) throws SleuthkitCaseProviderException, TskCoreException {
    assertValidCount(count);

    if (dataSource == null) {
        return Collections.emptyList();
    }

    // Get TopProgramsResults for each TSK_PROG_RUN artifact
    Collection<TopProgramsResult> results = caseProvider.get().getBlackboard().getArtifacts(ARTIFACT_TYPE.TSK_PROG_RUN.getTypeID(), dataSource.getId())
            .stream()
            // convert to a TopProgramsResult object or null if missing critical information
            .map((art) -> getTopProgramsResult(art))
            // remove any null items
            .filter((res) -> res != null)
            // group by the program name and program path
            // The value will be a TopProgramsResult with the max run times
            // and most recent last run date for each program name / program path pair.
            // NOTE(review): getMax is applied to Long run times here, but only a
            // Date overload of getMax is visible in this view — confirm a Long
            // overload exists.
            .collect(Collectors.toMap(
                    res -> Pair.of(res.getProgramName(), res.getProgramPath()),
                    res -> res,
                    (res1, res2) -> {
                        return new TopProgramsResult(
                                res1.getProgramName(),
                                res1.getProgramPath(),
                                getMax(res1.getRunTimes(), res2.getRunTimes()),
                                getMax(res1.getLastRun(), res2.getLastRun()));
                    })).values();

    // Sort by run count desc, then last run desc, then name asc (see
    // TOP_PROGRAMS_RESULT_COMPARE).
    List<TopProgramsResult> orderedResults = results.stream()
            .sorted(TOP_PROGRAMS_RESULT_COMPARE)
            .collect(Collectors.toList());

    // only limit the list to count if there is no last run date and no run times.
    if (orderedResults.size() > 0) {
        TopProgramsResult topResult = orderedResults.get(0);
        // if run times / last run information is available, the first item should have some value,
        // and then the items should be limited accordingly.
        if (isPositiveNum(topResult.getRunTimes())
                || (topResult.getLastRun() != null && isPositiveNum(topResult.getLastRun().getTime()))) {
            return orderedResults.stream().limit(count).collect(Collectors.toList());
        }
    }

    // otherwise return the alphabetized list with no limit applied.
    return orderedResults;
}
/**
* Object containing information about a web search artifact.
*/
@ -722,4 +970,57 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor {
return lastVisit;
}
}
/**
 * Describes a result of a program run on a datasource.
 */
public static class TopProgramsResult {

    private final String programName;
    private final String programPath;
    // Number of recorded runs; null when not available.
    private final Long runTimes;
    // Most recent run date; null when not available.
    private final Date lastRun;

    /**
     * Main constructor.
     *
     * @param programName The name of the program.
     * @param programPath The path of the program.
     * @param runTimes    The number of runs.
     * @param lastRun     The last time the program was run (may be null).
     */
    TopProgramsResult(String programName, String programPath, Long runTimes, Date lastRun) {
        this.programName = programName;
        this.programPath = programPath;
        this.runTimes = runTimes;
        this.lastRun = lastRun;
    }

    /**
     * @return The name of the program
     */
    public String getProgramName() {
        return programName;
    }

    /**
     * @return The path of the program.
     */
    public String getProgramPath() {
        return programPath;
    }

    /**
     * @return The number of run times or null if not present.
     */
    public Long getRunTimes() {
        return runTimes;
    }

    /**
     * @return The last time the program was run or null if not present.
     */
    public Date getLastRun() {
        return lastRun;
    }
}
}

View File

@ -29,12 +29,11 @@ import org.apache.commons.lang.StringUtils;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.datasourcesummary.datamodel.IngestModuleCheckUtil;
import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary;
import org.sleuthkit.autopsy.datasourcesummary.datamodel.TopProgramsSummary;
import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopAccountResult;
import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopDeviceAttachedResult;
import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopWebSearchResult;
import org.sleuthkit.autopsy.datasourcesummary.datamodel.TopProgramsSummary.TopProgramsResult;
import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopDomainsResult;
import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopProgramsResult;
import org.sleuthkit.autopsy.datasourcesummary.uiutils.CellModelTableCellRenderer.DefaultCellModel;
import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchWorker.DataFetchComponents;
import org.sleuthkit.autopsy.datasourcesummary.uiutils.IngestRunningLabel;
@ -227,35 +226,30 @@ public class UserActivityPanel extends BaseDataSourceSummaryPanel {
private final IngestRunningLabel ingestRunningLabel = new IngestRunningLabel();
private final List<DataFetchComponents<DataSource, ?>> dataFetchComponents;
private final TopProgramsSummary topProgramsData;
private final UserActivitySummary userActivityData;
/**
* Creates a new UserActivityPanel.
*/
public UserActivityPanel() {
this(new TopProgramsSummary(), new UserActivitySummary());
this(new UserActivitySummary());
}
/**
* Creates a new UserActivityPanel.
*
* @param topProgramsData Class from which to obtain top programs data.
* @param userActivityData Class from which to obtain remaining user
* activity data.
*/
public UserActivityPanel(
TopProgramsSummary topProgramsData,
UserActivitySummary userActivityData) {
super(topProgramsData, userActivityData);
this.topProgramsData = topProgramsData;
public UserActivityPanel(UserActivitySummary userActivityData) {
super(userActivityData);
this.userActivityData = userActivityData;
// set up data acquisition methods
this.dataFetchComponents = Arrays.asList(
// top programs query
new DataFetchComponents<DataSource, List<TopProgramsResult>>(
(dataSource) -> topProgramsData.getTopPrograms(dataSource, TOP_PROGS_COUNT),
(dataSource) -> userActivityData.getTopPrograms(dataSource, TOP_PROGS_COUNT),
(result) -> {
showResultWithModuleCheck(topProgramsTable, result,
IngestModuleCheckUtil.RECENT_ACTIVITY_FACTORY,
@ -307,7 +301,7 @@ public class UserActivityPanel extends BaseDataSourceSummaryPanel {
* @return The underlying short folder name if one exists.
*/
private String getShortFolderName(String path, String appName) {
return this.topProgramsData.getShortFolderName(path, appName);
return this.userActivityData.getShortFolderName(path, appName);
}
@Override

View File

@ -140,7 +140,7 @@ public class DataFetchWorker<A, R> extends SwingWorker<R, Void> {
}
// and pass the result to the client
resultHandler.accept(DataFetchResult.getErrorResult(inner));
resultHandler.accept(DataFetchResult.getErrorResult(ex.getCause()));
return;
}

View File

@ -26,6 +26,7 @@ import java.util.Map;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
import org.sleuthkit.autopsy.discovery.search.DiscoveryKeyUtils.GroupKey;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.SleuthkitCase;
/**
@ -35,12 +36,14 @@ public class DomainSearch {
private final DomainSearchCache searchCache;
private final DomainSearchThumbnailCache thumbnailCache;
private final DomainSearchArtifactsCache artifactsCache;
/**
* Construct a new DomainSearch object.
*/
public DomainSearch() {
this(new DomainSearchCache(), new DomainSearchThumbnailCache());
this(new DomainSearchCache(), new DomainSearchThumbnailCache(),
new DomainSearchArtifactsCache());
}
/**
@ -51,9 +54,11 @@ public class DomainSearch {
* @param thumbnailCache The DomainSearchThumnailCache to use for this
* DomainSearch.
*/
DomainSearch(DomainSearchCache cache, DomainSearchThumbnailCache thumbnailCache) {
DomainSearch(DomainSearchCache cache, DomainSearchThumbnailCache thumbnailCache,
DomainSearchArtifactsCache artifactsCache) {
this.searchCache = cache;
this.thumbnailCache = thumbnailCache;
this.artifactsCache = artifactsCache;
}
/**
@ -139,17 +144,40 @@ public class DomainSearch {
}
/**
* Get a thumbnail representation of a domain name. See
* DomainSearchThumbnailRequest for more details.
* Get a thumbnail representation of a domain name.
*
* Thumbnail candidates are JPEG files that have either TSK_WEB_DOWNLOAD or
* TSK_WEB_CACHE artifacts that match the domain name (see the DomainSearch
* getArtifacts() API). JPEG files are sorted by most recent if sourced from
* TSK_WEB_DOWNLOADs and by size if sourced from TSK_WEB_CACHE artifacts.
* The first suitable thumbnail is selected.
*
* @param thumbnailRequest Thumbnail request for domain.
*
* @return An Image instance or null if no thumbnail is available.
* @return A thumbnail of the first matching JPEG, or a default thumbnail if
* no suitable JPEG exists.
*
* @throws DiscoveryException If there is an error with Discovery related
* processing.
* processing.
*/
public Image getThumbnail(DomainSearchThumbnailRequest thumbnailRequest) throws DiscoveryException {
return thumbnailCache.get(thumbnailRequest);
}
/**
* Get all blackboard artifacts that match the requested domain name.
*
* Artifacts will be selected if the requested domain name is either an
* exact match on a TSK_DOMAIN value or a substring match on a TSK_URL
* value. String matching is case insensitive.
*
* @param artifactsRequest The request containing the case, artifact type,
* and domain name.
* @return A list of blackboard artifacts that match the request criteria.
* @throws DiscoveryException If an exception is encountered during
* processing.
*/
public List<BlackboardArtifact> getArtifacts(DomainSearchArtifactsRequest artifactsRequest) throws DiscoveryException {
return artifactsCache.get(artifactsRequest);
}
}

View File

@ -47,6 +47,11 @@ public class DomainSearchArtifactsCache {
* process.
*/
public List<BlackboardArtifact> get(DomainSearchArtifactsRequest request) throws DiscoveryException {
String typeName = request.getArtifactType().getLabel();
if (!typeName.startsWith("TSK_WEB")) {
throw new IllegalArgumentException("Only web artifacts are valid arguments");
}
try {
return cache.get(request);
} catch (ExecutionException ex) {

View File

@ -39,10 +39,10 @@ import org.openide.util.ImageUtilities;
/**
* Loads a thumbnail for the given request. Thumbnail candidates are JPEG files
* that are either TSK_WEB_DOWNLOAD or TSK_WEB_CACHE artifacts. JPEG files are
* sorted by most recent if sourced from TSK_WEB_DOWNLOADs. JPEG files are
* sorted by size if sourced from TSK_WEB_CACHE artifacts. Artifacts are first
* loaded from the DomainSearchArtifactsCache and then further analyzed.
* that have either TSK_WEB_DOWNLOAD or TSK_WEB_CACHE artifacts that match the
* domain name (see the DomainSearch getArtifacts() API). JPEG files are sorted
* by most recent if sourced from TSK_WEB_DOWNLOADs and by size if sourced from
* TSK_WEB_CACHE artifacts. The first suitable thumbnail is selected.
*/
public class DomainSearchThumbnailLoader extends CacheLoader<DomainSearchThumbnailRequest, Image> {

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

View File

@ -183,6 +183,7 @@ public class HEICProcessor implements PictureProcessor {
final Path outputFile = moduleOutputFolder.resolve(baseFileName + ".jpg");
final Path imageMagickErrorOutput = moduleOutputFolder.resolve(IMAGE_MAGICK_ERROR_FILE);
Files.deleteIfExists(imageMagickErrorOutput);
Files.createFile(imageMagickErrorOutput);
// ImageMagick will write the primary image to the output file.

View File

@ -379,7 +379,10 @@ public class HTMLReport implements TableReportModule {
in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/images/validationFailed.png"); //NON-NLS
break;
case TSK_WEB_ACCOUNT_TYPE:
in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/images/web-account-type.png.png"); //NON-NLS
in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/images/web-account-type.png"); //NON-NLS
break;
case TSK_WEB_FORM_ADDRESS:
in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/images/web-form-address.png"); //NON-NLS
break;
default:
logger.log(Level.WARNING, "useDataTypeIcon: unhandled artifact type = {0}", dataType); //NON-NLS

View File

@ -83,6 +83,7 @@ public class PortableCaseReportModuleSettings implements ReportModuleSettings {
this.chunkSize = ChunkSize.NONE;
this.allTagsSelected = true;
this.allSetsSelected = true;
this.shouldIncludeApplication = false;
}
PortableCaseReportModuleSettings(List<String> setNames, List<TagName> tagNames,
@ -93,6 +94,7 @@ public class PortableCaseReportModuleSettings implements ReportModuleSettings {
this.chunkSize = chunkSize;
this.allTagsSelected = allTagsSelected;
this.allSetsSelected = allSetsSelected;
this.shouldIncludeApplication = false;
}
@Override

View File

@ -33,8 +33,8 @@ import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import static org.mockito.Mockito.*;
import org.sleuthkit.autopsy.testutils.TskMockUtils;
import static org.mockito.Mockito.*;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE;
@ -70,7 +70,7 @@ public class GetArtifactsTest {
*
* @throws TskCoreException
*/
private final void test(BlackboardArtifact.Type artifactType, DataSource dataSource, BlackboardAttribute.Type attributeType,
private void test(BlackboardArtifact.Type artifactType, DataSource dataSource, BlackboardAttribute.Type attributeType,
SortOrder sortOrder, int count, List<BlackboardArtifact> returnArr, TskCoreException blackboardEx,
List<BlackboardArtifact> expectedArr, Class<? extends Exception> expectedException) throws TskCoreException {

View File

@ -0,0 +1,189 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.discovery.search;
import com.google.common.collect.Lists;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
/**
 * Unit tests for DomainSearchArtifactsCache covering argument validation,
 * interrupt handling during load, and matching of artifacts to a requested
 * domain by either their TSK_DOMAIN or TSK_URL attribute (including
 * case-insensitive comparison in both directions).
 */
public class DomainSearchArtifactsCacheTest {

    private static final ARTIFACT_TYPE WEB_ARTIFACT_TYPE = ARTIFACT_TYPE.TSK_WEB_BOOKMARK;
    private static final BlackboardAttribute.Type TSK_DOMAIN = new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DOMAIN);
    private static final BlackboardAttribute.Type TSK_URL = new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_URL);

    @Test(expected = IllegalArgumentException.class)
    public void get_NonWebArtifactType_ShouldThrow() throws DiscoveryException {
        DomainSearchArtifactsRequest request = new DomainSearchArtifactsRequest(null, "google.com", ARTIFACT_TYPE.TSK_CALLLOG);
        DomainSearchArtifactsCache cache = new DomainSearchArtifactsCache();
        cache.get(request);
    }

    /*
     * This test is important for ensuring artifact loading can
     * be cancelled, which is necessary for a responsive UI.
     */
    @Test
    public void get_ThreadInterrupted_ShouldThrow() throws TskCoreException {
        SleuthkitCase mockCase = caseWithArtifact(mock(BlackboardArtifact.class));
        DomainSearchArtifactsRequest request = new DomainSearchArtifactsRequest(mockCase, "facebook.com", WEB_ARTIFACT_TYPE);
        DomainSearchArtifactsCache cache = new DomainSearchArtifactsCache();
        Thread.currentThread().interrupt();
        try {
            cache.get(request);
            // Clear the interrupt flag on failure.
            Thread.interrupted();
            Assert.fail("Should have thrown a discovery exception.");
        } catch (DiscoveryException ex) {
            // Clear the interrupt flag on success (or failure).
            Thread.interrupted();
            Assert.assertEquals(InterruptedException.class, ex.getCause().getCause().getClass());
        }
    }

    @Test
    public void get_MatchingDomain_ShouldHaveSizeOne() throws TskCoreException, DiscoveryException {
        assertSingleMatch(artifactWithDomain("google.com"), "google.com");
    }

    @Test
    public void get_MatchingUrl_ShouldHaveSizeOne() throws TskCoreException, DiscoveryException {
        assertSingleMatch(artifactWithUrl("https://www.abc.com/search"), "abc.com");
    }

    @Test
    public void get_MismatchedDomainName_ShouldBeEmpty() throws TskCoreException, DiscoveryException {
        assertNoMatch(artifactWithDomain("google.com"), "facebook.com");
    }

    @Test
    public void get_MismatchedUrl_ShouldBeEmpty() throws DiscoveryException, TskCoreException {
        assertNoMatch(artifactWithUrl("https://www.dce1.com/search"), "dce.com");
    }

    @Test
    public void get_CaseInsensitiveDomainAttribute_ShouldHaveSizeOne() throws TskCoreException, DiscoveryException {
        assertSingleMatch(artifactWithDomain("xYZ.coM"), "xyz.com");
    }

    @Test
    public void get_CaseInsensitiveRequestDomain_ShouldHaveSizeOne() throws TskCoreException, DiscoveryException {
        assertSingleMatch(artifactWithDomain("google.com"), "GooGle.coM");
    }

    @Test
    public void get_CaseInsensitiveUrlAttribute_ShouldHaveSizeOne() throws TskCoreException, DiscoveryException {
        assertSingleMatch(artifactWithUrl("https://www.JfK.coM/search"), "jfk.com");
    }

    @Test
    public void get_CaseInsensitiveRequestUrl_ShouldHaveSizeOne() throws TskCoreException, DiscoveryException {
        assertSingleMatch(artifactWithUrl("https://www.hop.com/search"), "HoP.cOm");
    }

    /**
     * Builds a mock web artifact whose TSK_DOMAIN attribute holds the given
     * value. (The attribute itself is a real BlackboardAttribute, not a mock.)
     */
    private BlackboardArtifact artifactWithDomain(String domain) throws TskCoreException {
        BlackboardArtifact mockArtifact = mock(BlackboardArtifact.class);
        when(mockArtifact.getAttribute(TSK_DOMAIN)).thenReturn(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, "", domain));
        return mockArtifact;
    }

    /**
     * Builds a mock web artifact whose TSK_URL attribute holds the given value.
     */
    private BlackboardArtifact artifactWithUrl(String url) throws TskCoreException {
        BlackboardArtifact mockArtifact = mock(BlackboardArtifact.class);
        when(mockArtifact.getAttribute(TSK_URL)).thenReturn(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, "", url));
        return mockArtifact;
    }

    /**
     * Builds a mock case that returns exactly one web artifact of
     * WEB_ARTIFACT_TYPE.
     */
    private SleuthkitCase caseWithArtifact(BlackboardArtifact artifact) throws TskCoreException {
        SleuthkitCase mockCase = mock(SleuthkitCase.class);
        when(mockCase.getBlackboardArtifacts(WEB_ARTIFACT_TYPE)).thenReturn(Lists.newArrayList(artifact));
        return mockCase;
    }

    /**
     * Asserts that a fresh cache, queried for requestDomain, returns exactly
     * the given artifact.
     */
    private void assertSingleMatch(BlackboardArtifact artifact, String requestDomain) throws TskCoreException, DiscoveryException {
        SleuthkitCase mockCase = caseWithArtifact(artifact);
        DomainSearchArtifactsRequest request = new DomainSearchArtifactsRequest(mockCase, requestDomain, WEB_ARTIFACT_TYPE);
        DomainSearchArtifactsCache cache = new DomainSearchArtifactsCache();
        List<BlackboardArtifact> artifacts = cache.get(request);
        Assert.assertEquals(1, artifacts.size());
        Assert.assertEquals(artifact, artifacts.get(0));
    }

    /**
     * Asserts that a fresh cache, queried for requestDomain, returns no
     * artifacts.
     */
    private void assertNoMatch(BlackboardArtifact artifact, String requestDomain) throws TskCoreException, DiscoveryException {
        SleuthkitCase mockCase = caseWithArtifact(artifact);
        DomainSearchArtifactsRequest request = new DomainSearchArtifactsRequest(mockCase, requestDomain, WEB_ARTIFACT_TYPE);
        DomainSearchArtifactsCache cache = new DomainSearchArtifactsCache();
        Assert.assertEquals(0, cache.get(request).size());
    }
}

View File

@ -48,7 +48,7 @@ public class DomainSearchTest {
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null,
new ArrayList<>(), null, null, null, null, null);
assertEquals(4, sizes.get(groupOne).longValue());
@ -83,7 +83,7 @@ public class DomainSearchTest {
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null,
new ArrayList<>(), null, null, null, null, null);
assertEquals(4, sizes.get(groupOne).longValue());
@ -97,7 +97,7 @@ public class DomainSearchTest {
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(new HashMap<>());
DomainSearch domainSearch = new DomainSearch(cache, null);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null,
new ArrayList<>(), null, null, null, null, null);
assertEquals(0, sizes.size());
@ -122,7 +122,7 @@ public class DomainSearchTest {
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null);
assertEquals(3, firstPage.size());
@ -150,7 +150,7 @@ public class DomainSearchTest {
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 100, null, null);
assertEquals(4, firstPage.size());
@ -178,7 +178,7 @@ public class DomainSearchTest {
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 2, null, null);
assertEquals(2, firstPage.size());
@ -206,7 +206,7 @@ public class DomainSearchTest {
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 3, 1, null, null);
assertEquals(1, firstPage.size());
@ -232,7 +232,7 @@ public class DomainSearchTest {
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 20, 5, null, null);
assertEquals(0, firstPage.size());
@ -257,7 +257,7 @@ public class DomainSearchTest {
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 0, null, null);
assertEquals(0, firstPage.size());
@ -292,7 +292,7 @@ public class DomainSearchTest {
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null);
assertEquals(3, firstPage.size());
@ -327,7 +327,7 @@ public class DomainSearchTest {
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupTwo, 1, 2, null, null);
assertEquals(2, firstPage.size());
@ -359,7 +359,7 @@ public class DomainSearchTest {
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
int start = 0;
int size = 2;

View File

@ -133,4 +133,7 @@ public class TskMockUtils {
doNothing().when(logger.log(any(Level.class), any(Throwable.class)));
return logger;
}
// Private constructor: TskMockUtils is a static utility holder and must not be instantiated.
private TskMockUtils() {
}
}

View File

@ -1,5 +1,8 @@
/*! \page EXIF_parser_page Picture Analyzer Module
[TOC]
\section exif_overview Overview
The Picture Analyzer module extracts EXIF (Exchangeable Image File Format) information from ingested pictures. This information can contain geolocation data for the picture, time, date, camera model and settings (exposure values, resolution, etc.), and other information. The discovered attributes are added to the Blackboard. This can tell you where and when a picture was taken, and give clues to the camera that took it.

View File

@ -1,5 +1,6 @@
/*! \page ad_hoc_keyword_search_page Ad Hoc Keyword Search
[TOC]
\section ad_hoc_kw_overview Overview

View File

@ -1,5 +1,7 @@
/*! \page android_analyzer_page Android Analyzer Module
[TOC]
What Does It Do
========

View File

@ -1,5 +1,7 @@
/*! \page embedded_file_extractor_page Embedded File Extraction Module
[TOC]
\section embedded_files_overview What Does It Do

View File

@ -1,5 +1,8 @@
/*! \page auto_ingest_page Automated Ingest
[TOC]
\section auto_ingest_overview Overview
Auto ingest allows one or many computers to process \ref ds_page "data sources" automatically with minimal support from a user. The resulting \ref multiuser_page "multi-user cases" can be opened and reviewed by analysts, using any of the normal functions in Autopsy.

View File

@ -1,5 +1,8 @@
/*! \page auto_ingest_admin_page Auto Ingest Administration
[TOC]
\section auto_ingest_admin_overview Overview
Examiner nodes in an \ref auto_ingest_page environment can be given a type of administrator access. This allows an admin to:

View File

@ -1,5 +1,8 @@
/*! \page auto_ingest_setup_page Auto Ingest Configuration
[TOC]
\section auto_ingest_setup_overview Overview
A multi-user installation requires several network-based services, such as a central database and a messaging system, and automated ingest requires one or more auto ingest nodes. While you may run all of the external services on a single node, this is not likely to be ideal - spreading the services out across several machines can improve throughput. Keeping in mind that all the following machines need to be able to communicate with each other with network visibility to the shared drive, here is a description of a possible configuration:

View File

@ -1,5 +1,8 @@
/*! \page cases_page Cases
[TOC]
You need to create a case before you can analyze data in Autopsy. A case can contain one or more data sources (disk images, disk devices, logical files). The data sources can be from multiple drives in a single computer or from multiple computers. It's up to you.
Each case has its own directory that is named based on the case name. The directory will contain configuration files, a database, reports, and other files that modules generates. The main Autopsy case configuration file has an ".aut" extension.
@ -45,17 +48,9 @@ You can view the case properties by going to the "Case" menu and clicking "Case
Most of the case properties can be edited through the "Edit Details" button.
You can view the data source summary by going to the "Case" menu and clicking "Data Source Summary". The table at the top shows general information about each data source in the case. In the lower half, the first tab shows more detailed information about the selected data source.
You can view the data source summary by going to the "Case" menu and clicking "Data Source Summary" or by selecting the data source in the \ref tree_viewer_page and then the "Summary" tab. More information can be found on the \ref data_source_summary_page page.
\image html data_source_summary_details.png
The second tab, "Counts", shows the number of files found of various types and number of extracted results.
\image html data_source_summary_counts.png
The third tab, "Ingest History", shows each ingest job, the time it was completed, and which modules were run as part of the job.
\image html data_source_summary_ingest.png
\image html ds_summary_window.png
*/

View File

@ -1,5 +1,8 @@
/*! \page central_repo_page Central Repository
[TOC]
\section cr_overview Overview
The central repository allows a user to find matching artifacts both across cases and across data sources in the same case.

View File

@ -1,5 +1,8 @@
/*! \page command_line_ingest_page Command Line Ingest
[TOC]
\section command_line_ingest_overview Overview
The Command Line Ingest feature allows you to run many of Autopsy's functions from the command line. You can add data sources to cases, choose which ingest modules to run, and automatically generate a report. When complete, these cases can be opened as normal or you can simply use the reports and other output without opening Autopsy.

View File

@ -1,5 +1,8 @@
/*! \page common_properties_page Common Properties Search
[TOC]
\section common_properties_overview Overview
The Common Properties Search feature allows you to search for multiple copies of a property within the current case or within the \ref central_repo_page.

View File

@ -1,5 +1,8 @@
/*! \page communications_page Communications Visualization Tool
[TOC]
\section cvt_overview Overview
The Communications Visualization Tool gives a consolidated view of all communication events for the case. This allows an analyst to quickly view communications data such as:

View File

@ -1,5 +1,8 @@
/*! \page content_viewer_page Content Viewer
[TOC]
The Content Viewer lives in the lower right-hand side of the Autopsy main screen and shows pictures, video, hex, text, extracted strings, metadata, etc. The Content Viewer is populated when you select an entry in the \ref ui_results.
The Content Viewer is context-aware, meaning different tabs will be enabled depending on the type of content selected and which ingest modules have been run. It will default to what it considers the "most specific" tab. For example, selecting a JPG will cause the Content Viewer to automatically select the "Application" tab and will display the image there. If you instead would like the Content Viewer to stay on the previously selected tab when you change to a different content object, go to the \ref view_options_page panel through Tools->Options->Application Tab and select the "Stay on the same file viewer" option.

View File

@ -1,5 +1,8 @@
/*! \page data_source_integrity_page Data Source Integrity Module
[TOC]
\section data_source_integrity_overview Overview
The Data Source Integrity module has two purposes:

View File

@ -0,0 +1,71 @@
/*! \page data_source_summary_page Data Source Summary
[TOC]
\section ds_summary_overview Overview
The Data Source Summary viewer allows you to see an overview of file types, results, and other information for a particular data source.
\section ds_summary_opening Opening the Data Source Summary
There are two ways to view the data source summary. The first is to go to Case->Data Source Summary. This will open the summary in a new window.
\image html ds_summary_window.png
The second way to view the summary is to select the data source in the \ref tree_viewer_page and then select the "Summary" tab in the \ref result_viewer_page.
\image html ds_summary_result_viewer.png
\section ds_summary_main Sections
Each tab of the data source summary displays different types of information about the selected data source. If ingest is ongoing, the summary will be updated periodically as new data is available.
If the ingest module(s) required for a data type have not been run, you will see a note about why there is no data. For example, "Recent Programs" results are created by the \ref recent_activity_page, so you will see a message about running that module if you have not.
\image html DataSourceSummary/ds_summary_noRA.png
\subsection ds_summary_types Types
The Types tab shows counts of different file types found in the data source.
\image html ds_summary_types.png
\subsection ds_summary_user_activity User Activity
The User Activity tab shows the most recent results found in the data source.
\image html ds_summary_user_activity.png
\subsection ds_summary_analysis Analysis
The Analysis tab shows the sets with the most results from the \ref hash_db_page, the \ref keyword_search_page, and the \ref interesting_files_identifier_page.
\image html ds_summary_analysis.png
\subsection ds_summary_recent_files Recent Files
The Recent Files tab shows information on the most recent files opened and downloaded.
\image html ds_summary_recent_files.png
\subsection ds_summary_past_cases Past Cases
The Past Cases tab shows which cases had results or notable files in common with the current data source. This is based on the results in the "Interesting Items" section of the Results section in the \ref tree_viewer_page. The \ref central_repo_page ingest module must have been run with the options to "Flag items previously tagged as notable" and "Flag devices previously seen in other cases" enabled.
\image html ds_summary_past_cases.png
Note that because these entries are based on the Interesting Items results created during ingest and not querying the central repository, they will not reflect any matches in cases processed after this case. For example, suppose we create Case A and ingest a data source with Device Z. If we make a new case Case B afterward and ingest a data source that also has Device Z, we would see Case A listed in this tab for Case B, but if we reopened Case A we would not see Case B listed unless ingest was run again.
\subsection ds_summary_ingest_history Ingest History
The Ingest History tab shows which ingest modules have been run on the data source and the version of each module.
\image html ds_summary_ingest.png
\subsection ds_summary_container Container
The Container tab displays information on the data source itself, such as the size and image paths.
\image html ds_summary_container.png
*/

View File

@ -1,6 +1,9 @@
/*! \page ds_page Data Sources
[TOC]
A data source is the thing you want to analyze. It can be a disk image, some logical files, a local disk, etc. You must open a case prior to adding a data source to Autopsy.
Autopsy supports multiple types of data sources:

View File

@ -1,5 +1,8 @@
/*! \page drone_page Drone Analyzer
[TOC]
\section drone_overview Overview
The Drone Analyzer module allows you to analyze files from a drone.

View File

@ -1,5 +1,8 @@
/*! \page email_parser_page Email Parser Module
[TOC]
What Does It Do
========

View File

@ -1,5 +1,8 @@
/*! \page encryption_page Encryption Detection Module
[TOC]
\section encrypt_overview Overview
The Encryption Detection Module searches for files that could be encrypted using both a general entropy calculation and more specialized tests for certain file types.

View File

@ -1,5 +1,8 @@
/*! \page experimental_page Experimental Module
[TOC]
\section exp_overview Overview
The Experimental module, as the name implies, contains code that is not yet part of the official Autopsy release. These experimental features can be used but may be less polished than other features and will have less documentation. These modules may be changed at any time.

View File

@ -1,5 +1,8 @@
/*! \page extension_mismatch_detector_page Extension Mismatch Detector Module
[TOC]
What Does It Do
========

View File

@ -1,5 +1,8 @@
/*! \page file_export_page File Export
[TOC]
\section file_export_overview Overview
If enabled, the File Exporter will run after each \ref auto_ingest_page job and export any files from that data source that match the supplied rules. Most users will not need to use this feature - analysts can open the auto ingest cases in an examiner node and look through the data there.

View File

@ -1,5 +1,8 @@
/*! \page file_search_page File Search
[TOC]
\section about_file_search About File Search
The File Search tool can be accessed either from the Tools menu or by right-clicking on a data source node in the Data Explorer / Directory Tree. By using File Search, you can specify, filter, and show the directories and files that you want to see from the images in the currently opened case. The File Search results will be populated in a brand new Table Result viewer on the right-hand side.

View File

@ -1,5 +1,8 @@
/*! \page file_type_identification_page File Type Identification Module
[TOC]
What Does It Do
========

View File

@ -1,5 +1,8 @@
/*! \page geolocation_page Geolocation
[TOC]
\section geo_overview Overview
The Geolocation window shows artifacts that have longitude and latitude attributes as waypoints on a map. In the field, when access to online map tile servers may not be available, the Geolocation window provides support for offline map tile data sources.

View File

@ -1,5 +1,8 @@
/*! \page gpx_page GPX Analyzer
[TOC]
\section gpx_overview Overview
The GPX Analyzer module allows you to import GPS data from a GPX file. Information on the GPX format can be found <a href="https://www.topografix.com/gpx.asp">here</a>. The following is a short sample of a GPX file:

View File

@ -1,5 +1,8 @@
/*! \page hash_db_page Hash Lookup Module
[TOC]
What Does It Do
========

View File

@ -1,4 +1,8 @@
/*! \page image_gallery_page Image Gallery Module
[TOC]
Overview
========
This document outlines the use of the Image Gallery feature of Autopsy. This feature was funded by DHS S&T to help provide free and open source digital forensics tools to law enforcement.

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 21 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 27 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 32 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 63 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 42 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 25 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 48 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 38 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 38 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 36 KiB

View File

@ -1,5 +1,8 @@
/*! \page ingest_page Ingest Modules
[TOC]
Ingest modules analyze the data in a data source. They perform all of the analysis of the files and parse their contents. Examples include \ref hash_db_page "hash calculation and lookup", \ref keyword_search_page "keyword searching", and \ref recent_activity_page "web artifact extraction".
Immediately after you add a data source to a case (see \ref ds_page), you will be presented with a dialog to configure the ingest modules to run on it. Once configured, they will run in the background and provide you real-time results when they find relevant information.

View File

@ -1,5 +1,8 @@
/*! \page interesting_files_identifier_page Interesting Files Identifier Module
[TOC]
\section interesting_files_overview Overview
The Interesting Files module allows you to automatically flag files and directories that match a set of rules. This can be useful if you always need to check whether files with a given name or path are in the data source, or if you are always interested in files with a certain type.

View File

@ -1,5 +1,8 @@
/*! \page keyword_search_page Keyword Search Module
[TOC]
\section keyword_module_overview What Does It Do
The Keyword Search module facilitates both the \ref ingest_page "ingest" portion of searching and also supports manual text searching after ingest has completed (see \ref ad_hoc_keyword_search_page). It extracts text from files being ingested, selected reports generated by other modules, and results generated by other modules. This extracted text is then added to a Solr index that can then be searched.
@ -59,7 +62,7 @@ that exist in a location that Autopsy can understand. To add support for more la
and move them to the right location. The following steps break down this process for you:
<ol>
<li>Navigate to https://github.com/tesseract-ocr/tesseract/wiki/Data-Files.
<li>Navigate to https://tesseract-ocr.github.io/tessdoc/Data-Files.
<li>Under the section titled "Data Files for Version 4.00 (November 29, 2016)" you will find a table containing files that represent each language. These files have the extension ".traineddata".
<li>To download the desired language, click on the links in the far right-hand column of the table. You may download as many as you like. Note that you must only choose from this table. Language files under any other sections are not guaranteed to work in Autopsy.
<li>Once you've downloaded your language files, simply drag and drop them into the "AppData\Roaming\autopsy\ocr_language_packs" folder under your user folder.

View File

@ -1,5 +1,8 @@
/*! \page live_triage_page Creating a Live Triage Drive
[TOC]
\section live_triage_overview Overview
The Live Triage feature allows you to load Autopsy onto a removable drive to run on target systems while making minimal changes to that target system. This will currently only work on Windows systems.

View File

@ -1,5 +1,8 @@
/*! \page logical_imager_page Logical Imager
[TOC]
\section logical_imager_overview Overview
The logical imager allows you to collect files from a live Windows computer. The imager is configured with rules that specify what files to collect. Rules can be based on file attributes such as folder names, extensions, and sizes. You can use this feature when you do not have time or authorization to perform a full drive acquisition.

View File

@ -1,5 +1,8 @@
/*! \page machine_translation_page Machine Translation
[TOC]
You can configure a machine translation service to use with Autopsy. If one is configured, you will be able to translate text in the \ref content_viewer_page and file names in the \ref result_viewer_page and \ref tree_viewer_page.
\section mt_config Configuration

View File

@ -57,6 +57,7 @@ The following topics are available here:
- \subpage tree_viewer_page
- \subpage result_viewer_page
- \subpage content_viewer_page
- \subpage data_source_summary_page
- \subpage machine_translation_page
- Searching

View File

@ -1,5 +1,8 @@
/*! \page manifest_tool_page Manifest Tool
[TOC]
\section manifest_tool_overview Overview
Manifest Tool is an executable designed to assist in the automated creation of manifest files, which are necessary to run Auto Ingest on a data source. There is no installation necessary. To use the tool, double-click on the Manifest Tool executable; when it opens, select the option with the algorithm you wish to run from the combo box, and fill in all the available settings before clicking the Run button. A log with the success or failure of each manifest file it attempts to create will appear in the progress area.

View File

@ -1,5 +1,8 @@
/*! \page object_detection_page Object Detection
[TOC]
\section object_overview Overview
The Object Detection module uses OpenCV to try to detect objects in images.

View File

@ -1,5 +1,8 @@
/*! \page photorec_carver_page PhotoRec Carver Module
[TOC]
\section photorec_overview Overview
The PhotoRec Carver module carves files from unallocated space in the data source and sends the files found through the ingest processing chain.

View File

@ -1,5 +1,8 @@
/*! \page plaso_page Plaso
[TOC]
Plaso is a framework for running modules to extract timestamps for various types of files. The Plaso ingest module runs Plaso to generate events that are displayed in the Autopsy \ref timeline_page. For more information on Plaso, see <a href="https://plaso.readthedocs.io/en/latest/"> the documentation</a>.
\section plaso_config Running the Module

View File

@ -1,5 +1,8 @@
/*! \page portable_case_page Portable Cases
[TOC]
\section portable_case_overview Overview
A portable case is a partial copy of a normal Autopsy case that can be opened from anywhere. It contains a subset of the data from its original case and has been designed to make it easy to share relevant data with other examiners. Portable cases will contain a \ref report_case_uco report detailing the contents of the portable case.

View File

@ -1,5 +1,8 @@
/*! \page quick_start_guide Quick Start Guide
[TOC]
\section s1 Cases and Data Sources
Autopsy organizes data by <strong>case</strong>. Each case can have one or more <strong>data sources</strong>, which can be a disk image, a set of logical files, a USB-connected device, etc.

View File

@ -1,5 +1,8 @@
/*! \page recent_activity_page Recent Activity Module
[TOC]
What Does It Do
========

View File

@ -1,5 +1,8 @@
/*! \page reporting_page Reporting
[TOC]
\section reporting_overview Overview
The report modules allow the user to extract key information from a case in a variety of formats. This includes

View File

@ -1,5 +1,8 @@
/*! \page result_viewer_page Result Viewer
[TOC]
The Result Viewer is located on the top right of the Autopsy screen and shows the contents of what was selected in the \ref tree_viewer_page.
\section result_viewer_table Table Viewers

View File

@ -1,5 +1,8 @@
/*! \page search_all_cases_page Search All Cases
[TOC]
\section search_all_cases_overview Overview
The Search All Cases feature allows you to search the \ref central_repo_page for arbitrary properties. You must have the Central Repository enabled to run

View File

@ -1,4 +1,8 @@
/*! \page stix_page STIX
[TOC]
Overview
========
This document outlines the use of the STIX feature of Autopsy. This feature allows one or more Structured Threat Information Exchange (STIX) files to be run against a data source, reporting which indicators were found in the data source. More information about STIX can be found at https://stix.mitre.org/.

View File

@ -1,5 +1,8 @@
/*! \page tagging_page Tagging and Commenting
[TOC]
Tagging (or Bookmarking) allows you to create a reference to a file or object and easily find it later or include it in a \ref reporting_page "report". Tagging is also used by the \ref central_repo_page "central repository" to mark items as notable. You can add comments to files and results using tags or through the central repository.
\section tagging_items Tagging items

View File

@ -1,5 +1,8 @@
/*! \page timeline_page Timeline
[TOC]
\section timeline_overview Overview
This document outlines the use of the Timeline feature of Autopsy. This feature was funded by DHS S&T to help provide free and open source digital forensics tools to law enforcement. The timeline feature can help answer questions such as these:

View File

@ -1,5 +1,8 @@
/*! \page translations_page Translating Documentation and the UI
[TOC]
The Autopsy user base is global. You can help out by translating the UI and this documentation.
\section translations_doc Translating Documentation

View File

@ -1,5 +1,8 @@
/*! \page tree_viewer_page Tree Viewer
[TOC]
The tree on the left-hand side of the main window is where you can browse the files in the data sources in the case and find saved results from automated analysis (ingest). The tree has five main areas:
- <b>Data Sources:</b> This shows the directory tree hierarchy of the data sources. You can navigate to a specific file or directory here. Each data source added to the case is represented as a distinct sub tree. If you add a data source multiple times, it shows up multiple times.
- <b>Views:</b> Specific types of files from the data sources are shown here, aggregated by type or other properties. Files here can come from more than one data source.

View File

@ -1,5 +1,8 @@
/*! \page triage_page Triage
[TOC]
\section triage_overview Overview
Sometimes you need to make a quick decision about a system or systems and don't have the time or resources to make full images. For example, during a knock and talk you want to know if there is notable data on their system. Or you are at a location with many systems and want to know which should be analyzed first. Autopsy has features that will allow you to quickly find the data of interest without making full images of the devices. Those features will be described below, followed by some example scenarios that show how to put everything together.

View File

@ -1,5 +1,8 @@
/*! \page troubleshooting_page Troubleshooting
[TOC]
If you are experiencing an error, we encourage you to post on the forum (https://sleuthkit.discourse.group/), including as much information as possible:
<ul>
<li>Your operating system and version of Autopsy

View File

@ -1,5 +1,8 @@
/*! \page ui_quick_search UI Quick Search
[TOC]
The user interface quick search feature allows you to search within the data on a panel for a given string, it will not search data in hidden columns or collapsed nodes.
How to use it

View File

@ -1,6 +1,9 @@
/*! \page uilayout_page UI Layout
[TOC]
<br>
\section ui_overview Overview

View File

@ -1,5 +1,8 @@
/*! \page view_options_page View Options
[TOC]
The View options allow you to configure how data is displayed in the Autopsy user interface.
There are two ways to access the options. The first way is through the gear icon above the directory tree:

View File

@ -1,5 +1,8 @@
/*! \page volatility_dsp_page Volatility Data Source Processor
[TOC]
\section Overview
The Volatility data source processor runs Volatility on a memory image and saves the individual Volatility module results. If the disk image associated with the memory image is also available, it will create Interesting Item artifacts linking the Volatility results to files in the disk image.