Merge branch 'develop' of github.com:sleuthkit/autopsy into 7333-junitWarnings
1
.gitignore
vendored
@ -3,6 +3,7 @@
|
||||
/*/build/
|
||||
*/nbproject/private/*
|
||||
/nbproject/private/*
|
||||
/apidiff_output/
|
||||
|
||||
/Core/release/
|
||||
/Core/src/org/sleuthkit/autopsy/coreutils/Version.properties
|
||||
|
@ -31,6 +31,17 @@
|
||||
</target>
|
||||
|
||||
<target name="get-thirdparty-dependencies" description="get third-party dependencies">
|
||||
<!--
|
||||
Copy netbeans localization jars:
|
||||
This contains jars provided in Netbeans 8 RCP that provide localization bundles.
|
||||
They do not appear to be included in Netbeans >= 9.
|
||||
See VIK-7434 for more information.
|
||||
-->
|
||||
<mkdir dir="${modules.dir}/locale"/>
|
||||
<copy todir="${modules.dir}/locale" >
|
||||
<fileset dir="${thirdparty.dir}/NetbeansLocalization"/>
|
||||
</copy>
|
||||
|
||||
<!--Copy photorec to release-->
|
||||
<copy todir="${basedir}/release/photorec_exec" >
|
||||
<fileset dir="${thirdparty.dir}/photorec_exec"/>
|
||||
|
@ -45,3 +45,4 @@ OpenPythonModulesFolderAction.actionName.text=Python Plugins
|
||||
OpenPythonModulesFolderAction.errorMsg.folderNotFound=Python plugins folder not found: {0}
|
||||
CTL_OpenPythonModulesFolderAction=Python Plugins
|
||||
GetTagNameAndCommentDialog.tagCombo.toolTipText=Select tag to use
|
||||
CTL_ExitAction=Exit
|
@ -96,3 +96,4 @@ OpenPythonModulesFolderAction.actionName.text=Python Plugins
|
||||
OpenPythonModulesFolderAction.errorMsg.folderNotFound=Python plugins folder not found: {0}
|
||||
CTL_OpenPythonModulesFolderAction=Python Plugins
|
||||
GetTagNameAndCommentDialog.tagCombo.toolTipText=Select tag to use
|
||||
CTL_ExitAction=Exit
|
||||
|
@ -40,7 +40,7 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
|
||||
* The action associated with the Case/Exit menu item. It closes the current
|
||||
* case, if any, and shuts down the application.
|
||||
*/
|
||||
@ActionRegistration(displayName = "Exit", iconInMenu = true)
|
||||
@ActionRegistration(displayName = "#CTL_ExitAction", iconInMenu = true)
|
||||
@ActionReference(path = "Menu/Case", position = 1000, separatorBefore = 999)
|
||||
@ActionID(id = "org.sleuthkit.autopsy.casemodule.ExitAction", category = "Case")
|
||||
final public class ExitAction implements ActionListener {
|
||||
|
@ -4,7 +4,6 @@ CTL_CaseCloseAct=Close Case
|
||||
CTL_CaseNewAction=New Case
|
||||
CTL_CaseDetailsAction=Case Details
|
||||
CTL_CaseDeleteAction=Delete Case
|
||||
Menu/Case/OpenRecentCase=Open Recent Case
|
||||
CTL_CaseDeleteAction=Delete Case
|
||||
OpenIDE-Module-Name=Case
|
||||
NewCaseVisualPanel1.caseNameLabel.text_1=Case Name:
|
||||
|
@ -128,6 +128,7 @@ CTL_CaseCloseAct=Close Case
|
||||
CTL_CaseNewAction=New Case
|
||||
CTL_CaseDetailsAction=Case Details
|
||||
CTL_CaseDeleteAction=Delete Case
|
||||
CTL_CaseDeleteAction=Delete Case
|
||||
CTL_CaseOpenAction=Open Case
|
||||
CTL_UnpackagePortableCaseAction=Unpack and Open Portable Case
|
||||
DeleteDataSourceAction.confirmationDialog.message=Are you sure you want to remove the selected data source from the case?\nNote that the case will be closed and re-opened during the removal.
|
||||
@ -186,8 +187,6 @@ LogicalEvidenceFilePanel.pathValidation.getOpenCase.Error=Warning: Exception whi
|
||||
LogicalEvidenceFilePanel.validatePanel.nonL01Error.text=Only files with the .l01 file extension are supported here.
|
||||
LogicalFilesDspPanel.subTypeComboBox.l01FileOption.text=Logical evidence file (L01)
|
||||
LogicalFilesDspPanel.subTypeComboBox.localFilesOption.text=Local files and folders
|
||||
Menu/Case/OpenRecentCase=Open Recent Case
|
||||
CTL_CaseDeleteAction=Delete Case
|
||||
OpenIDE-Module-Name=Case
|
||||
NewCaseVisualPanel1.caseNameLabel.text_1=Case Name:
|
||||
NewCaseVisualPanel1.caseDirLabel.text=Base Directory:
|
||||
|
@ -62,6 +62,7 @@ import javax.annotation.concurrent.GuardedBy;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import javax.swing.JOptionPane;
|
||||
import javax.swing.SwingUtilities;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.openide.util.Lookup;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.openide.util.NbBundle.Messages;
|
||||
@ -157,9 +158,10 @@ import org.sleuthkit.datamodel.TskUnsupportedSchemaVersionException;
|
||||
*/
|
||||
public class Case {
|
||||
|
||||
private static final String CASE_TEMP_DIR = Case.class.getSimpleName();
|
||||
private static final int CASE_LOCK_TIMEOUT_MINS = 1;
|
||||
private static final int CASE_RESOURCES_LOCK_TIMEOUT_HOURS = 1;
|
||||
private static final String APP_NAME = UserPreferences.getAppName();
|
||||
private static final String TEMP_FOLDER = "Temp";
|
||||
private static final String SINGLE_USER_CASE_DB_NAME = "autopsy.db";
|
||||
private static final String EVENT_CHANNEL_NAME = "%s-Case-Events"; //NON-NLS
|
||||
private static final String CACHE_FOLDER = "Cache"; //NON-NLS
|
||||
@ -496,35 +498,35 @@ public class Case {
|
||||
event.getArtifacts(artifactType)));
|
||||
}
|
||||
}
|
||||
|
||||
@Subscribe
|
||||
|
||||
@Subscribe
|
||||
public void publishOsAccountAddedEvent(TskEvent.OsAccountsAddedTskEvent event) {
|
||||
for(OsAccount account: event.getOsAcounts()) {
|
||||
for (OsAccount account : event.getOsAcounts()) {
|
||||
eventPublisher.publish(new OsAccountAddedEvent(account));
|
||||
}
|
||||
}
|
||||
|
||||
@Subscribe
|
||||
|
||||
@Subscribe
|
||||
public void publishOsAccountChangedEvent(TskEvent.OsAccountsChangedTskEvent event) {
|
||||
for(OsAccount account: event.getOsAcounts()) {
|
||||
for (OsAccount account : event.getOsAcounts()) {
|
||||
eventPublisher.publish(new OsAccountChangedEvent(account));
|
||||
}
|
||||
}
|
||||
|
||||
@Subscribe
|
||||
|
||||
@Subscribe
|
||||
public void publishOsAccountDeletedEvent(TskEvent.OsAccountsDeletedTskEvent event) {
|
||||
for(Long accountId: event.getOsAcountObjectIds()) {
|
||||
for (Long accountId : event.getOsAcountObjectIds()) {
|
||||
eventPublisher.publish(new OsAccountDeletedEvent(accountId));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Publishes an autopsy event from the sleuthkit HostAddedEvent
|
||||
* Publishes an autopsy event from the sleuthkit HostAddedEvent
|
||||
* indicating that hosts have been created.
|
||||
*
|
||||
* @param event The sleuthkit event for the creation of hosts.
|
||||
*/
|
||||
@Subscribe
|
||||
@Subscribe
|
||||
public void publishHostsAddedEvent(TskEvent.HostsAddedTskEvent event) {
|
||||
eventPublisher.publish(new HostsAddedEvent(
|
||||
event == null ? Collections.emptyList() : event.getHosts()));
|
||||
@ -535,8 +537,8 @@ public class Case {
|
||||
* indicating that hosts have been updated.
|
||||
*
|
||||
* @param event The sleuthkit event for the updating of hosts.
|
||||
*/
|
||||
@Subscribe
|
||||
*/
|
||||
@Subscribe
|
||||
public void publishHostsChangedEvent(TskEvent.HostsChangedTskEvent event) {
|
||||
eventPublisher.publish(new HostsChangedEvent(
|
||||
event == null ? Collections.emptyList() : event.getHosts()));
|
||||
@ -547,32 +549,32 @@ public class Case {
|
||||
* indicating that hosts have been deleted.
|
||||
*
|
||||
* @param event The sleuthkit event for the deleting of hosts.
|
||||
*/
|
||||
@Subscribe
|
||||
*/
|
||||
@Subscribe
|
||||
public void publishHostsDeletedEvent(TskEvent.HostsDeletedTskEvent event) {
|
||||
eventPublisher.publish(new HostsRemovedEvent(
|
||||
event == null ? Collections.emptyList() : event.getHosts()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Publishes an autopsy event from the sleuthkit PersonAddedEvent
|
||||
* Publishes an autopsy event from the sleuthkit PersonAddedEvent
|
||||
* indicating that persons have been created.
|
||||
*
|
||||
* @param event The sleuthkit event for the creation of persons.
|
||||
*/
|
||||
@Subscribe
|
||||
@Subscribe
|
||||
public void publishPersonsAddedEvent(TskEvent.PersonsAddedTskEvent event) {
|
||||
eventPublisher.publish(new PersonsAddedEvent(
|
||||
event == null ? Collections.emptyList() : event.getPersons()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Publishes an autopsy event from the sleuthkit PersonChangedEvent
|
||||
* Publishes an autopsy event from the sleuthkit PersonChangedEvent
|
||||
* indicating that persons have been updated.
|
||||
*
|
||||
* @param event The sleuthkit event for the updating of persons.
|
||||
*/
|
||||
@Subscribe
|
||||
*/
|
||||
@Subscribe
|
||||
public void publishPersonsChangedEvent(TskEvent.PersonsChangedTskEvent event) {
|
||||
eventPublisher.publish(new PersonsChangedEvent(
|
||||
event == null ? Collections.emptyList() : event.getPersons()));
|
||||
@ -583,8 +585,8 @@ public class Case {
|
||||
* indicating that persons have been deleted.
|
||||
*
|
||||
* @param event The sleuthkit event for the deleting of persons.
|
||||
*/
|
||||
@Subscribe
|
||||
*/
|
||||
@Subscribe
|
||||
public void publishPersonsDeletedEvent(TskEvent.PersonsDeletedTskEvent event) {
|
||||
eventPublisher.publish(new PersonsDeletedEvent(
|
||||
event == null ? Collections.emptyList() : event.getPersons()));
|
||||
@ -1469,6 +1471,13 @@ public class Case {
|
||||
return hostPath.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return A subdirectory of java.io.tmpdir.
|
||||
*/
|
||||
private Path getBaseSystemTempPath() {
|
||||
return Paths.get(System.getProperty("java.io.tmpdir"), APP_NAME, getName());
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the full path to the temp directory for this case, creating it if it
|
||||
* does not exist.
|
||||
@ -1476,7 +1485,45 @@ public class Case {
|
||||
* @return The temp subdirectory path.
|
||||
*/
|
||||
public String getTempDirectory() {
|
||||
return UserMachinePreferences.getTempDirectory();
|
||||
// NOTE: UserPreferences may also be affected by changes in this method.
|
||||
// See JIRA-7505 for more information.
|
||||
Path basePath = null;
|
||||
// get base temp path for the case based on user preference
|
||||
switch (UserMachinePreferences.getTempDirChoice()) {
|
||||
case CUSTOM:
|
||||
String customDirectory = UserMachinePreferences.getCustomTempDirectory();
|
||||
basePath = (StringUtils.isBlank(customDirectory))
|
||||
? null
|
||||
: Paths.get(customDirectory, APP_NAME, getName());
|
||||
break;
|
||||
case CASE:
|
||||
basePath = Paths.get(getCaseDirectory());
|
||||
break;
|
||||
case SYSTEM:
|
||||
default:
|
||||
// at this level, if the case directory is specified for a temp
|
||||
// directory, return the system temp directory instead.
|
||||
basePath = getBaseSystemTempPath();
|
||||
break;
|
||||
}
|
||||
|
||||
basePath = basePath == null ? getBaseSystemTempPath() : basePath;
|
||||
|
||||
// get sub directories based on multi user vs. single user
|
||||
Path caseRelPath = (CaseType.MULTI_USER_CASE.equals(getCaseType()))
|
||||
? Paths.get(NetworkUtils.getLocalHostName(), TEMP_FOLDER)
|
||||
: Paths.get(TEMP_FOLDER);
|
||||
|
||||
File caseTempDir = basePath
|
||||
.resolve(caseRelPath)
|
||||
.toFile();
|
||||
|
||||
// ensure directory exists
|
||||
if (!caseTempDir.exists()) {
|
||||
caseTempDir.mkdirs();
|
||||
}
|
||||
|
||||
return caseTempDir.getAbsolutePath();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -27,3 +27,7 @@ ServicesMonitor.remoteKeywordSearch.displayName.text=Multi-user keyword search s
|
||||
ServicesMonitor.messaging.displayName.text=Messaging service
|
||||
ServicesMonitor.databaseConnectionInfo.error.msg=Error accessing case database connection info
|
||||
ServicesMonitor.messagingService.connErr.text=Error accessing messaging service connection info
|
||||
Actions/Case=Case
|
||||
Menu/Case=Case
|
||||
Toolbars/Case=Case
|
||||
Menu/Case/OpenRecentCase=Open Recent Case
|
@ -31,3 +31,7 @@ ServicesMonitor.remoteKeywordSearch.displayName.text=Multi-user keyword search s
|
||||
ServicesMonitor.messaging.displayName.text=Messaging service
|
||||
ServicesMonitor.databaseConnectionInfo.error.msg=Error accessing case database connection info
|
||||
ServicesMonitor.messagingService.connErr.text=Error accessing messaging service connection info
|
||||
Actions/Case=Case
|
||||
Menu/Case=Case
|
||||
Toolbars/Case=Case
|
||||
Menu/Case/OpenRecentCase=Open Recent Case
|
||||
|
@ -18,12 +18,14 @@
|
||||
*/
|
||||
package org.sleuthkit.autopsy.core;
|
||||
|
||||
import java.io.File;
|
||||
import java.nio.file.Paths;
|
||||
import org.sleuthkit.autopsy.coreutils.TextConverter;
|
||||
import java.util.prefs.BackingStoreException;
|
||||
import org.sleuthkit.autopsy.events.MessageServiceConnectionInfo;
|
||||
import java.util.prefs.PreferenceChangeListener;
|
||||
import java.util.prefs.Preferences;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.openide.util.NbPreferences;
|
||||
import org.python.icu.util.TimeZone;
|
||||
import org.sleuthkit.autopsy.machinesettings.UserMachinePreferences;
|
||||
@ -93,7 +95,8 @@ public final class UserPreferences {
|
||||
private static final String GEO_OSM_SERVER_ADDRESS = "GeolocationOsmServerAddress";
|
||||
private static final String GEO_MBTILES_FILE_PATH = "GeolcoationMBTilesFilePath";
|
||||
private static final String HEALTH_MONITOR_REPORT_PATH = "HealthMonitorReportPath";
|
||||
|
||||
private static final String TEMP_FOLDER = "Temp";
|
||||
|
||||
// Prevent instantiation.
|
||||
private UserPreferences() {
|
||||
}
|
||||
@ -348,27 +351,27 @@ public final class UserPreferences {
|
||||
public static void setIndexingServerPort(int port) {
|
||||
preferences.putInt(SOLR8_SERVER_PORT, port);
|
||||
}
|
||||
|
||||
|
||||
public static String getSolr4ServerHost() {
|
||||
return preferences.get(SOLR4_SERVER_HOST, "");
|
||||
}
|
||||
|
||||
public static void setSolr4ServerHost(String hostName) {
|
||||
preferences.put(SOLR4_SERVER_HOST, hostName);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public static String getSolr4ServerPort() {
|
||||
return preferences.get(SOLR4_SERVER_PORT, "");
|
||||
}
|
||||
|
||||
public static void setSolr4ServerPort(String port) {
|
||||
preferences.put(SOLR4_SERVER_PORT, port);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public static String getZkServerHost() {
|
||||
return preferences.get(ZK_SERVER_HOST, "");
|
||||
}
|
||||
|
||||
|
||||
public static void setZkServerHost(String hostName) {
|
||||
preferences.put(ZK_SERVER_HOST, hostName);
|
||||
}
|
||||
@ -380,7 +383,7 @@ public final class UserPreferences {
|
||||
public static void setZkServerPort(String port) {
|
||||
preferences.put(ZK_SERVER_PORT, port);
|
||||
}
|
||||
|
||||
|
||||
public static void setTextTranslatorName(String textTranslatorName) {
|
||||
preferences.put(TEXT_TRANSLATOR_NAME, textTranslatorName);
|
||||
}
|
||||
@ -388,14 +391,14 @@ public final class UserPreferences {
|
||||
public static String getTextTranslatorName() {
|
||||
return preferences.get(TEXT_TRANSLATOR_NAME, null);
|
||||
}
|
||||
|
||||
|
||||
public static void setUseOcrInTranslation(boolean enableOcr) {
|
||||
preferences.putBoolean(OCR_TRANSLATION_ENABLED, enableOcr);
|
||||
}
|
||||
|
||||
public static boolean getUseOcrInTranslation() {
|
||||
return preferences.getBoolean(OCR_TRANSLATION_ENABLED, true);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Persists message service connection info.
|
||||
@ -536,10 +539,11 @@ public final class UserPreferences {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the maximum JVM heap size (in MB) for the embedded Solr server. The returned value
|
||||
* depends on the platform (64bit vs 32bit).
|
||||
* Get the maximum JVM heap size (in MB) for the embedded Solr server. The
|
||||
* returned value depends on the platform (64bit vs 32bit).
|
||||
*
|
||||
* @return Saved value or default (2 GB for 64bit platforms, 512MB for 32bit)
|
||||
* @return Saved value or default (2 GB for 64bit platforms, 512MB for
|
||||
* 32bit)
|
||||
*/
|
||||
public static int getMaxSolrVMSize() {
|
||||
if (PlatformUtil.is64BitJVM()) {
|
||||
@ -594,20 +598,21 @@ public final class UserPreferences {
|
||||
public static String getExternalHexEditorPath() {
|
||||
return preferences.get(EXTERNAL_HEX_EDITOR_PATH, Paths.get("C:", "Program Files", "HxD", "HxD.exe").toString());
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Set the geolocation tile server option.
|
||||
*
|
||||
* @param option
|
||||
*
|
||||
* @param option
|
||||
*/
|
||||
public static void setGeolocationTileOption(int option) {
|
||||
preferences.putInt(GEO_TILE_OPTION, option);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the Geolocation tile option. If not found, the value will
|
||||
* Retrieves the Geolocation tile option. If not found, the value will
|
||||
* default to 0.
|
||||
* @return
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public static int getGeolocationtTileOption() {
|
||||
return preferences.getInt(GEO_TILE_OPTION, 0);
|
||||
@ -615,8 +620,8 @@ public final class UserPreferences {
|
||||
|
||||
/**
|
||||
* Sets the path to the OSM tile zip file.
|
||||
*
|
||||
* @param absolutePath
|
||||
*
|
||||
* @param absolutePath
|
||||
*/
|
||||
public static void setGeolocationOsmZipPath(String absolutePath) {
|
||||
preferences.put(GEO_OSM_TILE_ZIP_PATH, absolutePath);
|
||||
@ -625,7 +630,7 @@ public final class UserPreferences {
|
||||
/**
|
||||
* Retrieves the path for the OSM tile zip file or returns empty string if
|
||||
* none was found.
|
||||
*
|
||||
*
|
||||
* @return Path to zip file
|
||||
*/
|
||||
public static String getGeolocationOsmZipPath() {
|
||||
@ -633,9 +638,10 @@ public final class UserPreferences {
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the address of geolocation window user defined OSM server data source.
|
||||
*
|
||||
* @param address
|
||||
* Sets the address of geolocation window user defined OSM server data
|
||||
* source.
|
||||
*
|
||||
* @param address
|
||||
*/
|
||||
public static void setGeolocationOsmServerAddress(String address) {
|
||||
preferences.put(GEO_OSM_SERVER_ADDRESS, address);
|
||||
@ -643,40 +649,72 @@ public final class UserPreferences {
|
||||
|
||||
/**
|
||||
* Retrieves the address to the OSM server or null if one was not found.
|
||||
*
|
||||
*
|
||||
* @return Address of OSM server
|
||||
*/
|
||||
public static String getGeolocationOsmServerAddress() {
|
||||
return preferences.get(GEO_OSM_SERVER_ADDRESS, "");
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Sets the path for Geolocation MBTiles data source file.
|
||||
*
|
||||
* @param absolutePath
|
||||
*
|
||||
* @param absolutePath
|
||||
*/
|
||||
public static void setGeolocationMBTilesFilePath(String absolutePath) {
|
||||
preferences.put(GEO_MBTILES_FILE_PATH, absolutePath);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Retrieves the path for the Geolocation MBTiles data source file.
|
||||
*
|
||||
* @return Absolute path to MBTiles file or empty string if none was found.
|
||||
*
|
||||
* @return Absolute path to MBTiles file or empty string if none was found.
|
||||
*/
|
||||
public static String getGeolocationMBTilesFilePath() {
|
||||
return preferences.get(GEO_MBTILES_FILE_PATH, "");
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Retrieves the root application temp directory.
|
||||
*
|
||||
* @return A subdirectory of java.io.tmpdir.
|
||||
*/
|
||||
private static File getSystemTempDirFile() {
|
||||
return Paths.get(System.getProperty("java.io.tmpdir"), getAppName(), TEMP_FOLDER).toFile();
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the application temp directory and ensures the directory
|
||||
* exists.
|
||||
*
|
||||
* @return The absolute path to the application temp directory.
|
||||
*/
|
||||
public static String getAppTempDirectory() {
|
||||
return UserMachinePreferences.getTempDirectory();
|
||||
// NOTE: If this code changes, Case.getTempDirectory() should likely be checked
|
||||
// as well. See JIRA 7505 for more information.
|
||||
File appTempDir = null;
|
||||
switch (UserMachinePreferences.getTempDirChoice()) {
|
||||
case CUSTOM:
|
||||
String customDirectory = UserMachinePreferences.getCustomTempDirectory();
|
||||
appTempDir = (StringUtils.isBlank(customDirectory))
|
||||
? null
|
||||
: Paths.get(customDirectory, getAppName(), TEMP_FOLDER).toFile();
|
||||
break;
|
||||
case SYSTEM:
|
||||
default:
|
||||
// at this level, if the case directory is specified for a temp
|
||||
// directory, return the system temp directory instead.
|
||||
appTempDir = getSystemTempDirFile();
|
||||
break;
|
||||
}
|
||||
|
||||
appTempDir = appTempDir == null ? getSystemTempDirFile() : appTempDir;
|
||||
|
||||
if (!appTempDir.exists()) {
|
||||
appTempDir.mkdirs();
|
||||
}
|
||||
|
||||
return appTempDir.getAbsolutePath();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Set the last used health monitor report path.
|
||||
*
|
||||
@ -689,9 +727,10 @@ public final class UserPreferences {
|
||||
/**
|
||||
* Gets the last used health monitor report path.
|
||||
*
|
||||
* @return Last used health monitor report path. Empty string if no value has been recorded.
|
||||
* @return Last used health monitor report path. Empty string if no value
|
||||
* has been recorded.
|
||||
*/
|
||||
public static String getHealthMonitorReportPath() {
|
||||
return preferences.get(HEALTH_MONITOR_REPORT_PATH, "");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -40,6 +40,7 @@
|
||||
====================================================== -->
|
||||
<folder name="Actions">
|
||||
<folder name="Case">
|
||||
<attr name="SystemFileSystem.localizingBundle" stringvalue="org.sleuthkit.autopsy.core.Bundle"/>
|
||||
<file name="org-sleuthkit-autopsy-casemodule-AddImageAction.instance"/>
|
||||
<file name="org-sleuthkit-autopsy-casemodule-CaseCloseAction.instance"/>
|
||||
<file name="org-sleuthkit-autopsy-casemodule-CaseNewAction.instance">
|
||||
@ -140,13 +141,14 @@
|
||||
<file name="Edit_hidden"/>
|
||||
<file name="File_hidden"/>
|
||||
<folder name="Case">
|
||||
<attr name="SystemFileSystem.localizingBundle" stringvalue="org.sleuthkit.autopsy.core.Bundle"/>
|
||||
<file name="org-sleuthkit-autopsy-casemodule-CaseNewAction.shadow">
|
||||
<attr name="originalFile" stringvalue="Actions/Case/org-sleuthkit-autopsy-casemodule-CaseNewAction.instance"/>
|
||||
<attr name="position" intvalue="100"/>
|
||||
</file>
|
||||
<folder name="Open Recent Case">
|
||||
<folder name="OpenRecentCase">
|
||||
<attr name="SystemFileSystem.localizingBundle" stringvalue="org.sleuthkit.autopsy.core.Bundle"/>
|
||||
<attr name="position" intvalue="101"/>
|
||||
<attr name="SystemFileSystem.localizingBundle" stringvalue="org.sleuthkit.autopsy.casemodule.Bundle"/>
|
||||
<file name="org-sleuthkit-autopsy-casemodule-RecentCasesAction.shadow">
|
||||
<attr name="originalFile" stringvalue="Actions/Case/org-sleuthkit-autopsy-casemodule-RecentCases.instance"/>
|
||||
</file>
|
||||
@ -281,7 +283,6 @@
|
||||
<folder name="Help">
|
||||
<file name="org-netbeans-core-actions-AboutAction.shadow_hidden"/>
|
||||
<file name="org-netbeans-modules-autoupdate-ui-actions-CheckForUpdatesAction.shadow_hidden"/>
|
||||
<attr name="master-help.xml/org-sleuthkit-autopsy-corecomponents-CustomAboutAction.shadow" boolvalue="true"/>
|
||||
</folder>
|
||||
</folder>
|
||||
|
||||
@ -378,6 +379,7 @@
|
||||
<file name="UndoRedo_hidden"/>
|
||||
<file name="File_hidden"/>
|
||||
<folder name="Case">
|
||||
<attr name="SystemFileSystem.localizingBundle" stringvalue="org.sleuthkit.autopsy.core.Bundle"/>
|
||||
<attr name="position" intvalue="90"/>
|
||||
<!--<file name="org-sleuthkit-autopsy-casemodule-AddImageAction.instance">
|
||||
<attr name="delegate" newvalue="org.sleuthkit.autopsy.casemodule.AddImageAction"/>
|
||||
|
@ -24,15 +24,14 @@ import org.openide.DialogDescriptor;
|
||||
import org.openide.DialogDisplayer;
|
||||
import org.openide.awt.ActionID;
|
||||
import org.openide.awt.ActionReference;
|
||||
import org.openide.awt.ActionRegistration;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.openide.util.NbBundle.Messages;
|
||||
|
||||
/**
|
||||
* Action to open custom implementation of the "About" window from the Help
|
||||
* menu.
|
||||
*/
|
||||
@ActionID(id = "org.sleuthkit.autopsy.corecomponents.AboutWindowAction", category = "Help")
|
||||
@ActionRegistration(displayName = "#CTL_CustomAboutAction", iconInMenu = true, lazy = false)
|
||||
@ActionReference(path = "Menu/Help", position = 3000, separatorBefore = 2999)
|
||||
public class AboutWindowAction extends AboutAction {
|
||||
|
||||
|
@ -1,6 +1,5 @@
|
||||
CTL_DataContentAction=DataContent
|
||||
CTL_DataContentTopComponent=Data Content
|
||||
CTL_CustomAboutAction=About
|
||||
OptionsCategory_Name_General=Application
|
||||
OptionsCategory_Keywords_General=Autopsy Options
|
||||
HINT_DataContentTopComponent=This is a DataContent window
|
||||
|
@ -28,7 +28,6 @@ AutopsyOptionsPanel_tempDirectoryBrowseButtonActionPerformed_onInvalidPath_descr
|
||||
AutopsyOptionsPanel_tempDirectoryBrowseButtonActionPerformed_onInvalidPath_title=Path cannot be used
|
||||
CTL_DataContentAction=DataContent
|
||||
CTL_DataContentTopComponent=Data Content
|
||||
CTL_CustomAboutAction=About
|
||||
CTL_OfflineHelpAction=Offline Autopsy Documentation
|
||||
CTL_OnlineHelpAction=Online Autopsy Documentation
|
||||
DataContentViewerArtifact.failedToGetAttributes.message=Failed to get some or all attributes from case database
|
||||
|
@ -1,4 +1,3 @@
|
||||
CTL_OpenGeolocation=Geolocation
|
||||
CTL_GeolocationTopComponentAction=GeolocationTopComponent
|
||||
CTL_GeolocationTopComponent=Geolocation
|
||||
RefreshPanel.refreshLabel.text=The geolocation data has been updated, the visualization may be out of date.
|
||||
|
@ -1,6 +1,6 @@
|
||||
CTL_OpenGeolocation=Geolocation
|
||||
CTL_GeolocationTopComponentAction=GeolocationTopComponent
|
||||
CTL_GeolocationTopComponent=Geolocation
|
||||
CTL_OpenGeolocation=Geolocation
|
||||
GeoFilterPanel_ArtifactType_List_Title=Types
|
||||
GeoFilterPanel_DataSource_List_Title=Data Sources
|
||||
GeoFilterPanel_empty_artifactType=Unable to apply filter, please select one or more artifact types.
|
||||
@ -40,8 +40,6 @@ HidingPane_default_title=Filters
|
||||
MapPanel_connection_failure_message=Failed to connect to new geolocation map tile source.
|
||||
MapPanel_connection_failure_message_title=Connection Failure
|
||||
MayWaypoint_ExternalViewer_label=Open in ExternalViewer
|
||||
OpenGeolocationAction_displayName=Geolocation
|
||||
OpenGeolocationAction_name=Geolocation
|
||||
RefreshPanel.refreshLabel.text=The geolocation data has been updated, the visualization may be out of date.
|
||||
RefreshPanel.refreshButton.text=Refresh View
|
||||
RefreshPanel.closeButton.text=
|
||||
|
@ -46,17 +46,13 @@ import org.sleuthkit.autopsy.core.RuntimeProperties;
|
||||
@ActionReferences(value = {
|
||||
@ActionReference(path = "Menu/Tools", position = 103),
|
||||
@ActionReference(path = "Toolbars/Case", position = 103)})
|
||||
@Messages({"CTL_OpenGeolocation=Geolocation"})
|
||||
public class OpenGeolocationAction extends CallableSystemAction {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
private final JButton toolbarButton = new JButton(getName(),
|
||||
new ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/blueGeo24.png"))); //NON-NLS
|
||||
|
||||
@Messages({
|
||||
"OpenGeolocationAction_name=Geolocation",
|
||||
"OpenGeolocationAction_displayName=Geolocation"
|
||||
})
|
||||
|
||||
/**
|
||||
* Constructs the new action of opening the Geolocation window.
|
||||
*/
|
||||
@ -98,7 +94,7 @@ public class OpenGeolocationAction extends CallableSystemAction {
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return Bundle.OpenGeolocationAction_displayName();
|
||||
return Bundle.CTL_OpenGeolocation();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -21,21 +21,19 @@ package org.sleuthkit.autopsy.machinesettings;
|
||||
import java.io.File;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Optional;
|
||||
import java.util.logging.Level;
|
||||
import java.util.prefs.Preferences;
|
||||
import java.util.stream.Stream;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.openide.util.NbPreferences;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.core.UserPreferences;
|
||||
import org.sleuthkit.autopsy.coreutils.FileUtil;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.NetworkUtils;
|
||||
|
||||
/**
|
||||
* Provides case-specific settings like the user-specified temp folder.
|
||||
*
|
||||
* NOTE: The Case class also handles providing a temp directory. When altering
|
||||
* code in this class, also look at the Case class as well.
|
||||
*/
|
||||
public final class UserMachinePreferences {
|
||||
|
||||
@ -64,6 +62,7 @@ public final class UserMachinePreferences {
|
||||
* (whitespace and case insensitive).
|
||||
*
|
||||
* @param val The string value.
|
||||
*
|
||||
* @return The choice or empty if not found.
|
||||
*/
|
||||
static Optional<TempDirChoice> getValue(String val) {
|
||||
@ -80,98 +79,8 @@ public final class UserMachinePreferences {
|
||||
private static final String CUSTOM_TEMP_DIR_KEY = "TempDirectory";
|
||||
private static final String TEMP_DIR_CHOICE_KEY = "TempDirChoice";
|
||||
|
||||
private static final String AUTOPSY_SUBDIR = UserPreferences.getAppName();
|
||||
private static final String CASE_SUBDIR = "Temp";
|
||||
|
||||
private static final TempDirChoice DEFAULT_CHOICE = TempDirChoice.SYSTEM;
|
||||
|
||||
/**
|
||||
* Returns the name of this computer's host name to be used as a directory
|
||||
* in some instances.
|
||||
*
|
||||
* @return The name of this computer's host name to be used as a directory
|
||||
* in some instances.
|
||||
*/
|
||||
private static String getHostName() {
|
||||
return NetworkUtils.getLocalHostName();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return A subdirectory of java.io.tmpdir.
|
||||
*/
|
||||
private static File getSystemTempDirFile() {
|
||||
return Paths.get(System.getProperty("java.io.tmpdir"), AUTOPSY_SUBDIR).toFile();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return A subdirectory of the open case or getSystemTempDirFile if no
|
||||
* open case.
|
||||
*/
|
||||
private static File getCaseTempDirFile() {
|
||||
try {
|
||||
Case autCase = Case.getCurrentCaseThrows();
|
||||
String caseDirStr = autCase.getCaseDirectory();
|
||||
switch (autCase.getCaseType()) {
|
||||
case MULTI_USER_CASE: return Paths.get(caseDirStr, getHostName(), CASE_SUBDIR).toFile();
|
||||
case SINGLE_USER_CASE: return Paths.get(caseDirStr, CASE_SUBDIR).toFile();
|
||||
default:
|
||||
logger.log(Level.SEVERE, "Unknown case type: " + autCase.getCaseType());
|
||||
return getSystemTempDirFile();
|
||||
}
|
||||
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
return getSystemTempDirFile();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the custom directory subdirectory to be used for temp files
|
||||
* (otherwise java.io.tmpdir subdir).
|
||||
*
|
||||
* @return A subdirectory of the custom user-specified path. If no path is
|
||||
* specified, getSystemTempDirFile() is returned instead.
|
||||
*/
|
||||
private static File getCustomTempDirFile() {
|
||||
String customDirectory = getCustomTempDirectory();
|
||||
return (StringUtils.isBlank(customDirectory))
|
||||
? getSystemTempDirFile() : Paths.get(customDirectory, AUTOPSY_SUBDIR, getHostName()).toFile();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the temp directory file to use based on user choice.
|
||||
*
|
||||
* @return The directory.
|
||||
*/
|
||||
private static File getTempDirFile() {
|
||||
TempDirChoice choice = getTempDirChoice();
|
||||
switch (choice) {
|
||||
case CASE:
|
||||
return getCaseTempDirFile();
|
||||
case CUSTOM:
|
||||
return getCustomTempDirFile();
|
||||
case SYSTEM:
|
||||
default:
|
||||
return getSystemTempDirFile();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the temp directory to use based on settings. This method also
|
||||
* ensures the temp directory has been created.
|
||||
*
|
||||
* @return The base user-specified temporary directory.
|
||||
*/
|
||||
public static String getTempDirectory() {
|
||||
File dir = getTempDirFile();
|
||||
dir = dir == null ? getSystemTempDirFile() : dir;
|
||||
|
||||
if (!dir.exists()) {
|
||||
dir.mkdirs();
|
||||
}
|
||||
|
||||
return dir.getAbsolutePath();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The user-specified custom temp directory path or empty string.
|
||||
*/
|
||||
@ -188,7 +97,8 @@ public final class UserMachinePreferences {
|
||||
* @return True if this is a valid location for a temp directory.
|
||||
*
|
||||
* @throws UserMachinePreferencesException If path could not be validated
|
||||
* due to mkdirs failure or the directory is not read/write.
|
||||
* due to mkdirs failure or the
|
||||
* directory is not read/write.
|
||||
*/
|
||||
@NbBundle.Messages({
|
||||
"# {0} - path",
|
||||
@ -219,7 +129,7 @@ public final class UserMachinePreferences {
|
||||
* @param path The path to the directory.
|
||||
*
|
||||
* @throws UserMachinePreferencesException If the directory cannot be
|
||||
* accessed or created.
|
||||
* accessed or created.
|
||||
*/
|
||||
public static void setCustomTempDirectory(String path) throws UserMachinePreferencesException {
|
||||
validateTempDirectory(path);
|
||||
@ -228,7 +138,8 @@ public final class UserMachinePreferences {
|
||||
|
||||
/**
|
||||
* @return The user selection for how the temp directory should be handled
|
||||
* (temp directory in case folder, in java.io.tmpdir, custom path).
|
||||
* (temp directory in case folder, in java.io.tmpdir, custom path).
|
||||
* Guaranteed to be non-null.
|
||||
*/
|
||||
public static TempDirChoice getTempDirChoice() {
|
||||
return TempDirChoice.getValue(preferences.get(TEMP_DIR_CHOICE_KEY, null))
|
||||
@ -239,6 +150,7 @@ public final class UserMachinePreferences {
|
||||
* Sets the temp directory choice (i.e. system, case, custom).
|
||||
*
|
||||
* @param tempDirChoice The choice (must be non-null).
|
||||
*
|
||||
* @throws UserMachinePreferencesException
|
||||
*/
|
||||
public static void setTempDirChoice(TempDirChoice tempDirChoice) throws UserMachinePreferencesException {
|
||||
|
@ -34,7 +34,11 @@ ILeappAnalyzerIngestModule.running.iLeapp=Running iLeapp
|
||||
ILeappAnalyzerIngestModule.starting.iLeapp=Starting iLeapp
|
||||
ILeappAnalyzerModuleFactory_moduleDesc=Uses iLEAPP to analyze logical acquisitions of iOS devices.
|
||||
ILeappAnalyzerModuleFactory_moduleName=iOS Analyzer (iLEAPP)
|
||||
LeappFileProcessor.cannot.create.calllog.relationship=Cannot create TSK_CALLLOG Relationship.
|
||||
LeappFileProcessor.cannot.create.contact.relationship=Cannot create TSK_CONTACT Relationship.
|
||||
LeappFileProcessor.cannot.create.message.relationship=Cannot create TSK_MESSAGE Relationship.
|
||||
LeappFileProcessor.cannot.create.trackpoint.relationship=Cannot create TSK_TRACK_POINT artifact.
|
||||
LeappFileProcessor.cannot.create.waypoint.relationship=Cannot create TSK_WAYPOINT artifact.
|
||||
LeappFileProcessor.cannot.load.artifact.xml=Cannot load xml artifact file.
|
||||
LeappFileProcessor.cannotBuildXmlParser=Cannot build an XML parser.
|
||||
LeappFileProcessor.completed=Leapp Processing Completed
|
||||
|
@ -75,15 +75,15 @@
|
||||
<TableView fx:id="table" tableMenuButtonVisible="true" BorderPane.alignment="CENTER">
|
||||
<columns>
|
||||
<TableColumn fx:id="dateTimeColumn" editable="false" maxWidth="200.0" minWidth="150.0" prefWidth="150.0" resizable="false" sortable="false" text="Date/Time" />
|
||||
<TableColumn fx:id="typeColumn" editable="false" maxWidth="100.0" minWidth="100.0" prefWidth="100.0" sortable="false" text="Event Type" />
|
||||
<TableColumn fx:id="typeColumn" editable="false" minWidth="100.0" maxWidth="500.0" prefWidth="100.0" sortable="false" text="Event Type" resizable="true" />
|
||||
<TableColumn fx:id="descriptionColumn" editable="false" maxWidth="3000.0" minWidth="100.0" prefWidth="300.0" sortable="false" text="Description" />
|
||||
<TableColumn fx:id="idColumn" editable="false" maxWidth="50.0" minWidth="50.0" prefWidth="50.0" resizable="false" sortable="false" text="ID" />
|
||||
<TableColumn fx:id="taggedColumn" maxWidth="75.0" minWidth="75.0" prefWidth="75.0" resizable="false" text="Tagged" />
|
||||
<TableColumn fx:id="hashHitColumn" maxWidth="75.0" minWidth="75.0" prefWidth="75.0" resizable="false" text="Hash Hit" />
|
||||
</columns>
|
||||
<columnResizePolicy>
|
||||
<columnResizePolicy>
|
||||
<TableView fx:constant="CONSTRAINED_RESIZE_POLICY" />
|
||||
</columnResizePolicy>
|
||||
</columnResizePolicy>
|
||||
</TableView>
|
||||
</center>
|
||||
</fx:root>
|
||||
|
@ -1,7 +1,7 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2011-2020 Basis Technology Corp.
|
||||
* Copyright 2011-2021 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@ -53,6 +53,8 @@ import java.util.logging.Level;
|
||||
import javax.swing.AbstractAction;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.stream.Collectors;
|
||||
import static java.util.stream.Collectors.toList;
|
||||
import org.apache.solr.client.solrj.SolrQuery;
|
||||
@ -85,6 +87,7 @@ import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.Case.CaseType;
|
||||
import org.sleuthkit.autopsy.casemodule.CaseMetadata;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.core.RuntimeProperties;
|
||||
import org.sleuthkit.autopsy.core.UserPreferences;
|
||||
import org.sleuthkit.autopsy.coreutils.FileUtil;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
@ -95,7 +98,6 @@ import org.sleuthkit.autopsy.coreutils.ThreadUtils;
|
||||
import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
|
||||
import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
|
||||
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException;
|
||||
import org.sleuthkit.autopsy.report.GeneralReportSettings;
|
||||
import org.sleuthkit.autopsy.report.ReportProgressPanel;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
|
||||
@ -2030,6 +2032,13 @@ public class Server {
|
||||
private final List<SolrInputDocument> buffer;
|
||||
private final Object bufferLock;
|
||||
|
||||
/* (JIRA-7521) Sometimes we get into a situation where Solr server is no longer able to index new data.
|
||||
* Typically main reason for this is Solr running out of memory. In this case we will stop trying to send new
|
||||
* data to Solr (for this collection) after certain number of consecutive batches have failed. */
|
||||
private static final int MAX_NUM_CONSECUTIVE_FAILURES = 5;
|
||||
private AtomicInteger numConsecutiveFailures = new AtomicInteger(0);
|
||||
private AtomicBoolean skipIndexing = new AtomicBoolean(false);
|
||||
|
||||
private final ScheduledThreadPoolExecutor periodicTasksExecutor;
|
||||
private static final long PERIODIC_BATCH_SEND_INTERVAL_MINUTES = 10;
|
||||
private static final int NUM_BATCH_UPDATE_RETRIES = 10;
|
||||
@ -2076,6 +2085,11 @@ public class Server {
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
|
||||
if (skipIndexing.get()) {
|
||||
return;
|
||||
}
|
||||
|
||||
List<SolrInputDocument> clone;
|
||||
synchronized (bufferLock) {
|
||||
|
||||
@ -2242,6 +2256,10 @@ public class Server {
|
||||
* @throws KeywordSearchModuleException
|
||||
*/
|
||||
void addDocument(SolrInputDocument doc) throws KeywordSearchModuleException {
|
||||
|
||||
if (skipIndexing.get()) {
|
||||
return;
|
||||
}
|
||||
|
||||
List<SolrInputDocument> clone;
|
||||
synchronized (bufferLock) {
|
||||
@ -2268,6 +2286,10 @@ public class Server {
|
||||
*
|
||||
* @throws KeywordSearchModuleException
|
||||
*/
|
||||
@NbBundle.Messages({
|
||||
"Collection.unableToIndexData.error=Unable to add data to text index. All future text indexing for the current case will be skipped.",
|
||||
|
||||
})
|
||||
private void sendBufferedDocs(List<SolrInputDocument> docBuffer) throws KeywordSearchModuleException {
|
||||
|
||||
if (docBuffer.isEmpty()) {
|
||||
@ -2293,6 +2315,7 @@ public class Server {
|
||||
}
|
||||
}
|
||||
if (success) {
|
||||
numConsecutiveFailures.set(0);
|
||||
if (reTryAttempt > 0) {
|
||||
logger.log(Level.INFO, "Batch update suceeded after {0} re-try", reTryAttempt); //NON-NLS
|
||||
}
|
||||
@ -2304,10 +2327,29 @@ public class Server {
|
||||
throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg")); //NON-NLS
|
||||
} catch (Exception ex) {
|
||||
// Solr throws a lot of unexpected exception types
|
||||
numConsecutiveFailures.incrementAndGet();
|
||||
logger.log(Level.SEVERE, "Could not add batched documents to index", ex); //NON-NLS
|
||||
|
||||
// display message to user that that a document batch is missing from the index
|
||||
MessageNotifyUtil.Notify.error(
|
||||
NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"),
|
||||
NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"));
|
||||
throw new KeywordSearchModuleException(
|
||||
NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"), ex); //NON-NLS
|
||||
} finally {
|
||||
if (numConsecutiveFailures.get() >= MAX_NUM_CONSECUTIVE_FAILURES) {
|
||||
// skip all future indexing
|
||||
skipIndexing.set(true);
|
||||
logger.log(Level.SEVERE, "Unable to add data to text index. All future text indexing for the current case will be skipped!"); //NON-NLS
|
||||
|
||||
// display message to user that no more data will be added to the index
|
||||
MessageNotifyUtil.Notify.error(
|
||||
NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"),
|
||||
Bundle.Collection_unableToIndexData_error());
|
||||
if (RuntimeProperties.runningWithGUI()) {
|
||||
MessageNotifyUtil.Message.error(Bundle.Collection_unableToIndexData_error());
|
||||
}
|
||||
}
|
||||
docBuffer.clear();
|
||||
}
|
||||
}
|
||||
|
@ -13,6 +13,7 @@ ChromeCacheExtractor.progressMsg={0}: Extracting cache entry {1} of {2} entries
|
||||
DataSourceUsage_AndroidMedia=Android Media Card
|
||||
DataSourceUsage_DJU_Drone_DAT=DJI Internal SD Card
|
||||
DataSourceUsage_FlashDrive=Flash Drive
|
||||
# {0} - OS name
|
||||
DataSourceUsageAnalyzer.customVolume.label=OS Drive ({0})
|
||||
DataSourceUsageAnalyzer.parentModuleName=Recent Activity
|
||||
DefaultPriorityDomainCategorizer_searchEngineCategory=Search Engine
|
||||
|
@ -1149,7 +1149,7 @@ class ExtractRegistry extends Extract {
|
||||
for (AbstractFile systemHive: regFiles) {
|
||||
if (systemHive.getName().toLowerCase().equals("system") && systemHive.getSize() > 0) {
|
||||
|
||||
String systemFileNameLocal = RAImageIngestModule.getRATempPath(currentCase, "reg", ingestJobId) + File.separator + systemHive.getName();
|
||||
String systemFileNameLocal = RAImageIngestModule.getRATempPath(currentCase, "reg", ingestJobId) + File.separator + "Domain-" + systemHive.getName();
|
||||
File systemFileNameLocalFile = new File(systemFileNameLocal);
|
||||
|
||||
if (!systemFileNameLocalFile.exists()) {
|
||||
|
295
apidiff.py
Normal file
@ -0,0 +1,295 @@
|
||||
"""
|
||||
Generates an api diff from one commit to another. This script relies on gitpython and similarly require git
|
||||
installed on the system. This script also requires python 3.
|
||||
|
||||
This script can be called as follows:
|
||||
|
||||
python apidiff.py <previous tag id> <latest tag id> -r <repo path> -o <output path>
|
||||
|
||||
If the '-o' flag is not specified, this script will create a folder at apidiff_output in the same directory as the
|
||||
script. For full list of options call:
|
||||
|
||||
python apidiff.py -h
|
||||
"""
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Tuple, Iterator, List
|
||||
|
||||
import argparse as argparse
|
||||
from git import Repo, Blob, Tree
|
||||
|
||||
"""
|
||||
These are exit codes for jdiff:
|
||||
return code 1 = error in jdiff
|
||||
return code 100 = no changes
|
||||
return code 101 = compatible changes
|
||||
return code 102 = incompatible changes
|
||||
"""
|
||||
NO_CHANGES = 100
|
||||
COMPATIBLE = 101
|
||||
NON_COMPATIBLE = 102
|
||||
ERROR = 1
|
||||
|
||||
|
||||
def compare_xml(jdiff_path: str, root_dir: str, output_folder: str, oldapi_folder: str,
                newapi_folder: str, api_file_name: str, log_path: str) -> int:
    """
    Compares xml generated by jdiff using jdiff.
    :param jdiff_path: Path to jdiff jar.
    :param root_dir: Directory for output.
    :param output_folder: Folder for diff output.
    :param oldapi_folder: Folder name of old api (i.e. release-4.10.2).
    :param newapi_folder: Folder name of new api (i.e. release-4.11.0).
    :param api_file_name: Name of xml file name (i.e. if output.xml, just 'output')
    :param log_path: Path to log file.
    :return: jdiff exit code.
    """
    jdiff_parent = os.path.dirname(jdiff_path)

    null_file = fix_path(os.path.join(jdiff_parent, "lib", "Null.java"))

    # jdiff expects the user comments directory to exist at this exact location.
    make_dir(os.path.join(root_dir,
                          output_folder,
                          f"user_comments_for_{oldapi_folder}",
                          f"{api_file_name}_to_{newapi_folder}"))

    cmd = ["javadoc",
           "-doclet", "jdiff.JDiff",
           "-docletpath", fix_path(jdiff_path),
           "-d", fix_path(output_folder),
           "-oldapi", fix_path(os.path.join(oldapi_folder, api_file_name)),
           "-newapi", fix_path(os.path.join(newapi_folder, api_file_name)),
           "-script",
           null_file]

    code = None
    # Context manager guarantees the log handle is closed on every path,
    # including when Popen itself raises.
    with open(log_path, "w") as log:
        try:
            jdiff = subprocess.Popen(cmd, stdout=log, stderr=log, cwd=root_dir)
            jdiff.wait()
            code = jdiff.returncode
        except Exception as e:
            log_and_print(log, f"Error executing javadoc: {str(e)}\nExiting...")
            sys.exit(1)

    print(f"Compared XML for {oldapi_folder} {newapi_folder}")
    if code == NO_CHANGES:
        print(" No API changes")
    elif code == COMPATIBLE:
        print(" API Changes are backwards compatible")
    elif code == NON_COMPATIBLE:
        print(" API Changes are not backwards compatible")
    else:
        print(" *Error in XML, most likely an empty module")
    sys.stdout.flush()
    return code
|
||||
|
||||
|
||||
def gen_xml(jdiff_path: str, output_path: str, log_output_path: str, src: str, packages: List[str]):
    """
    Uses jdiff to generate an xml representation of the source code.
    :param jdiff_path: Path to jdiff jar.
    :param output_path: Path to output path of diff.
    :param log_output_path: The log output path.
    :param src: The path to the source code.
    :param packages: The packages to process.
    """
    make_dir(output_path)

    # File objects are context managers; this closes the log on every path,
    # including when Popen raises.
    with open_log_file(log_output_path) as log:
        log_and_print(log, f"Generating XML for: {src} outputting to: {output_path}")
        cmd = ["javadoc",
               "-doclet", "jdiff.JDiff",
               "-docletpath", fix_path(jdiff_path),
               "-apiname", fix_path(output_path),
               "-sourcepath", fix_path(src)] + packages
        try:
            jdiff = subprocess.Popen(cmd, stdout=log, stderr=log)
            jdiff.wait()
        except Exception as e:
            log_and_print(log, f"Error executing javadoc {str(e)}\nExiting...")
            sys.exit(1)

        # Original mixed an empty f-string with concatenation; use one f-string.
        log_and_print(log, f"Generated XML for: {packages}")
    sys.stdout.flush()
|
||||
|
||||
|
||||
def _list_paths(root_tree: Tree, src_folder, path: Path = None) -> Iterator[Tuple[str, Blob]]:
    """
    Given the root path to serve as a prefix, walks the tree of a git commit returning all files and blobs.
    Repurposed from: https://www.enricozini.org/blog/2019/debian/gitpython-list-all-files-in-a-git-commit/
    Args:
        root_tree: The tree of the commit to walk.
        src_folder: relative path in repo to source folder that will be copied.
        path: The path to use as a prefix.
    Returns: A tuple iterator where each tuple consists of the path as a string and a blob of the file.
    """
    # Yield the blobs at this level that live underneath the source folder.
    for file_blob in root_tree.blobs:
        blob_path = Path(path) / file_blob.name if path else file_blob.name
        if Path(src_folder) in Path(blob_path).parents:
            yield blob_path, file_blob
    # Recurse into subtrees, extending the prefix with each subtree name.
    for subtree in root_tree.trees:
        subtree_path = Path(path) / subtree.name if path else subtree.name
        yield from _list_paths(subtree, src_folder, subtree_path)
|
||||
|
||||
|
||||
def _get_tree(repo_path: str, commit_id: str) -> Tree:
    """
    Retrieves the git tree that can be walked for files and file content at the specified commit.
    Args:
        repo_path: The path to the repo or a child directory of the repo.
        commit_id: The commit id.
    Returns: The tree.
    """
    # search_parent_directories lets repo_path be any child dir of the repo.
    repository = Repo(repo_path, search_parent_directories=True)
    return repository.commit(commit_id.strip()).tree
|
||||
|
||||
|
||||
def copy_commit_paths(repo_path, commit_id, src_folder, output_folder):
    """
    Copies all files located within a repo in the folder 'src_folder' to 'output_folder'.
    :param repo_path: The path to the repo.
    :param commit_id: The commit id.
    :param src_folder: The relative path in the repo to the source folder.
    :param output_folder: The output folder where the source will be copied.
    """
    tree = _get_tree(repo_path, commit_id)
    for rel_path, blob in _list_paths(tree, src_folder):
        output_path = os.path.join(output_folder, os.path.relpath(rel_path, src_folder))
        make_dir(os.path.dirname(output_path))
        # Context manager ensures the handle is closed even if decode/write
        # raises (the original leaked the handle on error).
        with open(output_path, 'w') as output_file:
            output_file.write(blob.data_stream.read().decode('utf-8'))
|
||||
|
||||
|
||||
def open_log_file(log_path):
    """
    Opens a path to a log file for appending, creating parent directories
    and the log file itself as necessary.

    :param log_path: The path to the log file.
    :return: The log file opened for writing.
    """
    # Create the file (and its parent directories) only when it is missing.
    if not os.path.exists(log_path):
        parent_dir = os.path.dirname(log_path)
        make_dir(parent_dir)
        Path(log_path).touch()

    return open(log_path, 'a+')
|
||||
|
||||
|
||||
def fix_path(path):
    """
    Generates a path that is escaped from cygwin paths if present.

    :param path: Path (possibly including a /cygdrive/<letter>/ prefix).
    :return: The normalized path.
    """
    prefix = "/cygdrive/"
    if path.startswith(prefix) and len(path) > len(prefix):
        # Translate "/cygdrive/<drive>/rest" to "<DRIVE>:/rest". The original
        # hardcoded C: (wrong for e.g. /cygdrive/d/...) and used a magic
        # slice that produced a doubled slash ("C://rest").
        drive = path[len(prefix)].upper()
        rest = path[len(prefix) + 1:].lstrip("/")
        return f"{drive}:/{rest}"
    return path
|
||||
|
||||
|
||||
def log_and_print(log, message):
    """
    Writes a timestamped entry to the log file and echoes it to stdout.

    :param log: The log file object.
    :param message: The string to be printed.
    """
    stamped = f"{time.strftime('%Y-%m-%d %H:%M:%S')}: {message}"
    print(stamped)
    log.write(stamped + "\n")
|
||||
|
||||
|
||||
def make_dir(dir_path: str):
    """
    Create the given directory (and any missing parents) if it doesn't
    already exist.

    :param dir_path: The path to the directory.
    :return: True if the directory exists when done (created or pre-existing).
    """
    try:
        # exist_ok avoids the check-then-create race of the original
        # isdir/makedirs/isdir sequence while keeping the same result.
        os.makedirs(dir_path, exist_ok=True)
        return os.path.isdir(dir_path)
    except OSError:
        # IOError is an alias of OSError; catching OSError preserves the
        # original best-effort behavior (report and return False).
        print("Exception thrown when creating directory: " + dir_path)
        return False
|
||||
|
||||
|
||||
def run_compare(output_path: str, jdiff_path: str, repo_path: str, src_rel_path: str, prev_commit_id: str,
                latest_commit_id: str, packages: List[str]):
    """
    Runs a comparison of the api between two different commits/branches/tags of the same repo generating a jdiff diff.
    :param output_path: The output path for artifacts.
    :param jdiff_path: The path to the jdiff jar.
    :param repo_path: The path to the repo.
    :param src_rel_path: The relative path in the repo to the source directory.
    :param prev_commit_id: The previous commit/branch/tag id.
    :param latest_commit_id: The latest commit/branch/tag id.
    :param packages: The packages to be considered for the api diff.
    """
    log_path = os.path.join(output_path, "messages.log")
    api_file_name = "output"

    # Snapshot each commit's source tree, then generate its jdiff xml,
    # previous commit first to match the comparison order below.
    for commit in (prev_commit_id, latest_commit_id):
        commit_src = os.path.join(output_path, "src", commit)
        copy_commit_paths(repo_path, commit, src_rel_path, commit_src)
        gen_xml(jdiff_path, os.path.join(output_path, commit, api_file_name), log_path, commit_src, packages)

    # Diff the two generated xml api descriptions.
    compare_xml(jdiff_path, output_path, os.path.join(output_path, "diff"),
                prev_commit_id, latest_commit_id, api_file_name, log_path)
|
||||
|
||||
|
||||
def main():
    """Parses command-line arguments and runs the api diff."""
    parser = argparse.ArgumentParser(description="Generates a jdiff diff of the java api between two commits in a "
                                                 "repo.",
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(dest='prev_commit', type=str, help=r'The git commit id/branch/tag to be used for the first '
                                                           r'commit')
    parser.add_argument(dest='latest_commit', type=str, help=r'The git commit id/branch/tag to be used for the latest '
                                                             r'commit')
    # Not required: the fallback below uses the script's directory, which is
    # what the help text already promised (the original contradicted itself
    # by declaring required=True while keeping a dead fallback).
    parser.add_argument('-r', '--repo', dest='repo_path', type=str, required=False,
                        help='The path to the repo. If not specified, path of script is used.')

    parser.add_argument('-o', '--output', dest='output_path', type=str, required=False,
                        help='The location for output of all artifacts. Defaults to an output folder in same directory'
                             'as script')
    parser.add_argument('-s', '--src', dest='src_rel_folder', type=str, required=False, default="bindings/java/src",
                        help='The relative path within the repo of the src folder.')
    # list of packages can be specified like this:
    # https://stackoverflow.com/questions/15753701/how-can-i-pass-a-list-as-a-command-line-argument-with-argparse
    parser.add_argument('-p', '--packages', dest='packages', nargs='+', required=False,
                        default=["org.sleuthkit.datamodel"], help='The packages to consider in api diff.')
    # Help text fixed; the original was a copy-paste of the packages help.
    parser.add_argument('-j', '--jdiff', dest='jdiff_path', type=str, required=False,
                        help='The path to the jdiff jar.')

    args = parser.parse_args()
    script_path = os.path.dirname(os.path.realpath(__file__))
    repo_path = args.repo_path if args.repo_path else script_path
    output_path = args.output_path if args.output_path else os.path.join(script_path, "apidiff_output")
    jdiff_path = args.jdiff_path if args.jdiff_path else os.path.join(script_path,
                                                                      "thirdparty/jdiff/v-custom/jdiff.jar")
    run_compare(output_path=output_path,
                jdiff_path=jdiff_path,
                repo_path=repo_path,
                packages=args.packages,
                src_rel_path=args.src_rel_folder,
                prev_commit_id=args.prev_commit,
                latest_commit_id=args.latest_commit)


# Guard so importing this module (e.g. for testing) does not trigger a run.
if __name__ == "__main__":
    main()
|
@ -14,11 +14,16 @@ The first tab on the options panel is for general application settings.
|
||||
|
||||
\image html options_application.png
|
||||
|
||||
The top section lets you adjust how much memory is used by Autopsy and how many log files to keep. Generally each Autopsy session generates one log file, though it can generate more if the log file becomes too large.
|
||||
The top section lets you adjust how much memory is used by Autopsy and how many log files to keep. Generally each Autopsy session generates one log file, though it can generate more if the log file becomes too large. You can also specify a custom location to write heap dumps to.
|
||||
|
||||
The next section lets you specify where Autopsy should store temporary files. These files will be deleted when a case is closed.
|
||||
The next section lets you specify where Autopsy should store temporary files. These files will be deleted when a case is closed. There are three options:
|
||||
<ul>
|
||||
<li>Local temp directory - Uses the system temp folder (On Windows, typically C:\\Users\\(user name)\\AppData\\Local\\Temp\\Autopsy)
|
||||
<li>Temp folder in case directory - Puts temporary files in the "temp" directory in the case folder
|
||||
<li>Custom - Will use the given folder as a base for the temporary files
|
||||
</ul>
|
||||
|
||||
The final section lets you set a custom logo.
|
||||
The next section lets you set a custom logo.
|
||||
|
||||
\image html options_logo.png
|
||||
|
||||
@ -26,6 +31,8 @@ This logo will be displayed in any generated \ref report_html "HTML reports".
|
||||
|
||||
\image html options_logo_report.jpg
|
||||
|
||||
The final section lists instructions on how to change scaling for high DPI Windows systems.
|
||||
|
||||
\section config_view View Options
|
||||
|
||||
See the \ref view_options_page page for a description of how you can customize what data is displayed in Autopsy.
|
||||
|
@ -75,23 +75,17 @@ Registry hive files can be viewed in a format similar to a registry editor.
|
||||
|
||||
\image html content_viewer_registry.png
|
||||
|
||||
\section cv_message Message
|
||||
|
||||
The Message tab shows details of emails and SMS messages.
|
||||
|
||||
\image html content_viewer_message.png
|
||||
|
||||
\section cv_metadata File Metadata
|
||||
|
||||
The File Metadata tab displays basic information about the file, such as type, size, and hash. It also displays the output of the Sleuth Kit istat tool.
|
||||
|
||||
\image html content_viewer_metadata.png
|
||||
|
||||
\section cv_context Context
|
||||
\section cv_os_account OS Accounts
|
||||
|
||||
The Context tab shows information on where a file came from and allows you to navigate to the original result. For example, it can show the URL for downloaded files and the email message a file was attached to. In the image below you can see the context for an image that was sent as an email attachment.
|
||||
The OS Accounts tab displays information on the OS account associated with a given result, if present. It is also used to give details on accounts listed under the OS Accounts node in the tree.
|
||||
|
||||
\image html content_viewer_context.png
|
||||
\image html content_viewer_os_account.png
|
||||
|
||||
\section cv_results Results
|
||||
|
||||
@ -101,6 +95,12 @@ The Results tab is active when selecting items with associated results such as k
|
||||
<br>
|
||||
\image html content_viewer_results_bookmark.png
|
||||
|
||||
\section cv_context Context
|
||||
|
||||
The Context tab shows information on where a file came from and allows you to navigate to the original result. For example, it can show the URL for downloaded files and the email message a file was attached to. In the image below you can see the context for an image that was sent as an email attachment.
|
||||
|
||||
\image html content_viewer_context.png
|
||||
|
||||
\section cv_annotations Annotations
|
||||
|
||||
The Annotations tab shows information added by an analyst about a file or result. It displays any tags and comments associated with the file or result, and if the \ref central_repo_page is enabled it will also display any comments saved to the Central Repository.
|
||||
|
@ -80,4 +80,10 @@ The Container tab displays information on the data source itself, such as the si
|
||||
|
||||
\image html ds_summary_container.png
|
||||
|
||||
\subsection ds_summary_export Export
|
||||
|
||||
The Export tab allows you to export the contents of the other data source summary tabs to an Excel-formatted file.
|
||||
|
||||
\image html ds_summary_export.png
|
||||
|
||||
*/
|
@ -25,6 +25,18 @@ The data source must remain accessible for the duration of the analysis because
|
||||
|
||||
Regardless of the type of data source, there are some common steps in the process:
|
||||
<ol>
|
||||
|
||||
<li> You will choose the host for the data source you are going to add. See the \ref host_page "hosts page" for more information about hosts.
|
||||
|
||||
\image html data_source_host_select.png
|
||||
|
||||
There are three options:
|
||||
<ul>
|
||||
<li> <b>Generate new host based on data source name</b> - this will typically create a host with a name similar to your data source with the ID used in the database appended for uniqueness.
|
||||
<li> <b>Specify new host name</b> - this allows you to enter a host name.
|
||||
<li> <b>Use existing host</b> - this allows you to choose a host name already in use in the current case.
|
||||
</ul>
|
||||
|
||||
<li> You will select the type of data source.
|
||||
|
||||
\image html select-data-source-type.PNG
|
||||
|
@ -13,7 +13,7 @@ We suggest running all \ref ingest_page "ingest modules" before launching discov
|
||||
Required ingest modules:
|
||||
<ul>
|
||||
<li>\ref file_type_identification_page for image, video, and document searches
|
||||
<li>\ref recent_activity_page or \ref ileapp_page for domain searches
|
||||
<li>\ref recent_activity_page or one of the mobile parsers (\ref android_analyzer_page, \ref ileapp_page, \ref aleapp_page) for domain searches
|
||||
</ul>
|
||||
|
||||
Optional ingest modules:
|
||||
@ -50,7 +50,7 @@ The first step is choosing whether you want to display images, videos, documents
|
||||
|
||||
\subsection file_disc_filtering Filtering
|
||||
|
||||
The second step is to select and configure your filters. The available filters will vary depending on the result type. For most filters, you enable them using the checkbox on the left and then select your options. Multiple options can be selected by using CTRL + left click. Results must pass all enabled filters to be displayed.
|
||||
The second step is to select and configure your filters. The available filters will vary depending on the result type. For most filters, you enable them using the checkbox on the left and then select the checkboxes next to the options you want to be enabled. The "Check All" and "Uncheck All" buttons can be used to check or uncheck all options in the list. Results must pass all enabled filters to be displayed.
|
||||
|
||||
\subsubsection file_disc_size_filter File Size Filter
|
||||
|
||||
@ -132,7 +132,7 @@ The previously notable filter is for domain searches only and is used to restric
|
||||
|
||||
\subsubsection file_disc_known_account_filter Known Account Type Filter
|
||||
|
||||
The previously notable filter is for domain searches only and is used to restrict results to only those domains that have a known account type.
|
||||
The known account type filter is for domain searches only and is used to restrict results to only those domains that have a known account type.
|
||||
|
||||
\image html FileDiscovery/fd_knownAccountFilter.png
|
||||
|
||||
|
50
docs/doxygen-user/hosts.dox
Normal file
@ -0,0 +1,50 @@
|
||||
/*! \page host_page Hosts
|
||||
|
||||
|
||||
[TOC]
|
||||
|
||||
\section host_use Using Hosts
|
||||
|
||||
\subsection host_wizard Associating a Data Source With a Host
|
||||
|
||||
Every data source must be associated with a host. The first step in the \ref ds_add "add data source process" is to select a host for the data source you are about to add to the case. This host can be auto-generated, entered by the user, or selected from the list of hosts already present in the case.
|
||||
|
||||
\image html data_source_host_select.png
|
||||
|
||||
\subsection host_view Viewing Hosts
|
||||
|
||||
Hosts are displayed in the \ref tree_viewer_page. Depending on the \ref view_options_page selected, hosts may be grouped together under persons.
|
||||
|
||||
\image html ui_tree_top_ds.png
|
||||
|
||||
\subsection host_os_accounts OS Accounts
|
||||
|
||||
OS accounts can be viewed in the OS Accounts node under Results. Each OS account is associated with a host, and the host information is displayed in the OS Account tab of the content viewer.
|
||||
|
||||
\image html host_os_accounts.png
|
||||
|
||||
\section host_management Managing Hosts
|
||||
|
||||
\subsection host_menu Manage Hosts Menu
|
||||
|
||||
Go to Case->Manage Hosts to open the host management panel.
|
||||
|
||||
\image html manage_hosts.png
|
||||
|
||||
Here you can see all hosts in the case, add new hosts, change the name of an existing host, and delete hosts that are not in use.
|
||||
|
||||
\subsection host_merge Merging Hosts
|
||||
|
||||
Over the course of processing a case, it may become clear that two (or more) hosts should be combined. Merging one host into another will move all data sources from the source host into the destination host and move or combine any OS accounts found.
|
||||
|
||||
|
||||
To merge hosts, right-click on the host you want to merge into another host.
|
||||
|
||||
\image html host_merge.png
|
||||
|
||||
A confirmation dialog will display stating that this cannot be undone. After proceeding, the hosts will be merged together and the tree viewer node will update showing the combined data.
|
||||
|
||||
\image html host_merge_result.png
|
||||
|
||||
|
||||
*/
|
Before Width: | Height: | Size: 20 KiB After Width: | Height: | Size: 16 KiB |
Before Width: | Height: | Size: 28 KiB After Width: | Height: | Size: 24 KiB |
BIN
docs/doxygen-user/images/DataSourceSummary/ds_summary_export.png
Normal file
After Width: | Height: | Size: 6.6 KiB |
Before Width: | Height: | Size: 33 KiB After Width: | Height: | Size: 29 KiB |
Before Width: | Height: | Size: 36 KiB After Width: | Height: | Size: 32 KiB |
Before Width: | Height: | Size: 20 KiB After Width: | Height: | Size: 17 KiB |
Before Width: | Height: | Size: 36 KiB After Width: | Height: | Size: 32 KiB |
Before Width: | Height: | Size: 22 KiB After Width: | Height: | Size: 18 KiB |
Before Width: | Height: | Size: 51 KiB After Width: | Height: | Size: 48 KiB |
Before Width: | Height: | Size: 32 KiB After Width: | Height: | Size: 28 KiB |
Before Width: | Height: | Size: 59 KiB After Width: | Height: | Size: 56 KiB |
Before Width: | Height: | Size: 4.1 KiB After Width: | Height: | Size: 4.8 KiB |
Before Width: | Height: | Size: 5.6 KiB After Width: | Height: | Size: 7.3 KiB |
Before Width: | Height: | Size: 70 KiB After Width: | Height: | Size: 78 KiB |
Before Width: | Height: | Size: 4.4 KiB After Width: | Height: | Size: 6.3 KiB |
Before Width: | Height: | Size: 1.8 KiB After Width: | Height: | Size: 2.9 KiB |
Before Width: | Height: | Size: 2.0 KiB After Width: | Height: | Size: 3.4 KiB |
Before Width: | Height: | Size: 3.2 KiB After Width: | Height: | Size: 4.8 KiB |
Before Width: | Height: | Size: 4.9 KiB After Width: | Height: | Size: 6.4 KiB |
Before Width: | Height: | Size: 66 KiB After Width: | Height: | Size: 55 KiB |
Before Width: | Height: | Size: 16 KiB After Width: | Height: | Size: 25 KiB |
Before Width: | Height: | Size: 293 KiB After Width: | Height: | Size: 314 KiB |
Before Width: | Height: | Size: 25 KiB After Width: | Height: | Size: 34 KiB |
Before Width: | Height: | Size: 27 KiB After Width: | Height: | Size: 38 KiB |
Before Width: | Height: | Size: 41 KiB After Width: | Height: | Size: 42 KiB |
Before Width: | Height: | Size: 20 KiB After Width: | Height: | Size: 33 KiB |
Before Width: | Height: | Size: 27 KiB After Width: | Height: | Size: 37 KiB |
Before Width: | Height: | Size: 20 KiB After Width: | Height: | Size: 26 KiB |
Before Width: | Height: | Size: 22 KiB After Width: | Height: | Size: 33 KiB |
BIN
docs/doxygen-user/images/content_viewer_os_account.png
Normal file
After Width: | Height: | Size: 22 KiB |
Before Width: | Height: | Size: 22 KiB After Width: | Height: | Size: 30 KiB |
Before Width: | Height: | Size: 33 KiB After Width: | Height: | Size: 42 KiB |
Before Width: | Height: | Size: 19 KiB After Width: | Height: | Size: 19 KiB |
Before Width: | Height: | Size: 20 KiB After Width: | Height: | Size: 15 KiB |
Before Width: | Height: | Size: 37 KiB After Width: | Height: | Size: 48 KiB |
Before Width: | Height: | Size: 28 KiB After Width: | Height: | Size: 36 KiB |
Before Width: | Height: | Size: 132 KiB After Width: | Height: | Size: 140 KiB |
BIN
docs/doxygen-user/images/custom_web_categories.png
Normal file
After Width: | Height: | Size: 36 KiB |
BIN
docs/doxygen-user/images/custom_web_categories_results.png
Normal file
After Width: | Height: | Size: 42 KiB |
BIN
docs/doxygen-user/images/data_source_host_select.png
Normal file
After Width: | Height: | Size: 31 KiB |
BIN
docs/doxygen-user/images/host_merge.png
Normal file
After Width: | Height: | Size: 20 KiB |
BIN
docs/doxygen-user/images/host_merge_result.png
Normal file
After Width: | Height: | Size: 7.8 KiB |
BIN
docs/doxygen-user/images/host_os_accounts.png
Normal file
After Width: | Height: | Size: 67 KiB |
Before Width: | Height: | Size: 44 KiB After Width: | Height: | Size: 47 KiB |
Before Width: | Height: | Size: 53 KiB After Width: | Height: | Size: 44 KiB |
BIN
docs/doxygen-user/images/manage_hosts.png
Normal file
After Width: | Height: | Size: 12 KiB |
Before Width: | Height: | Size: 36 KiB After Width: | Height: | Size: 49 KiB |
Before Width: | Height: | Size: 23 KiB After Width: | Height: | Size: 21 KiB |
BIN
docs/doxygen-user/images/reset_windows.png
Normal file
After Width: | Height: | Size: 30 KiB |
BIN
docs/doxygen-user/images/solr/solr_disable_periodic_search.png
Normal file
After Width: | Height: | Size: 30 KiB |
BIN
docs/doxygen-user/images/solr/solr_jvm.png
Normal file
After Width: | Height: | Size: 24 KiB |
BIN
docs/doxygen-user/images/ui_person_select.png
Normal file
After Width: | Height: | Size: 24 KiB |
BIN
docs/doxygen-user/images/ui_tree_top_ds.png
Normal file
After Width: | Height: | Size: 19 KiB |
BIN
docs/doxygen-user/images/ui_tree_top_persons.png
Normal file
After Width: | Height: | Size: 8.8 KiB |
Before Width: | Height: | Size: 63 KiB After Width: | Height: | Size: 58 KiB |
BIN
docs/doxygen-user/images/views_grouped_tree.png
Normal file
After Width: | Height: | Size: 30 KiB |
BIN
docs/doxygen-user/images/views_standard_tree.png
Normal file
After Width: | Height: | Size: 36 KiB |
@ -44,12 +44,28 @@ Under the Keyword list is the option to send ingest inbox messages for each hit.
|
||||
The string extraction setting defines how strings are extracted from files from which text cannot be extracted normally because their file formats are not supported. This is the case with arbitrary binary files (such as the page file) and chunks of unallocated space that represent deleted files.
|
||||
When we extract strings from binary files we need to interpret sequences of bytes as text differently, depending on the possible text encoding and script/language used. In many cases we don't know in advance which specific encoding/language the text is encoded in. However, it helps if the investigator is looking for a specific language, because by selecting fewer languages the indexing performance will be improved and the number of false positives will be reduced.
|
||||
|
||||
\image html keyword-search-configuration-dialog-string-extraction.PNG
|
||||
|
||||
The default setting is to search for English strings only, encoded as either UTF8 or UTF16. This setting has the best performance (shortest ingest time).
|
||||
The user can also use the String Viewer first and try different script/language settings, and see which settings give satisfactory results for the type of text relevant to the investigation. Then the same setting that works for the investigation can be applied to the keyword search ingest.
|
||||
|
||||
\image html keyword-search-configuration-dialog-string-extraction.PNG
|
||||
|
||||
There is also a setting to enable Optical Character Recognition (OCR). If enabled, text may be extracted from supported image types. Enabling this feature will make the keyword search module take longer to run, and the results are not perfect. The following shows a sample image containing text:
|
||||
## General Settings tab {#generalSettingsTab}
|
||||
|
||||
\image html keyword-search-configuration-dialog-general.PNG
|
||||
|
||||
### NIST NSRL Support
|
||||
The hash lookup ingest service can be configured to use the NIST NSRL hash set of known files. The keyword search advanced configuration dialog "General" tab contains an option to skip keyword indexing and search on files that have previously been marked as "known" and uninteresting files. Selecting this option can greatly reduce the size of the index and improve ingest performance. In most cases, the user does not need to keyword search for "known" files.
|
||||
|
||||
### Result update frequency during ingest
|
||||
To control how frequently searches are executed during ingest, the user can adjust the timing setting available in the keyword search advanced configuration dialog "General" tab. Setting the number of minutes lower will result in more frequent index updates and searches being executed and the user will be able to see results more in real-time. However, more frequent updates can affect the overall performance, especially on lower-end systems, and can potentially lengthen the overall time needed for the ingest to complete.
|
||||
|
||||
One can also choose to have no periodic searches. This will speed up the ingest. Users choosing this option can run their keyword searches once the entire keyword search index is complete.
|
||||
|
||||
### Optical Character Recognition
|
||||
There is also a setting to enable Optical Character Recognition (OCR). If enabled, text may be extracted from supported image types. Enabling this feature will make the keyword search module take longer to run, and the results are not perfect. The secondary checkbox can make OCR run faster by only processing large images and images extracted from documents.
|
||||
|
||||
The following shows a sample image containing text:
|
||||
|
||||
\image html keyword-search-ocr-image.png
|
||||
|
||||
@ -72,19 +88,6 @@ and move them to the right location. The following steps breakdown this process
|
||||
|
||||
The language files will now be supported when OCR is enabled in the Keyword Search Settings.
|
||||
|
||||
## General Settings tab {#generalSettingsTab}
|
||||
|
||||
\image html keyword-search-configuration-dialog-general.PNG
|
||||
|
||||
### NIST NSRL Support
|
||||
The hash lookup ingest service can be configured to use the NIST NSRL hash set of known files. The keyword search advanced configuration dialog "General" tab contains an option to skip keyword indexing and search on files that have previously been marked as "known" and uninteresting files. Selecting this option can greatly reduce the size of the index and improve ingest performance. In most cases, the user does not need to keyword search for "known" files.
|
||||
|
||||
### Result update frequency during ingest
|
||||
To control how frequently searches are executed during ingest, the user can adjust the timing setting available in the keyword search advanced configuration dialog "General" tab. Setting the number of minutes lower will result in more frequent index updates and searches being executed and the user will be able to see results more in real-time. However, more frequent updates can affect the overall performance, especially on lower-end systems, and can potentially lengthen the overall time needed for the ingest to complete.
|
||||
|
||||
One can also choose to have no periodic searches. This will speed up the ingest. Users choosing this option can run their keyword searches once the entire keyword search index is complete.
|
||||
|
||||
|
||||
<!----------------------------------------->
|
||||
|
||||
<br>
|
||||
|
@ -259,5 +259,32 @@ However, the dashboard does not show enough detail to know when Solr is out of h
|
||||
Solr heap and other performance tuning is described in the following article:
|
||||
<ul><li>https://cwiki.apache.org/confluence/display/SOLR/SolrPerformanceProblems</ul>
|
||||
|
||||
\subsubsection install_solr_performance_tuning Notes on Solr Performance Tuning
|
||||
|
||||
If you are going to work with large images (TBs) and KWS performance is important, the best approach is to use a network (Multi-User) Solr server.
|
||||
|
||||
Some notes:
|
||||
<ul>
|
||||
<li>A single Solr server works well for data sources up to 1TB; after that the performance starts to slow down. The performance doesn't "drop off the cliff," but it keeps slowing down as you add more data to the index. After 3TBs of input data the Solr performance takes a significant decline.
|
||||
|
||||
<li>A single Multi-User Solr server may not perform much better than a Single-User Autopsy case. However, in Multi-User mode you can add additional Solr servers and create a Solr cluster. See the \ref install_sorl_adding_nodes section in the above documentation. These additional nodes are where the performance gains come from, especially for large input data sources. Apache Solr documentation calls this "SolrCloud" mode and each Solr server is called a "shard". The more Solr servers/shards you have, the better performance you will have for large data sets. On our test and production clusters, we are using 4-6 Solr servers to handle data sets of up to 10TB, which seems to be the upper limit. After that, you are better off breaking your Autopsy case into multiple cases, thus creating a separate Solr index for each case.
|
||||
|
||||
<li>In our testing, a 3-node SolrCloud indexes data roughly twice as fast as single Solr node. A 6-node SolrCloud indexes data almost twice as fast as 3-node SolrCloud. After that we did not see much performance gain. These performance figures are heavily dependent on network throughput, machine resources, disk access speeds, and the type of data that is being indexed.
|
||||
|
||||
<li>Exact match searches are much faster than substring or regex searches.
|
||||
|
||||
<li>Regex searches tend to use a lot of RAM on the Solr server.
|
||||
|
||||
<li>Indexing/searching of unallocated space really slows everything down because it is mostly binary or garbled data.
|
||||
|
||||
<li>If you are not going to look at the search results until ingest is over then you should disable the periodic keyword searches. They will start taking longer as your input data grows. This can be done in Tools->Options->Keyword Search tab:
|
||||
|
||||
\image html solr_disable_periodic_search.png
|
||||
|
||||
<li>In Single-User mode, if you are ingesting and indexing data sources that are multiple TBs in size, then both Autopsy memory and especially the Solr JVM memory needs to be increased from their default settings. This can be done in Tools->Options->Application tab. We would recommend at least 10GB heap size for Autopsy and at least 6-8GB heap size for Solr. Note that these are "maximum" values that the process will be allowed to use/request. The operating system will not allocate more heap than the process actually needs.
|
||||
|
||||
\image html solr_jvm.png
|
||||
|
||||
</ul>
|
||||
|
||||
*/
|
||||
|
@ -13,7 +13,17 @@ This allows you to see what activity has occurred in the last seven days of usage
|
||||
Configuration
|
||||
=======
|
||||
|
||||
There is nothing to configure for this module.
|
||||
Configuring Custom Web Categories
|
||||
------
|
||||
|
||||
The Recent Activity module will create "Web Categories" results for domains that match a list of categories. There are some built-in categories, but custom categories can also be entered through the "Custom Web Categories" tab on the main options panel. These custom categories will override any matching built-in category.
|
||||
|
||||
\image html custom_web_categories.png
|
||||
|
||||
The buttons below the list of categories allow you to enter new categories, edit existing categories, and delete categories. You can also export your list of categories and import a set of categories that was previously exported from this panel. Importing a set will add its categories to the current list (existing categories will not be deleted).
|
||||
|
||||
The category match for each domain will be listed in the "Name" column in the result viewer.
|
||||
\image html custom_web_categories_results.png
|
||||
|
||||
|
||||
Using the Module
|
||||
@ -23,6 +33,7 @@ Ingest Settings
|
||||
------
|
||||
There are no run-time settings for this module.
|
||||
|
||||
|
||||
Seeing Results
|
||||
------
|
||||
Results show up in the tree under "Extracted Content".
|
||||
|
@ -58,7 +58,7 @@ Generating an Excel report is very similar to an \ref report_html. You select wh
|
||||
|
||||
\image html reports_excel.png
|
||||
|
||||
\subsection report_tagged_hashes Add Tagged Hashes
|
||||
\subsection report_tagged_hashes Save Tagged Hashes
|
||||
|
||||
This is one of the report modules that doesn't generate an actual report. The purpose of this module is to easily add the hashes
|
||||
of some/all tagged files to an Autopsy hash set that can be used by the \ref hash_db_page. You can use the "Configure Hash Sets" button to create a new
|
||||
@ -69,6 +69,10 @@ hash set to write to, or use an existing hash set.
|
||||
After running this module, if you use the same hash set on future cases then everything that was tagged with one of the selected tags in this case will
|
||||
show up as Hashset Hits.
|
||||
|
||||
\subsection reports_unique_words Extract Unique Words
|
||||
|
||||
This report module allows you to export all unique "words" found in a case. These words come from the Solr index that was created by the \ref keyword_search_page.
|
||||
|
||||
\subsection report_case_uco CASE-UCO
|
||||
|
||||
This module creates a JSON output file in <a href="https://github.com/ucoProject/CASE/wiki">CASE-UCO</a> format for a single data source.
|
||||
|
@ -4,20 +4,37 @@
|
||||
|
||||
|
||||
The tree on the left-hand side of the main window is where you can browse the files in the data sources in the case and find saved results from automated analysis (ingest). The tree has five main areas:
|
||||
- <b>Data Sources:</b> This shows the directory tree hierarchy of the data sources. You can navigate to a specific file or directory here. Each data source added to the case is represented as a distinct sub tree. If you add a data source multiple times, it shows up multiple times.
|
||||
- <b>Persons / Hosts / Data Sources:</b> This shows the directory tree hierarchy of the data sources. You can navigate to a specific file or directory here. Each data source added to the case is represented as a distinct sub tree. If you add a data source multiple times, it shows up multiple times.
|
||||
- <b>Views:</b> Specific types of files from the data sources are shown here, aggregated by type or other properties. Files here can come from more than one data source.
|
||||
- <b>Results:</b> This is where you can see the results from both the automated analysis (ingest) running in the background and your search results.
|
||||
- <b>Tags:</b> This is where files and results that have been \ref tagging_page "tagged" are shown.
|
||||
- <b>Reports:</b> Reports that you have generated, or that ingest modules have created, show up here.
|
||||
|
||||
You can also use the "Group by data source" option available through the \ref view_options_page to move the Views, Results, and Tags tree nodes under their corresponding data sources. This can be helpful on very large cases to reduce the size of each sub tree. For example:
|
||||
You can also use the "Group by Person/Host" option available through the \ref view_options_page to move the Views, Results, and Tags tree nodes under their corresponding person and host. This can be helpful on very large cases to reduce the size of each sub tree.
|
||||
|
||||
\image html ui_layout_group_tree.PNG
|
||||
\section ui_tree_ds Persons / Hosts / Data Sources
|
||||
By default, the top node of the tree viewer will contain all data sources in the case. The Data Sources node is organized by host and then the data source itself. Right clicking on the various nodes in the Data Sources area of the tree will allow you to get more options for each data source and its contents.
|
||||
|
||||
\section ui_tree_ds Data Sources
|
||||
\image html ui_tree_top_ds.png
|
||||
|
||||
The Data Sources area shows each data source that has been added to the case, in order added (top one is first).
|
||||
Right clicking on the various nodes in the Data Sources area of the tree will allow you to get more options for each data source and its contents.
|
||||
If the "Group by Person/Host" option has been selected in the \ref view_options_group "View Options", the hosts and data sources will be organized under any persons that have been associated with the hosts. Additionally, the rest of the nodes (Views, Results, etc) will be found under each data source.
|
||||
|
||||
\image html ui_tree_top_persons.png
|
||||
|
||||
\subsection ui_tree_persons Persons
|
||||
|
||||
If the "Group by Person/Host" option in the \ref view_options_group "View Options" has been set, the top level nodes will display persons. Persons are manually created and can be associated with one or more hosts. To add or remove a person from a host, right-click on the host and select the appropriate option.
|
||||
|
||||
\image html ui_person_select.png
|
||||
|
||||
You can edit and delete persons by right-clicking on the node.
|
||||
|
||||
\subsection ui_tree_hosts Hosts
|
||||
|
||||
All data sources are organized under host nodes. See the \ref host_page "hosts page" for more information on using hosts.
|
||||
|
||||
\subsection ui_tree_ds_node Data Sources
|
||||
Under the hosts are the nodes for each data source.
|
||||
|
||||
Unallocated space is the chunks of a file system that are currently not being used for anything. Unallocated space can hold deleted files and other interesting artifacts. In an image data source, unallocated space is stored in blocks with distinct locations in the file system. However, because of the way carving tools work, it is better to feed these tools a single, large unallocated space file. Autopsy provides access to both methods of looking at unallocated space.
|
||||
\li <b>Individual blocks in a volume</b> For each volume, there is a "virtual" folder named "$Unalloc". This folder contains all the individual unallocated blocks in contiguous runs (unallocated space files) as the image is storing them. You can right click and extract any unallocated space file the same way you can extract any other type of file in the Data Sources area.
|
||||
|
@ -9,18 +9,19 @@ If you are experiencing an error, we encourage you to post on the forum (https:/
|
||||
<li>What led to the error. For example:
|
||||
<ul>
|
||||
<li>What type of data source was being processed?
|
||||
<li>Which ingest modules were running?
|
||||
<li>Which ingest modules were running? You can generate an \ref ingest_monitoring "ingest snapshot" to view the current ingest state.
|
||||
<li>Which specialized viewer were you using?
|
||||
</ul>
|
||||
<li>The error being displayed on screen (if applicable)
|
||||
<li>A \ref troubleshooting_stack "thread dump" or screenshot of the \ref ingest_monitoring "ingest snapshot" if Autopsy seems stuck
|
||||
<li>If there were any errors in the \ref troubleshooting_logs "logs"
|
||||
</ul>
|
||||
|
||||
\section troubleshooting_specific_issues Specific Issues
|
||||
|
||||
\subsection troubleshooting_fond_size Font Size Too Small in Windows
|
||||
\subsection troubleshooting_fond_size Font Size Too Small
|
||||
|
||||
Make the following changes if the application is hard to navigate in High DPI systems:
|
||||
In Windows, you can make the following changes if the application is hard to navigate in High DPI systems:
|
||||
|
||||
<ol>
|
||||
<li>Right-click on the application icon on your Desktop, Start Menu, etc.
|
||||
@ -32,8 +33,18 @@ Make the following changes if the application is hard to navigate in High DPI sy
|
||||
<li>Restart Autopsy.
|
||||
</ol>
|
||||
|
||||
In Linux, you can supply the font size with the "--fontsize XX" command-line argument, but not all of the dialogs are correctly responsive and some of the text will get cut off.
|
||||
|
||||
\section troubleshooting_general General Troubleshooting
|
||||
|
||||
\subsection troubleshooting_reset_ui Resetting the UI
|
||||
|
||||
If the Autopsy window no longer looks like the default \ref uilayout_page (for example, if a viewer has disappeared or there is a strange empty space), you can reset it. To do this, go to Window->Reset Windows. This will cause Autopsy to restart. If you have a case open, it will reopen after the reset.
|
||||
|
||||
\image html reset_windows.png
|
||||
|
||||
If resetting the windows does not fix the problem, you may need to delete your user folder as described in the next section.
|
||||
|
||||
\subsection troubleshooting_user_folder Deleting the Autopsy User Folder
|
||||
|
||||
If Autopsy starts behaving strangely, stops loading entirely, or menu items go missing, you probably need to delete your user folder. Doing so essentially gives you a fresh installation. On Windows the user folder is located in "C:\Users\(user name)\AppData\Roaming\autopsy".
|
||||
|
@ -66,11 +66,15 @@ If you have a \ref machine_translation_page module installed, this option will a
|
||||
|
||||
The settings in this section only apply to the current case.
|
||||
|
||||
\subsection view_options_group Group by data source
|
||||
\subsection view_options_group Data Source Grouping
|
||||
|
||||
The "Group by data source" option allows you to separate all elements in the \ref ui_tree by data source. This can help nodes load faster on large cases.
|
||||
The options here allow you to choose how to display data in the \ref ui_tree. The top option ("Group by Data Type") displays combined results for all data sources. All nodes on the tree will contain combined results for all data sources in the case.
|
||||
|
||||
\image html ui_layout_group_tree.PNG
|
||||
\image html views_standard_tree.png
|
||||
|
||||
The second option ("Group by Person/Host") separates the results for each data source, and organizes the data sources by \ref ui_tree_persons "person" and \ref ui_tree_hosts "host".
|
||||
|
||||
\image html views_grouped_tree.png
|
||||
|
||||
\section view_options_session Current Session Settings
|
||||
|
||||
|
1
thirdparty/NetbeansLocalization/README.txt
vendored
Normal file
@ -0,0 +1 @@
|
||||
This contains jars provided in Netbeans 8 RCP that provide localization bundles. They do not appear to be included in Netbeans >= 9. See Jira 7434 for more information.
|