Merge remote-tracking branch 'upstream/develop' into 7261_addHostsToProcess

This commit is contained in:
apriestman 2021-02-11 09:44:51 -05:00
commit cfa8700bb5
64 changed files with 1438 additions and 492 deletions

6
.gitattributes vendored
View File

@ -13,3 +13,9 @@ Doxyfile text
*.py text diff=python
*.pl text
# ensure solr scripts that are bash scripts not ending with .sh are lf instead of crlf
/KeywordSearch/solr/bin/autopsy-solr eol=lf
/KeywordSearch/solr/bin/init.d/solr eol=lf
/KeywordSearch/solr/bin/post eol=lf
/KeywordSearch/solr/bin/solr eol=lf

View File

@ -183,11 +183,32 @@ public class ImageDSProcessor implements DataSourceProcessor, AutoIngestDataSour
*/
@Override
public void run(DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
run(null, progressMonitor, callback);
}
/**
* Adds a data source to the case database using a background task in a
* separate thread and the settings provided by the selection and
* configuration panel. Returns as soon as the background task is started.
* The background task uses a callback object to signal task completion and
* return results.
*
* This method should not be called unless isPanelValid returns true.
*
* @param host Host for this data source.
* @param progressMonitor Progress monitor that will be used by the
* background task to report progress.
* @param callback Callback that will be used by the background task
* to return results.
*/
@Override
public void run(Host host, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
ingestStream = new DefaultIngestStream();
readConfigSettings();
this.host = host;
try {
image = SleuthkitJNI.addImageToDatabase(Case.getCurrentCase().getSleuthkitCase(),
new String[]{imagePath}, sectorSize, timeZone, md5, sha1, sha256, deviceId);
new String[]{imagePath}, sectorSize, timeZone, md5, sha1, sha256, deviceId, this.host);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error adding data source with path " + imagePath + " to database", ex);
final List<String> errors = new ArrayList<>();
@ -219,14 +240,47 @@ public class ImageDSProcessor implements DataSourceProcessor, AutoIngestDataSour
@Override
public void runWithIngestStream(IngestJobSettings settings, DataSourceProcessorProgressMonitor progress,
DataSourceProcessorCallback callBack) {
runWithIngestStream(null, settings, progress, callBack);
}
/**
* Adds a data source to the case database using a background task in a
* separate thread and the settings provided by the selection and
* configuration panel. Files found during ingest will be sent directly to the
* IngestStream provided. Returns as soon as the background task is started.
* The background task uses a callback object to signal task completion and
* return results.
*
* This method should not be called unless isPanelValid returns true, and
* should only be called for DSPs that support ingest streams.
*
* @param host The host for this data source.
* @param settings The ingest job settings.
* @param progress Progress monitor that will be used by the
* background task to report progress.
* @param callBack Callback that will be used by the background task
* to return results.
*/
@Override
public void runWithIngestStream(Host host, IngestJobSettings settings, DataSourceProcessorProgressMonitor progress,
DataSourceProcessorCallback callBack) {
// Read the settings from the wizard
readConfigSettings();
this.host = host;
// HOSTTODO - remove once passing in a host
try {
this.host = Case.getCurrentCase().getSleuthkitCase().getHostManager().getOrCreateHost("ImageDSProcessor Host");
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creating/loading host", ex);
this.host = null;
}
// Set up the data source before creating the ingest stream
try {
image = SleuthkitJNI.addImageToDatabase(Case.getCurrentCase().getSleuthkitCase(),
new String[]{imagePath}, sectorSize, timeZone, md5, sha1, sha256, deviceId, host);
new String[]{imagePath}, sectorSize, timeZone, md5, sha1, sha256, deviceId, this.host);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error adding data source with path " + imagePath + " to database", ex);
final List<String> errors = new ArrayList<>();
@ -273,14 +327,6 @@ public class ImageDSProcessor implements DataSourceProcessor, AutoIngestDataSour
if (sha256.isEmpty()) {
sha256 = null;
}
// HOSTTODO - this will come from the config panel
try {
host = Case.getCurrentCase().getSleuthkitCase().getHostManager().getOrCreateHost("ImageDSProcessor Host");
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creating/loading host", ex);
host = null;
}
}
}

View File

@ -137,6 +137,26 @@ public class LocalDiskDSProcessor implements DataSourceProcessor {
*/
@Override
public void run(DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
run(null, progressMonitor, callback);
}
/**
* Adds a data source to the case database using a background task in a
* separate thread and the settings provided by the selection and
* configuration panel. Returns as soon as the background task is started.
* The background task uses a callback object to signal task completion and
* return results.
*
* This method should not be called unless isPanelValid returns true.
*
* @param host Host for this data source.
* @param progressMonitor Progress monitor that will be used by the
* background task to report progress.
* @param callback Callback that will be used by the background task
* to return results.
*/
@Override
public void run(Host host, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
if (!setDataSourceOptionsCalled) {
deviceId = UUID.randomUUID().toString();
drivePath = configPanel.getContentPath();
@ -148,21 +168,22 @@ public class LocalDiskDSProcessor implements DataSourceProcessor {
} else {
imageWriterSettings = null;
}
}
this.host = host;
// HOSTTODO - set to value from config panel
try {
host = Case.getCurrentCase().getSleuthkitCase().getHostManager().getOrCreateHost("LocalDiskDSProcessor Host");
this.host = Case.getCurrentCase().getSleuthkitCase().getHostManager().getOrCreateHost("LocalDiskDSProcessor Host");
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creating/loading host", ex);
host = null;
}
this.host = null;
}
Image image;
try {
image = SleuthkitJNI.addImageToDatabase(Case.getCurrentCase().getSleuthkitCase(),
new String[]{drivePath}, sectorSize,
timeZone, null, null, null, deviceId, host);
timeZone, null, null, null, deviceId, this.host);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error adding local disk with path " + drivePath + " to database", ex);
final List<String> errors = new ArrayList<>();
@ -172,7 +193,7 @@ public class LocalDiskDSProcessor implements DataSourceProcessor {
}
addDiskTask = new AddImageTask(
new AddImageTask.ImageDetails(deviceId, image, sectorSize, timeZone, ignoreFatOrphanFiles, null, null, null, host, imageWriterSettings),
new AddImageTask.ImageDetails(deviceId, image, sectorSize, timeZone, ignoreFatOrphanFiles, null, null, null, this.host, imageWriterSettings),
progressMonitor,
new StreamingAddDataSourceCallbacks(new DefaultIngestStream()),
new StreamingAddImageTaskCallback(new DefaultIngestStream(), callback));

View File

@ -155,10 +155,29 @@ public class LocalFilesDSProcessor implements DataSourceProcessor, AutoIngestDat
*/
@Override
public void run(DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
Host host = null;
run(null, progressMonitor, callback);
}
/**
* Adds a data source to the case database using a background task in a
* separate thread and the settings provided by the selection and
* configuration panel. Returns as soon as the background task is started.
* The background task uses a callback object to signal task completion and
* return results.
*
* This method should not be called unless isPanelValid returns true.
*
* @param host Host for this data source.
* @param progressMonitor Progress monitor that will be used by the
* background task to report progress.
* @param callback Callback that will be used by the background task
* to return results.
*/
@Override
public void run(Host host, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
if (!setDataSourceOptionsCalled) {
// HOSTTODO - set to value from config panel
// HOSTTODO - use passed in value
try {
host = Case.getCurrentCase().getSleuthkitCase().getHostManager().getOrCreateHost("LocalFilesDSProcessor Host");
} catch (TskCoreException ex) {

View File

@ -61,19 +61,19 @@ final public class TagNameDefinition implements Comparable<TagNameDefinition> {
private final TskData.FileKnown knownStatus;
private static final List<TagNameDefinition> STANDARD_TAGS_DEFINITIONS = new ArrayList<>();
private static final List<String> OLD_CATEGORY_TAG_NAMES = new ArrayList<>();
private static final List<String> PROJECT_VIC_NAMES_NO_LONGER_USED = new ArrayList<>();
static {
STANDARD_TAGS_DEFINITIONS.add(new TagNameDefinition(Bundle.TagNameDefinition_predefTagNames_bookmark_text(), "", TagName.HTML_COLOR.NONE, TskData.FileKnown.UNKNOWN));
STANDARD_TAGS_DEFINITIONS.add(new TagNameDefinition(Bundle.TagNameDefinition_predefTagNames_followUp_text(), "", TagName.HTML_COLOR.NONE, TskData.FileKnown.UNKNOWN));
STANDARD_TAGS_DEFINITIONS.add(new TagNameDefinition(Bundle.TagNameDefinition_predefTagNames_notableItem_text(), "", TagName.HTML_COLOR.NONE, TskData.FileKnown.BAD));
OLD_CATEGORY_TAG_NAMES.add("CAT-1: Child Exploitation (Illegal)");
OLD_CATEGORY_TAG_NAMES.add("CAT-2: Child Exploitation (Non-Illegal/Age Difficult)");
OLD_CATEGORY_TAG_NAMES.add("CAT-3: CGI/Animation (Child Exploitive)");
OLD_CATEGORY_TAG_NAMES.add("CAT-4: Exemplar/Comparison (Internal Use Only)");
OLD_CATEGORY_TAG_NAMES.add("CAT-5: Non-pertinent");
OLD_CATEGORY_TAG_NAMES.add("CAT-0: Uncategorized");
PROJECT_VIC_NAMES_NO_LONGER_USED.add("CAT-1: Child Exploitation (Illegal)");
PROJECT_VIC_NAMES_NO_LONGER_USED.add("CAT-2: Child Exploitation (Non-Illegal/Age Difficult)");
PROJECT_VIC_NAMES_NO_LONGER_USED.add("CAT-3: CGI/Animation (Child Exploitive)");
PROJECT_VIC_NAMES_NO_LONGER_USED.add("CAT-4: Exemplar/Comparison (Internal Use Only)");
PROJECT_VIC_NAMES_NO_LONGER_USED.add("CAT-5: Non-pertinent");
PROJECT_VIC_NAMES_NO_LONGER_USED.add("CAT-0: Uncategorized");
}
/**
@ -259,7 +259,7 @@ final public class TagNameDefinition implements Comparable<TagNameDefinition> {
*/
static synchronized Set<TagNameDefinition> getTagNameDefinitions() {
if (needsVersionUpdate()) {
updateTagDefinitions();
updatePropertyFile();
}
String tagsProperty = ModuleSettings.getConfigSetting(TAGS_SETTINGS_NAME, TAG_NAMES_SETTING_KEY);
@ -311,7 +311,7 @@ final public class TagNameDefinition implements Comparable<TagNameDefinition> {
/**
* Updates the Tag Definition file to the current format.
*/
private static void updateTagDefinitions() {
private static void updatePropertyFile() {
Integer version = getPropertyFileVersion();
List<TagNameDefinition> definitions = new ArrayList<>();
@ -355,18 +355,18 @@ final public class TagNameDefinition implements Comparable<TagNameDefinition> {
}
// Remove the standard and Project VIC tags from the list
List<String> tagStrings = new ArrayList<>();
List<String> tagStringsToKeep = new ArrayList<>();
List<String> standardTags = getStandardTagNames();
for (TagNameDefinition def : definitions) {
if (!standardTags.contains(def.getDisplayName())
&& !OLD_CATEGORY_TAG_NAMES.contains(def.getDisplayName())) {
tagStrings.add(def.toSettingsFormat());
&& !PROJECT_VIC_NAMES_NO_LONGER_USED.contains(def.getDisplayName())) {
tagStringsToKeep.add(def.toSettingsFormat());
}
}
// Write out the version and the new tag list.
ModuleSettings.setConfigSetting(TAGS_SETTINGS_NAME, TAG_SETTING_VERSION_KEY, Integer.toString(TAG_SETTINGS_VERSION));
ModuleSettings.setConfigSetting(TAGS_SETTINGS_NAME, TAG_NAMES_SETTING_KEY, String.join(";", tagStrings));
ModuleSettings.setConfigSetting(TAGS_SETTINGS_NAME, TAG_NAMES_SETTING_KEY, String.join(";", tagStringsToKeep));
}
/**

View File

@ -88,7 +88,7 @@ final public class TagSetDefinition {
}
/**
* Returns a list of the defined TagSet objects.
* Returns a list of configured TagSets (from the user's config folder)
*
* @return A list of TagSetDefinition objects or empty list if none were
* found.

View File

@ -55,7 +55,9 @@ public class TagsManager implements Closeable {
private static final Logger LOGGER = Logger.getLogger(TagsManager.class.getName());
private final SleuthkitCase caseDb;
private static String DEFAULT_TAG_SET_NAME = "Project VIC";
// NOTE: This name is also hard coded in the Image Gallery and Project Vic modules.
// They need to stay in sync.
private static String PROJECT_VIC_TAG_SET_NAME = "Project VIC";
private static final Object lock = new Object();
@ -196,7 +198,7 @@ public class TagsManager implements Closeable {
try {
List<TagSet> tagSetList = Case.getCurrentCaseThrows().getSleuthkitCase().getTaggingManager().getTagSets();
for (TagSet tagSet : tagSetList) {
if (tagSet.getName().equals(DEFAULT_TAG_SET_NAME)) {
if (tagSet.getName().equals(PROJECT_VIC_TAG_SET_NAME)) {
for (TagName tagName : tagSet.getTagNames()) {
tagList.add(tagName.getDisplayName());
}
@ -237,7 +239,7 @@ public class TagsManager implements Closeable {
}
/**
* Creates a new TagSetDefinition file.
* Creates a new TagSetDefinition file that will be used for future cases
*
* @param tagSetDef The tag set definition.
*
@ -258,23 +260,26 @@ public class TagsManager implements Closeable {
TagsManager(SleuthkitCase caseDb) {
this.caseDb = caseDb;
// Add standard tags and the Project VIC default tag set and tags.
// Add standard tags and any configured tag sets.
TaggingManager taggingMgr = caseDb.getTaggingManager();
try {
List<TagSet> setList = taggingMgr.getTagSets();
if (setList.isEmpty()) {
List<TagSet> tagSetsInCase = taggingMgr.getTagSets();
if (tagSetsInCase.isEmpty()) {
// add the standard tag names
for (TagNameDefinition def : TagNameDefinition.getStandardTagNameDefinitions()) {
caseDb.addOrUpdateTagName(def.getDisplayName(), def.getDescription(), def.getColor(), def.getKnownStatus());
}
//Assume new case and add tag sets
//Assume new case and add all tag sets
for (TagSetDefinition setDef : TagSetDefinition.readTagSetDefinitions()) {
List<TagName> tagNameList = new ArrayList<>();
List<TagName> tagNamesInSet = new ArrayList<>();
for (TagNameDefinition tagNameDef : setDef.getTagNameDefinitions()) {
tagNameList.add(caseDb.addOrUpdateTagName(tagNameDef.getDisplayName(), tagNameDef.getDescription(), tagNameDef.getColor(), tagNameDef.getKnownStatus()));
tagNamesInSet.add(caseDb.addOrUpdateTagName(tagNameDef.getDisplayName(), tagNameDef.getDescription(), tagNameDef.getColor(), tagNameDef.getKnownStatus()));
}
if (!tagNameList.isEmpty()) {
taggingMgr.addTagSet(setDef.getName(), tagNameList);
if (!tagNamesInSet.isEmpty()) {
taggingMgr.addTagSet(setDef.getName(), tagNamesInSet);
}
}
}

View File

@ -27,6 +27,7 @@ import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.validator.routines.DomainValidator;
import org.apache.commons.validator.routines.EmailValidator;
import org.sleuthkit.autopsy.coreutils.NetworkUtils;
/**
* Provides functions for normalizing data by attribute type before insertion or
@ -144,11 +145,11 @@ final public class CorrelationAttributeNormalizer {
private static String normalizeDomain(String data) throws CorrelationAttributeNormalizationException {
DomainValidator validator = DomainValidator.getInstance(true);
if (validator.isValid(data)) {
return data.toLowerCase();
return NetworkUtils.extractDomain(data.toLowerCase());
} else {
final String validIpAddressRegex = "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$";
if (data.matches(validIpAddressRegex)) {
return data;
return NetworkUtils.extractDomain(data);
} else {
throw new CorrelationAttributeNormalizationException(String.format("Data was expected to be a valid domain: %s", data));
}

View File

@ -28,6 +28,7 @@ import org.openide.util.NbPreferences;
import org.python.icu.util.TimeZone;
import org.sleuthkit.autopsy.machinesettings.UserMachinePreferences;
import org.sleuthkit.autopsy.coreutils.ModuleSettings;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.coreutils.TextConverterException;
import org.sleuthkit.autopsy.coreutils.Version;
import org.sleuthkit.datamodel.CaseDbConnectionInfo;
@ -84,7 +85,8 @@ public final class UserPreferences {
private static final boolean DISPLAY_TRANSLATED_NAMES_DEFAULT = true;
public static final String EXTERNAL_HEX_EDITOR_PATH = "ExternalHexEditorPath";
public static final String SOLR_MAX_JVM_SIZE = "SolrMaxJVMSize";
private static final int DEFAULT_SOLR_HEAP_SIZE_MB = 2048;
private static final int DEFAULT_SOLR_HEAP_SIZE_MB_64BIT_PLATFORM = 2048;
private static final int DEFAULT_SOLR_HEAP_SIZE_MB_32BIT_PLATFORM = 512;
public static final String RESULTS_TABLE_PAGE_SIZE = "ResultsTablePageSize";
private static final String GEO_TILE_OPTION = "GeolocationTileOption";
private static final String GEO_OSM_TILE_ZIP_PATH = "GeolocationOsmZipPath";
@ -534,12 +536,17 @@ public final class UserPreferences {
}
/**
* Get the maximum JVM heap size (in MB) for the embedded Solr server.
* Get the maximum JVM heap size (in MB) for the embedded Solr server. The returned value
* depends on the platform (64bit vs 32bit).
*
* @return Saved value or default (2 GB)
* @return Saved value or default (2 GB for 64-bit platforms, 512 MB for 32-bit)
*/
public static int getMaxSolrVMSize() {
return preferences.getInt(SOLR_MAX_JVM_SIZE, DEFAULT_SOLR_HEAP_SIZE_MB);
if (PlatformUtil.is64BitJVM()) {
return preferences.getInt(SOLR_MAX_JVM_SIZE, DEFAULT_SOLR_HEAP_SIZE_MB_64BIT_PLATFORM);
} else {
return preferences.getInt(SOLR_MAX_JVM_SIZE, DEFAULT_SOLR_HEAP_SIZE_MB_32BIT_PLATFORM);
}
}
/**

View File

@ -20,6 +20,7 @@ package org.sleuthkit.autopsy.corecomponentinterfaces;
import javax.swing.JPanel;
import org.sleuthkit.autopsy.ingest.IngestJobSettings;
import org.sleuthkit.datamodel.Host;
/**
* Interface implemented by classes that add data sources of a particular type
@ -109,6 +110,25 @@ public interface DataSourceProcessor {
*/
void run(DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback);
/**
* Adds a data source to the case database using a background task in a
* separate thread and the settings provided by the selection and
* configuration panel. Returns as soon as the background task is started.
* The background task uses a callback object to signal task completion and
* return results.
*
* This method should not be called unless isPanelValid returns true.
*
* @param host Host for the data source.
* @param progressMonitor Progress monitor that will be used by the
* background task to report progress.
* @param callback Callback that will be used by the background task
* to return results.
*/
default void run(Host host, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
run(progressMonitor, callback);
}
/**
* Adds a data source to the case database using a background task in a
* separate thread and the settings provided by the selection and
@ -132,6 +152,30 @@ public interface DataSourceProcessor {
throw new UnsupportedOperationException("Streaming ingest not supported for this data source processor");
}
/**
* Adds a data source to the case database using a background task in a
* separate thread and the settings provided by the selection and
* configuration panel. Files found during ingest will be sent directly to
* the IngestStream provided. Returns as soon as the background task is
* started. The background task uses a callback object to signal task
* completion and return results.
*
* This method should not be called unless isPanelValid returns true, and
* should only be called for DSPs that support ingest streams. The ingest
* settings must be complete before calling this method.
*
* @param host Host for this data source.
* @param settings The ingest job settings.
* @param progress Progress monitor that will be used by the background task
* to report progress.
* @param callBack Callback that will be used by the background task to
* return results.
*/
default void runWithIngestStream(Host host, IngestJobSettings settings, DataSourceProcessorProgressMonitor progress,
DataSourceProcessorCallback callBack) {
runWithIngestStream(settings, progress, callBack);
}
/**
* Check if this DSP supports ingest streams.
*

View File

@ -30,6 +30,7 @@ import org.openide.util.NbBundle;
/**
* Enum to represent the six categories in the DHS image categorization scheme.
* NOTE: This appears to no longer be used anywhere after the ImageGallery refactoring.
*/
@NbBundle.Messages({
"Category.one=CAT-1: Child Exploitation (Illegal)",

View File

@ -389,11 +389,6 @@ public final class FileTypes implements AutopsyVisitableItem {
return content.newArtifact(artifactTypeID);
}
@Override
public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection<BlackboardAttribute> attributesList) throws TskCoreException {
return content.newAnalysisResult(artifactType, score, conclusion, configuration, justification, attributesList);
}
@Override
public BlackboardArtifact newArtifact(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException {
return content.newArtifact(type);
@ -434,15 +429,6 @@ public final class FileTypes implements AutopsyVisitableItem {
return content.getAllArtifacts();
}
@Override
public List<AnalysisResult> getAllAnalysisResults() throws TskCoreException {
return content.getAllAnalysisResults();
}
public List<AnalysisResult> getAnalysisResults(BlackboardArtifact.Type artifactType) throws TskCoreException {
return content.getAnalysisResults(artifactType);
}
@Override
public Set<String> getHashSetNames() throws TskCoreException {
return content.getHashSetNames();
@ -468,9 +454,24 @@ public final class FileTypes implements AutopsyVisitableItem {
return content.getAllArtifactsCount();
}
@Override
public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type type, Score score, String string, String string1, String string2, Collection<BlackboardAttribute> clctn) throws TskCoreException {
return content.newAnalysisResult(type, score, string, string1, string2, clctn);
}
@Override
public Score getAggregateScore() throws TskCoreException {
return content.getAggregateScore();
}
@Override
public List<AnalysisResult> getAnalysisResults(BlackboardArtifact.Type type) throws TskCoreException {
return content.getAnalysisResults(type);
}
@Override
public List<AnalysisResult> getAllAnalysisResults() throws TskCoreException {
return content.getAllAnalysisResults();
}
}
}

View File

@ -138,10 +138,29 @@ public class RawDSProcessor implements DataSourceProcessor, AutoIngestDataSource
*/
@Override
public void run(DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
run(null, progressMonitor, callback);
}
/**
* Adds a data source to the case database using a background task in a
* separate thread and the settings provided by the selection and
* configuration panel. Returns as soon as the background task is started.
* The background task uses a callback object to signal task completion and
* return results.
*
* This method should not be called unless isPanelValid returns true.
*
* @param host Host for the data source.
* @param progressMonitor Progress monitor that will be used by the
* background task to report progress.
* @param callback Callback that will be used by the background task
* to return results.
*/
@Override
public void run(Host host, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
configPanel.storeSettings();
// HOSTTODO - replace with a call to configPanel().getHost()
Host host;
// HOSTTODO - use passed in value
try {
host = Case.getCurrentCase().getSleuthkitCase().getHostManager().getOrCreateHost("RawDSProcessor Host");
} catch (TskCoreException ex) {

View File

@ -194,14 +194,33 @@ public class XRYDataSourceProcessor implements DataSourceProcessor, AutoIngestDa
* in isPanelValid().
*/
@Override
public void run(DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
run(null, progressMonitor, callback);
}
/**
* Processes the XRY folder that the examiner selected. The heavy lifting is
* done off of the EDT, so this function will return while the
* path is still being processed.
*
* This function assumes the calling thread has sufficient privileges to
* read the folder and its child content, which should have been validated
* in isPanelValid().
*
* @param host Host for the data source.
* @param progressMonitor Progress monitor that will be used by the
* background task to report progress.
* @param callback Callback that will be used by the background task
* to return results.
*/
@Override
@NbBundle.Messages({
"XRYDataSourceProcessor.noCurrentCase=No case is open."
})
public void run(DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
public void run(Host host, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
progressMonitor.setIndeterminate(true);
// HOSTTODO - replace with a call to configPanel().getHost()
Host host;
// HOSTTODO - use passed in value
try {
host = Case.getCurrentCase().getSleuthkitCase().getHostManager().getOrCreateHost("XRYDSProcessor Host");
} catch (TskCoreException ex) {

View File

@ -18,6 +18,7 @@
*/
package org.sleuthkit.autopsy.discovery.ui;
import java.awt.Dimension;
import java.awt.Point;
import java.util.ArrayList;
import java.util.List;
@ -63,6 +64,7 @@ final class ArtifactsListPanel extends AbstractArtifactListPanel {
for (int i = 0; i < tableModel.getColumnCount(); ++i) {
artifactsTable.getColumnModel().getColumn(i).setCellRenderer(renderer);
}
setMinimumSize(new Dimension(125, 20));
artifactsTable.getRowSorter().toggleSortOrder(0);
artifactsTable.getRowSorter().toggleSortOrder(0);
}

View File

@ -4,8 +4,11 @@
<NonVisualComponents>
<Container class="javax.swing.JSplitPane" name="mainSplitPane">
<Properties>
<Property name="dividerLocation" type="int" value="350"/>
<Property name="resizeWeight" type="double" value="0.1"/>
<Property name="dividerLocation" type="int" editor="org.netbeans.modules.form.RADConnectionPropertyEditor">
<Connection code="dividerLocation" type="code"/>
</Property>
<Property name="resizeWeight" type="double" value="0.2"/>
<Property name="lastDividerLocation" type="int" value="250"/>
</Properties>
<Layout class="org.netbeans.modules.form.compat2.layouts.support.JSplitPaneSupportLayout"/>

View File

@ -20,8 +20,12 @@ package org.sleuthkit.autopsy.discovery.ui;
import org.sleuthkit.autopsy.contentviewers.artifactviewers.GeneralPurposeArtifactViewer;
import com.google.common.eventbus.Subscribe;
import java.awt.Dimension;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.logging.Level;
import javax.swing.JPanel;
import javax.swing.JSplitPane;
import javax.swing.SwingUtilities;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
@ -41,13 +45,18 @@ final class DomainArtifactsTabPanel extends JPanel {
private final ArtifactsListPanel listPanel;
private final BlackboardArtifact.ARTIFACT_TYPE artifactType;
private AbstractArtifactDetailsPanel rightPanel = null;
private int dividerLocation = 300;
private final PropertyChangeListener dividerListener;
private ArtifactRetrievalStatus status = ArtifactRetrievalStatus.UNPOPULATED;
private final ListSelectionListener listener = new ListSelectionListener() {
@Override
public void valueChanged(ListSelectionEvent event) {
if (!event.getValueIsAdjusting()) {
mainSplitPane.removePropertyChangeListener(dividerListener);
rightPanel.setArtifact(listPanel.getSelectedArtifact());
mainSplitPane.setDividerLocation(dividerLocation);
mainSplitPane.addPropertyChangeListener(JSplitPane.DIVIDER_LOCATION_PROPERTY, dividerListener);
}
}
};
@ -60,12 +69,27 @@ final class DomainArtifactsTabPanel extends JPanel {
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
DomainArtifactsTabPanel(BlackboardArtifact.ARTIFACT_TYPE type) {
initComponents();
dividerListener = new PropertyChangeListener() {
@Override
public void propertyChange(PropertyChangeEvent evt) {
if (evt.getPropertyName().equalsIgnoreCase(JSplitPane.DIVIDER_LOCATION_PROPERTY)
&& evt.getNewValue() instanceof Integer
&& evt.getOldValue() instanceof Integer
&& (JSplitPane.UNDEFINED_CONDITION != (int) evt.getNewValue())) {
dividerLocation = (int) evt.getNewValue();
}
}
};
this.artifactType = type;
listPanel = new ArtifactsListPanel(artifactType);
listPanel.setPreferredSize(new Dimension(100, 20));
listPanel.addMouseListener(new ArtifactMenuMouseAdapter(listPanel));
mainSplitPane.setLeftComponent(listPanel);
add(mainSplitPane);
setRightComponent();
mainSplitPane.addPropertyChangeListener(JSplitPane.DIVIDER_LOCATION_PROPERTY, dividerListener);
dividerLocation = mainSplitPane.getDividerLocation();
listPanel.addSelectionListener(listener);
}
@ -121,6 +145,7 @@ final class DomainArtifactsTabPanel extends JPanel {
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
void setStatus(ArtifactRetrievalStatus status) {
this.status = status;
mainSplitPane.removePropertyChangeListener(dividerListener);
if (status == ArtifactRetrievalStatus.UNPOPULATED) {
listPanel.clearList();
removeAll();
@ -132,6 +157,8 @@ final class DomainArtifactsTabPanel extends JPanel {
removeAll();
add(new LoadingPanel(artifactType.getDisplayName()));
}
mainSplitPane.setDividerLocation(dividerLocation);
mainSplitPane.addPropertyChangeListener(JSplitPane.DIVIDER_LOCATION_PROPERTY, dividerListener);
}
/**
@ -144,6 +171,7 @@ final class DomainArtifactsTabPanel extends JPanel {
void handleArtifactSearchResultEvent(DiscoveryEventUtils.ArtifactSearchResultEvent artifactresultEvent) {
if (artifactType == artifactresultEvent.getArtifactType() && status == ArtifactRetrievalStatus.POPULATING) {
SwingUtilities.invokeLater(() -> {
mainSplitPane.removePropertyChangeListener(dividerListener);
listPanel.removeSelectionListener(listener);
listPanel.addArtifacts(artifactresultEvent.getListOfArtifacts());
status = ArtifactRetrievalStatus.POPULATED;
@ -152,6 +180,8 @@ final class DomainArtifactsTabPanel extends JPanel {
listPanel.selectFirst();
removeAll();
add(mainSplitPane);
mainSplitPane.setDividerLocation(dividerLocation);
mainSplitPane.addPropertyChangeListener(JSplitPane.DIVIDER_LOCATION_PROPERTY, dividerListener);
if (artifactresultEvent.shouldGrabFocus()) {
focusList();
}
@ -188,8 +218,9 @@ final class DomainArtifactsTabPanel extends JPanel {
mainSplitPane = new javax.swing.JSplitPane();
mainSplitPane.setDividerLocation(350);
mainSplitPane.setResizeWeight(0.1);
mainSplitPane.setDividerLocation(dividerLocation);
mainSplitPane.setResizeWeight(0.2);
mainSplitPane.setLastDividerLocation(250);
setCursor(new java.awt.Cursor(java.awt.Cursor.DEFAULT_CURSOR));
setMinimumSize(new java.awt.Dimension(0, 0));

View File

@ -18,6 +18,7 @@
*/
package org.sleuthkit.autopsy.discovery.ui;
import java.awt.Dimension;
import java.awt.Point;
import java.util.ArrayList;
import java.util.List;
@ -39,7 +40,7 @@ import org.sleuthkit.datamodel.TskCoreException;
/**
* Panel to display list of artifacts types and descriptions.
*/
class MiniTimelineArtifactListPanel extends AbstractArtifactListPanel {
final class MiniTimelineArtifactListPanel extends AbstractArtifactListPanel {
private static final long serialVersionUID = 1L;
private final TypeDescriptionTableModel tableModel;
@ -61,6 +62,7 @@ class MiniTimelineArtifactListPanel extends AbstractArtifactListPanel {
for (int i = 0; i < tableModel.getColumnCount(); ++i) {
artifactsTable.getColumnModel().getColumn(i).setCellRenderer(renderer);
}
setMinimumSize(new Dimension(125, 20));
artifactsTable.getRowSorter().toggleSortOrder(0);
artifactsTable.getRowSorter().toggleSortOrder(0);
}

View File

@ -18,6 +18,7 @@
*/
package org.sleuthkit.autopsy.discovery.ui;
import java.awt.Dimension;
import java.util.ArrayList;
import java.util.List;
import javax.swing.JPanel;
@ -33,7 +34,7 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
/**
* Panel to display list of dates and counts.
*/
class MiniTimelineDateListPanel extends JPanel {
final class MiniTimelineDateListPanel extends JPanel {
private static final long serialVersionUID = 1L;
private final DateCountTableModel tableModel = new DateCountTableModel();
@ -49,6 +50,7 @@ class MiniTimelineDateListPanel extends JPanel {
for (int i = 0; i < tableModel.getColumnCount(); ++i) {
jTable1.getColumnModel().getColumn(i).setCellRenderer(renderer);
}
setMinimumSize(new Dimension(125, 20));
jTable1.getRowSorter().toggleSortOrder(0);
jTable1.getRowSorter().toggleSortOrder(0);
}

View File

@ -4,8 +4,10 @@
<NonVisualComponents>
<Container class="javax.swing.JSplitPane" name="mainSplitPane">
<Properties>
<Property name="dividerLocation" type="int" value="400"/>
<Property name="resizeWeight" type="double" value="0.1"/>
<Property name="dividerLocation" type="int" editor="org.netbeans.modules.form.RADConnectionPropertyEditor">
<Connection code="mainSplitPaneDividerLocation" type="code"/>
</Property>
<Property name="resizeWeight" type="double" value="0.2"/>
<Property name="toolTipText" type="java.lang.String" value=""/>
<Property name="minimumSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
<Dimension value="[0, 0]"/>
@ -16,7 +18,9 @@
<SubComponents>
<Container class="javax.swing.JSplitPane" name="leftSplitPane">
<Properties>
<Property name="dividerLocation" type="int" value="198"/>
<Property name="dividerLocation" type="int" editor="org.netbeans.modules.form.RADConnectionPropertyEditor">
<Connection code="leftSplitPaneDividerLocation" type="code"/>
</Property>
<Property name="resizeWeight" type="double" value="0.5"/>
<Property name="minimumSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
<Dimension value="[0, 0]"/>

View File

@ -19,6 +19,10 @@
package org.sleuthkit.autopsy.discovery.ui;
import com.google.common.eventbus.Subscribe;
import java.awt.Dimension;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import javax.swing.JSplitPane;
import javax.swing.SwingUtilities;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
@ -42,6 +46,9 @@ final class MiniTimelinePanel extends javax.swing.JPanel {
private String selectedDomain = null;
private final ListSelectionListener artifactListener;
private final ListSelectionListener dateListener;
private int leftSplitPaneDividerLocation = 125;
private int mainSplitPaneDividerLocation = 300;
private final PropertyChangeListener dividerListener;
@NbBundle.Messages({"MiniTimelinePanel.loadingPanel.details=the Timeline view"})
/**
@ -62,22 +69,51 @@ final class MiniTimelinePanel extends javax.swing.JPanel {
} else {
rightPanel = new GeneralPurposeArtifactViewer();
}
leftSplitPane.removePropertyChangeListener(dividerListener);
mainSplitPane.removePropertyChangeListener(dividerListener);
mainSplitPane.setRightComponent(rightPanel.getComponent());
rightPanel.setArtifact(artifact);
mainSplitPane.setDividerLocation(mainSplitPaneDividerLocation);
leftSplitPane.setDividerLocation(leftSplitPaneDividerLocation);
mainSplitPane.setDividerLocation(mainSplitPaneDividerLocation);
leftSplitPane.setDividerLocation(leftSplitPaneDividerLocation);
leftSplitPane.addPropertyChangeListener(JSplitPane.DIVIDER_LOCATION_PROPERTY, dividerListener);
mainSplitPane.addPropertyChangeListener(JSplitPane.DIVIDER_LOCATION_PROPERTY, dividerListener);
validate();
repaint();
}
}
};
dividerListener = new PropertyChangeListener() {
@Override
public void propertyChange(PropertyChangeEvent evt) {
if (evt.getPropertyName().equalsIgnoreCase(JSplitPane.DIVIDER_LOCATION_PROPERTY)
&& evt.getNewValue() instanceof Integer
&& evt.getOldValue() instanceof Integer
&& (JSplitPane.UNDEFINED_CONDITION != (int) evt.getNewValue())) {
if (evt.getSource().equals(leftSplitPane)) {
leftSplitPaneDividerLocation = (int) evt.getNewValue();
} else if (evt.getSource().equals(mainSplitPane)) {
mainSplitPaneDividerLocation = (int) evt.getNewValue();
}
}
}
};
dateListener = new ListSelectionListener() {
@Override
public void valueChanged(ListSelectionEvent event) {
if (!event.getValueIsAdjusting()) {
artifactListPanel.removeSelectionListener(artifactListener);
leftSplitPane.removePropertyChangeListener(dividerListener);
mainSplitPane.removePropertyChangeListener(dividerListener);
artifactListPanel.clearList();
artifactListPanel.addArtifacts(dateListPanel.getArtifactsForSelectedDate());
artifactListPanel.addSelectionListener(artifactListener);
artifactListPanel.selectFirst();
mainSplitPane.setDividerLocation(mainSplitPaneDividerLocation);
leftSplitPane.setDividerLocation(leftSplitPaneDividerLocation);
leftSplitPane.addPropertyChangeListener(JSplitPane.DIVIDER_LOCATION_PROPERTY, dividerListener);
mainSplitPane.addPropertyChangeListener(JSplitPane.DIVIDER_LOCATION_PROPERTY, dividerListener);
validate();
repaint();
}
@ -85,10 +121,16 @@ final class MiniTimelinePanel extends javax.swing.JPanel {
};
dateListPanel.addSelectionListener(dateListener);
artifactListPanel.addSelectionListener(artifactListener);
dateListPanel.setPreferredSize(new Dimension(100, 20));
leftSplitPane.setLeftComponent(dateListPanel);
artifactListPanel.setPreferredSize(new Dimension(100, 20));
leftSplitPane.setRightComponent(artifactListPanel);
mainSplitPane.setRightComponent(rightPanel.getComponent());
add(mainSplitPane);
leftSplitPane.addPropertyChangeListener(JSplitPane.DIVIDER_LOCATION_PROPERTY, dividerListener);
mainSplitPane.addPropertyChangeListener(JSplitPane.DIVIDER_LOCATION_PROPERTY, dividerListener);
leftSplitPaneDividerLocation = leftSplitPane.getDividerLocation();
mainSplitPaneDividerLocation = mainSplitPane.getDividerLocation();
}
/**
@ -145,6 +187,8 @@ final class MiniTimelinePanel extends javax.swing.JPanel {
if (miniTimelineResultEvent.getDomain().equals(selectedDomain)) {
dateListPanel.removeListSelectionListener(dateListener);
artifactListPanel.removeSelectionListener(artifactListener);
leftSplitPane.removePropertyChangeListener(dividerListener);
mainSplitPane.removePropertyChangeListener(dividerListener);
dateListPanel.addArtifacts(miniTimelineResultEvent.getResultList());
status = DomainArtifactsTabPanel.ArtifactRetrievalStatus.POPULATED;
setEnabled(!dateListPanel.isEmpty());
@ -156,6 +200,10 @@ final class MiniTimelinePanel extends javax.swing.JPanel {
}
removeAll();
add(mainSplitPane);
mainSplitPane.setDividerLocation(mainSplitPaneDividerLocation);
leftSplitPane.setDividerLocation(leftSplitPaneDividerLocation);
leftSplitPane.addPropertyChangeListener(JSplitPane.DIVIDER_LOCATION_PROPERTY, dividerListener);
mainSplitPane.addPropertyChangeListener(JSplitPane.DIVIDER_LOCATION_PROPERTY, dividerListener);
revalidate();
repaint();
}
@ -174,12 +222,12 @@ final class MiniTimelinePanel extends javax.swing.JPanel {
mainSplitPane = new javax.swing.JSplitPane();
leftSplitPane = new javax.swing.JSplitPane();
mainSplitPane.setDividerLocation(400);
mainSplitPane.setResizeWeight(0.1);
mainSplitPane.setDividerLocation(mainSplitPaneDividerLocation);
mainSplitPane.setResizeWeight(0.2);
mainSplitPane.setToolTipText("");
mainSplitPane.setMinimumSize(new java.awt.Dimension(0, 0));
leftSplitPane.setDividerLocation(198);
leftSplitPane.setDividerLocation(leftSplitPaneDividerLocation);
leftSplitPane.setResizeWeight(0.5);
leftSplitPane.setMinimumSize(new java.awt.Dimension(0, 0));
mainSplitPane.setLeftComponent(leftSplitPane);

View File

@ -477,6 +477,9 @@ public final class IngestJobSettings {
case "Exif Parser": //NON-NLS
moduleNames.add("Picture Analyzer"); //NON-NLS
break;
case "Drone Analyzer":
moduleNames.add("DJI Drone Analyzer");
break;
default:
moduleNames.add(name);
}

View File

@ -130,6 +130,26 @@ public final class LogicalImagerDSProcessor implements DataSourceProcessor {
* @param callback Callback that will be used by the background task
* to return results.
*/
@Override
public void run(DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
// Delegate to the host-aware overload; a null host means the caller did
// not specify one (the overload creates/looks one up itself).
run(null, progressMonitor, callback);
}
/**
* Adds a data source to the case database using a background task in a
* separate thread and the settings provided by the selection and
* configuration panel. Returns as soon as the background task is started.
* The background task uses a callback object to signal task completion and
* return results.
*
* This method should not be called unless isPanelValid returns true.
*
* @param host Host for the data source.
* @param progressMonitor Progress monitor that will be used by the
* background task to report progress.
* @param callback Callback that will be used by the background task
* to return results.
*/
@Messages({
"# {0} - imageDirPath", "LogicalImagerDSProcessor.imageDirPathNotFound={0} not found.\nUSB drive has been ejected.",
"# {0} - directory", "LogicalImagerDSProcessor.failToCreateDirectory=Failed to create directory {0}",
@ -139,11 +159,10 @@ public final class LogicalImagerDSProcessor implements DataSourceProcessor {
"LogicalImagerDSProcessor.noCurrentCase=No current case",
})
@Override
public void run(DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
public void run(Host host, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
configPanel.storeSettings();
// HOSTTODO - set to value from config panel
Host host;
try {
host = Case.getCurrentCase().getSleuthkitCase().getHostManager().getOrCreateHost("LogicalImagerDSProcessor Host");
} catch (TskCoreException ex) {

View File

@ -41,8 +41,6 @@ import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.HashDb;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.AnalysisResult;
import org.sleuthkit.datamodel.AnalysisResultAdded;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
@ -50,7 +48,6 @@ import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.HashHitInfo;
import org.sleuthkit.datamodel.HashUtility;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@ -519,7 +516,7 @@ public class HashDbIngestModule implements FileIngestModule {
private void postHashSetHitToBlackboard(AbstractFile abstractFile, String md5Hash, String hashSetName, String comment, boolean showInboxMessage) {
try {
String moduleName = HashLookupModuleFactory.getModuleName();
//BlackboardArtifact badFile = abstractFile.newArtifact(ARTIFACT_TYPE.TSK_HASHSET_HIT);
BlackboardArtifact badFile = abstractFile.newArtifact(ARTIFACT_TYPE.TSK_HASHSET_HIT);
Collection<BlackboardAttribute> attributes = new ArrayList<>();
//TODO Revisit usage of deprecated constructor as per TSK-583
//BlackboardAttribute att2 = new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), MODULE_NAME, "Known Bad", hashSetName);
@ -527,22 +524,14 @@ public class HashDbIngestModule implements FileIngestModule {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_HASH_MD5, moduleName, md5Hash));
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT, moduleName, comment));
SleuthkitCase.CaseDbTransaction trans = this.skCase.beginTransaction();
AnalysisResultAdded resultAdded = blackboard.newAnalysisResult(new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_HASHSET_HIT), abstractFile.getId(), abstractFile.getDataSourceObjectId(), new Score(Score.Significance.MEDIUM, Score.Confidence.HIGH), moduleName, comment, hashSetName, attributes, trans);
AnalysisResult badFile = resultAdded.getAnalysisResult();
trans.commit();
badFile.addAttributes(attributes);
try {
/*
* post the artifact which will index the artifact for keyword
* search, and fire an event to notify UI of this new artifact
*/
blackboard.postArtifact(badFile, moduleName);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + badFile.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(
@ -585,7 +574,7 @@ public class HashDbIngestModule implements FileIngestModule {
abstractFile.getName() + md5Hash,
badFile));
}
} catch (TskException | Blackboard.BlackboardException ex) {
} catch (TskException ex) {
logger.log(Level.WARNING, "Error creating blackboard artifact", ex); //NON-NLS
}
}

View File

@ -92,6 +92,10 @@ public class ALeappAnalyzerIngestModule implements DataSourceIngestModule {
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
if (false == PlatformUtil.is64BitOS()) {
throw new IngestModuleException(NbBundle.getMessage(this.getClass(), "AleappAnalyzerIngestModule.not.64.bit.os"));
}
if (false == PlatformUtil.isWindowsOS()) {
throw new IngestModuleException(Bundle.ALeappAnalyzerIngestModule_requires_windows());
}

View File

@ -2,7 +2,9 @@ ILeappAnalyzerIngestModule.init.exception.msg=Unable to find {0}.
ILeappAnalyzerIngestModule.processing.file=Processing file {0}
ILeappAnalyzerIngestModule.parsing.file=Parsing file {0}
ILeappAnalyzerIngestModule.processing.filesystem=Processing filesystem
IleappAnalyzerIngestModule.not.64.bit.os=iLeapp will not run on a 32-bit operating system
ALeappAnalyzerIngestModule.init.exception.msg=Unable to find {0}.
ALeappAnalyzerIngestModule.processing.file=Processing file {0}
ALeappAnalyzerIngestModule.parsing.file=Parsing file {0}
ALeappAnalyzerIngestModule.processing.filesystem=Processing filesystem
AleappAnalyzerIngestModule.not.64.bit.os=aLeapp will not run on a 32-bit operating system

View File

@ -22,10 +22,12 @@ ILeappAnalyzerIngestModule.init.exception.msg=Unable to find {0}.
ILeappAnalyzerIngestModule.processing.file=Processing file {0}
ILeappAnalyzerIngestModule.parsing.file=Parsing file {0}
ILeappAnalyzerIngestModule.processing.filesystem=Processing filesystem
IleappAnalyzerIngestModule.not.64.bit.os=iLeapp will not run on a 32-bit operating system
ALeappAnalyzerIngestModule.init.exception.msg=Unable to find {0}.
ALeappAnalyzerIngestModule.processing.file=Processing file {0}
ALeappAnalyzerIngestModule.parsing.file=Parsing file {0}
ALeappAnalyzerIngestModule.processing.filesystem=Processing filesystem
AleappAnalyzerIngestModule.not.64.bit.os=aLeapp will not run on a 32-bit operating system
ILeappAnalyzerIngestModule.report.name=iLeapp Html Report
ILeappAnalyzerIngestModule.requires.windows=iLeapp module requires windows.
ILeappAnalyzerIngestModule.running.iLeapp=Running iLeapp

View File

@ -92,6 +92,10 @@ public class ILeappAnalyzerIngestModule implements DataSourceIngestModule {
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
if (false == PlatformUtil.is64BitOS()) {
throw new IngestModuleException(NbBundle.getMessage(this.getClass(), "IleappAnalyzerIngestModule.not.64.bit.os"));
}
if (false == PlatformUtil.isWindowsOS()) {
throw new IngestModuleException(Bundle.ILeappAnalyzerIngestModule_requires_windows());
}

View File

@ -59,6 +59,7 @@ import org.sleuthkit.autopsy.casemodule.Case;
import static org.sleuthkit.autopsy.casemodule.Case.getCurrentCase;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.NetworkUtils;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException;
@ -379,7 +380,9 @@ public final class LeappFileProcessor {
return Collections.emptyList();
}
BlackboardAttribute attr = (value == null) ? null : getAttribute(colAttr.getAttributeType(), value, fileName);
String formattedValue = formatValueBasedOnAttrType(colAttr, value);
BlackboardAttribute attr = (value == null) ? null : getAttribute(colAttr.getAttributeType(), formattedValue, fileName);
if (attr == null) {
logger.log(Level.WARNING, String.format("Blackboard attribute could not be parsed column %s at line %d in file %s. Omitting row.", colAttr.getColumnName(), lineNum, fileName));
return Collections.emptyList();
@ -394,6 +397,21 @@ public final class LeappFileProcessor {
return attrsToRet;
}
/**
 * Applies attribute-type-specific formatting to a raw TSV value before it
 * is turned into a blackboard attribute. Currently only TSK_DOMAIN values
 * are transformed (reduced to their domain portion); every other type is
 * passed through unchanged.
 *
 * @param colAttr the TSV column description supplying the attribute type
 * @param value   the raw string value read from the TSV column
 *
 * @return the value formatted for the attribute type, or the original
 *         value when no type-specific formatting applies
 */
private String formatValueBasedOnAttrType(TsvColumn colAttr, String value) {
    String attributeTypeName = colAttr.getAttributeType().getTypeName();
    return attributeTypeName.equals("TSK_DOMAIN")
            ? NetworkUtils.extractDomain(value)
            : value;
}
/**
* The format of time stamps in tsv.
*/

View File

@ -71,6 +71,7 @@ import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.FileSystem;
import org.sleuthkit.datamodel.Host;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.LocalFilesDataSource;
import org.sleuthkit.datamodel.Pool;
@ -1079,12 +1080,20 @@ public class PortableCaseReportModule implements ReportModule {
BlackboardArtifact artifactToCopy = (BlackboardArtifact) content;
newContent = copyArtifact(parentId, artifactToCopy);
} else {
// Get or create the host (if needed) before beginning transaction.
Host newHost = null;
if (content instanceof DataSource) {
Host oldHost = ((DataSource)content).getHost();
newHost = portableSkCase.getHostManager().getOrCreateHost(oldHost.getName());
}
CaseDbTransaction trans = portableSkCase.beginTransaction();
try {
if (content instanceof Image) {
Image image = (Image) content;
newContent = portableSkCase.addImage(image.getType(), image.getSsize(), image.getSize(), image.getName(),
new ArrayList<>(), image.getTimeZone(), image.getMd5(), image.getSha1(), image.getSha256(), image.getDeviceId(), trans);
new ArrayList<>(), image.getTimeZone(), image.getMd5(), image.getSha1(), image.getSha256(), image.getDeviceId(), newHost, trans);
} else if (content instanceof VolumeSystem) {
VolumeSystem vs = (VolumeSystem) content;
newContent = portableSkCase.addVolumeSystem(parentId, vs.getType(), vs.getOffset(), vs.getBlockSize(), trans);
@ -1108,7 +1117,7 @@ public class PortableCaseReportModule implements ReportModule {
if (abstractFile instanceof LocalFilesDataSource) {
LocalFilesDataSource localFilesDS = (LocalFilesDataSource) abstractFile;
newContent = portableSkCase.addLocalFilesDataSource(localFilesDS.getDeviceId(), localFilesDS.getName(), localFilesDS.getTimeZone(), trans);
newContent = portableSkCase.addLocalFilesDataSource(localFilesDS.getDeviceId(), localFilesDS.getName(), localFilesDS.getTimeZone(), newHost, trans);
} else {
if (abstractFile.isDir()) {
newContent = portableSkCase.addLocalDirectory(parentId, abstractFile.getName(), trans);

View File

@ -14,7 +14,7 @@
<!-- for viewers -->
<dependency conf="autopsy_core->*" org="org.freedesktop.gstreamer" name="gst1-java-core" rev="1.0.0"/>
<dependency conf="autopsy_core->*" org="net.java.dev.jna" name="jna-platform" rev="5.6.0"/>
<dependency conf="autopsy_core->*" org="net.java.dev.jna" name="jna-platform" rev="5.7.0"/>
<!-- for file search -->
<dependency conf="autopsy_core->*" org="com.github.lgooddatepicker" name="LGoodDatePicker" rev="10.3.1"/>

View File

@ -42,8 +42,8 @@ file.reference.javassist-3.12.1.GA.jar=release/modules/ext/javassist-3.12.1.GA.j
file.reference.jfxtras-common-8.0-r4.jar=release/modules/ext/jfxtras-common-8.0-r4.jar
file.reference.jfxtras-controls-8.0-r4.jar=release/modules/ext/jfxtras-controls-8.0-r4.jar
file.reference.jfxtras-fxml-8.0-r4.jar=release/modules/ext/jfxtras-fxml-8.0-r4.jar
file.reference.jna-5.6.0.jar=release/modules/ext/jna-5.6.0.jar
file.reference.jna-platform-5.6.0.jar=release/modules/ext/jna-platform-5.6.0.jar
file.reference.jna-5.7.0.jar=release/modules/ext/jna-5.7.0.jar
file.reference.jna-platform-5.7.0.jar=release/modules/ext/jna-platform-5.7.0.jar
file.reference.joda-time-2.4.jar=release/modules/ext/joda-time-2.4.jar
file.reference.jsr305-1.3.9.jar=release/modules/ext/jsr305-1.3.9.jar
file.reference.LGoodDatePicker-10.3.1.jar=release/modules/ext/LGoodDatePicker-10.3.1.jar

View File

@ -923,8 +923,8 @@
<binary-origin>release/modules/ext/commons-compress-1.18.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jna-platform-5.6.0.jar</runtime-relative-path>
<binary-origin>release\modules\ext\jna-platform-5.6.0.jar</binary-origin>
<runtime-relative-path>ext/jna-platform-5.7.0.jar</runtime-relative-path>
<binary-origin>release\modules\ext\jna-platform-5.7.0.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/opencv-248.jar</runtime-relative-path>
@ -951,8 +951,8 @@
<binary-origin>release/modules/ext/imageio-bmp-3.2.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jna-5.6.0.jar</runtime-relative-path>
<binary-origin>release\modules\ext\jna-5.6.0.jar</binary-origin>
<runtime-relative-path>ext/jna-5.7.0.jar</runtime-relative-path>
<binary-origin>release\modules\ext\jna-5.7.0.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/commons-lang-2.6.jar</runtime-relative-path>

View File

@ -119,10 +119,29 @@ public class MemoryDSProcessor implements DataSourceProcessor {
*/
@Override
public void run(DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
// Delegate to the host-aware overload; a null host means the caller did
// not specify one (the overload creates/looks one up itself).
run(null, progressMonitor, callback);
}
/**
* Adds a data source to the case database using a background task in a
* separate thread and the settings provided by the selection and
* configuration panel. Returns as soon as the background task is started.
* The background task uses a callback object to signal task completion and
* return results.
*
* This method should not be called unless isPanelValid returns true.
*
* @param host Host for the data source.
* @param progressMonitor Progress monitor that will be used by the
* background task to report progress.
* @param callback Callback that will be used by the background task
* to return results.
*/
@Override
public void run(Host host, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
configPanel.storeSettings();
// HOSTTODO - replace with a call to configPanel().getHost()
Host host;
try {
host = Case.getCurrentCase().getSleuthkitCase().getHostManager().getOrCreateHost("MemoryDSProcessor Host");
} catch (TskCoreException ex) {

View File

@ -96,7 +96,6 @@ public final class ImageGalleryController {
private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestJobEvent.DATA_SOURCE_ANALYSIS_STARTED, IngestManager.IngestJobEvent.DATA_SOURCE_ANALYSIS_COMPLETED);
private static final Set<IngestManager.IngestModuleEvent> INGEST_MODULE_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestModuleEvent.DATA_ADDED, IngestManager.IngestModuleEvent.FILE_DONE);
private static String DEFAULT_TAG_SET_NAME = "Project VIC";
/*
* The file limit for image gallery. If the selected data source (or all
* data sources, if that option is selected) has more than this many files
@ -738,7 +737,7 @@ public final class ImageGalleryController {
List<TagSet> tagSetList = getCaseDatabase().getTaggingManager().getTagSets();
if (tagSetList != null && !tagSetList.isEmpty()) {
for (TagSet set : tagSetList) {
if (set.getName().equals(getCategoryTagSetName())) {
if (set.getName().equals(ImageGalleryService.PROJECT_VIC_TAG_SET_NAME)) {
return set;
}
}
@ -749,14 +748,6 @@ public final class ImageGalleryController {
}
}
/**
* Returns the name of the category tag set.
*
* @return Tagset name
*/
static String getCategoryTagSetName() {
return DEFAULT_TAG_SET_NAME;
}
/**
* A listener for ingest module application events.
@ -839,8 +830,11 @@ public final class ImageGalleryController {
Content newDataSource = (Content) event.getNewValue();
if (isListeningEnabled()) {
try {
// If the data source already exists and has a status other than UNKNOWN, don't overwrite it.
if(drawableDB.getDataSourceDbBuildStatus(newDataSource.getId()) == DrawableDB.DrawableDbBuildStatusEnum.UNKNOWN) {
drawableDB.insertOrUpdateDataSource(newDataSource.getId(), DrawableDB.DrawableDbBuildStatusEnum.UNKNOWN);
} catch (SQLException ex) {
}
} catch (SQLException | TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Error updating datasources table (data source object ID = %d, status = %s)", newDataSource.getId(), DrawableDB.DrawableDbBuildStatusEnum.UNKNOWN.toString()), ex); //NON-NLS
}
}

View File

@ -44,20 +44,32 @@ import org.sleuthkit.datamodel.TskData;
})
public class ImageGalleryService implements AutopsyService {
private static final String CATEGORY_ONE_NAME = "Child Exploitation (Illegal)";
private static final String CATEGORY_TWO_NAME = "Child Exploitation (Non-Illegal/Age Difficult)";
private static final String CATEGORY_THREE_NAME = "CGI/Animation (Child Exploitive)";
private static final String CATEGORY_FOUR_NAME = "Exemplar/Comparison (Internal Use Only)";
private static final String CATEGORY_FIVE_NAME = "Non-pertinent";
/* Image Gallery has its own definition of Project VIC tag names because
* these will be used if the Project Vic module is not installed. These will
* get added when a case is opened if the tag set is not already defined.
*
* The following list of names must be kept in sync with the CountryManager
* code in the ProjectVic module.
*
* Autopsy Core Tag code and TSK DataModel upgrade code also have
* references to the "Project VIC" set name. Be careful changing any of these names.
*/
static String PROJECT_VIC_TAG_SET_NAME = "Project VIC";
private static final String PV_US_CAT0 = "Non-Pertinent";
private static final String PV_US_CAT1 = "Child Abuse Material - (CAM)";
private static final String PV_US_CAT2 = "Child Exploitive (Non-CAM) Age Difficult";
private static final String PV_US_CAT3 = "CGI/Animation - Child Exploitive";
private static final String PV_US_CAT4 = "Comparison Images";
private static final List<TagNameDefinition> DEFAULT_CATEGORY_DEFINITION = new ArrayList<>();
private static final List<TagNameDefinition> PROJECT_VIC_US_CATEGORIES = new ArrayList<>();
static {
DEFAULT_CATEGORY_DEFINITION.add(new TagNameDefinition(CATEGORY_ONE_NAME, "", TagName.HTML_COLOR.RED, TskData.FileKnown.BAD));
DEFAULT_CATEGORY_DEFINITION.add(new TagNameDefinition(CATEGORY_TWO_NAME, "", TagName.HTML_COLOR.LIME, TskData.FileKnown.BAD));
DEFAULT_CATEGORY_DEFINITION.add(new TagNameDefinition(CATEGORY_THREE_NAME, "", TagName.HTML_COLOR.YELLOW, TskData.FileKnown.BAD));
DEFAULT_CATEGORY_DEFINITION.add(new TagNameDefinition(CATEGORY_FOUR_NAME, "", TagName.HTML_COLOR.PURPLE, TskData.FileKnown.UNKNOWN));
DEFAULT_CATEGORY_DEFINITION.add(new TagNameDefinition(CATEGORY_FIVE_NAME, "", TagName.HTML_COLOR.FUCHSIA, TskData.FileKnown.UNKNOWN));
// NOTE: The colors here are what will be shown in the border
PROJECT_VIC_US_CATEGORIES.add(new TagNameDefinition(PV_US_CAT0, "", TagName.HTML_COLOR.GREEN, TskData.FileKnown.UNKNOWN));
PROJECT_VIC_US_CATEGORIES.add(new TagNameDefinition(PV_US_CAT1, "", TagName.HTML_COLOR.RED, TskData.FileKnown.BAD));
PROJECT_VIC_US_CATEGORIES.add(new TagNameDefinition(PV_US_CAT2, "", TagName.HTML_COLOR.YELLOW, TskData.FileKnown.BAD));
PROJECT_VIC_US_CATEGORIES.add(new TagNameDefinition(PV_US_CAT3, "", TagName.HTML_COLOR.FUCHSIA, TskData.FileKnown.BAD));
PROJECT_VIC_US_CATEGORIES.add(new TagNameDefinition(PV_US_CAT4, "", TagName.HTML_COLOR.BLUE, TskData.FileKnown.UNKNOWN));
}
@Override
@ -91,17 +103,17 @@ public class ImageGalleryService implements AutopsyService {
// Check to see if the Project VIC tag set exists, if not create a
// tag set using the default tags.
boolean addDefaultTagSet = true;
boolean addProjVicTagSet = true;
List<TagSet> tagSets = context.getCase().getServices().getTagsManager().getAllTagSets();
for (TagSet set : tagSets) {
if (set.getName().equals(ImageGalleryController.getCategoryTagSetName())) {
addDefaultTagSet = false;
if (set.getName().equals(PROJECT_VIC_TAG_SET_NAME)) {
addProjVicTagSet = false;
break;
}
}
if (addDefaultTagSet) {
addDefaultTagSet(context.getCase());
if (addProjVicTagSet) {
addProjetVicTagSet(context.getCase());
}
ImageGalleryController.createController(context.getCase());
@ -134,13 +146,11 @@ public class ImageGalleryService implements AutopsyService {
*
* @throws TskCoreException
*/
private void addDefaultTagSet(Case currentCase) throws TskCoreException {
private void addProjetVicTagSet(Case currentCase) throws TskCoreException {
List<TagName> tagNames = new ArrayList<>();
for (TagNameDefinition def : DEFAULT_CATEGORY_DEFINITION) {
for (TagNameDefinition def : PROJECT_VIC_US_CATEGORIES) {
tagNames.add(currentCase.getSleuthkitCase().addOrUpdateTagName(def.getDisplayName(), def.getDescription(), def.getColor(), def.getKnownStatus()));
}
currentCase.getServices().getTagsManager().addTagSet(ImageGalleryController.getCategoryTagSetName(), tagNames);
currentCase.getServices().getTagsManager().addTagSet(PROJECT_VIC_TAG_SET_NAME, tagNames);
}
}

View File

@ -157,7 +157,10 @@ public final class OpenAction extends CallableSystemAction {
}
Platform.runLater(() -> {
ImageGalleryController controller;
// @@@ This call gets a lock. We shouldn't do this in the UI....
controller = ImageGalleryController.getController(currentCase);
// Display an error if we could not get the controller and return
if (controller == null) {
Alert errorDIalog = new Alert(Alert.AlertType.ERROR);
errorDIalog.initModality(Modality.APPLICATION_MODAL);
@ -174,6 +177,7 @@ public final class OpenAction extends CallableSystemAction {
return;
}
// Make sure the user is aware of Single vs Multi-user behaviors
if (currentCase.getCaseType() == Case.CaseType.MULTI_USER_CASE
&& ImageGalleryPreferences.isMultiUserCaseInfoDialogDisabled() == false) {
Alert dialog = new Alert(Alert.AlertType.INFORMATION);

View File

@ -99,9 +99,9 @@ public class CategoryManager {
}
/**
* get the number of file with the given {@link DhsImageCategory}
* get the number of files with the given tag
*
* @param cat get the number of files with Category = cat
* @param tagName the tag name whose file count is requested
*
* @return the number of files with the given Category
*/
@ -110,20 +110,18 @@ public class CategoryManager {
}
/**
* increment the cached value for the number of files with the given
* {@link DhsImageCategory}
* increment the cached value for the number of files with the given tag
*
* @param cat the Category to increment
* @param tagName the Category to increment
*/
/**
 * Increments the cached count of files tagged with the given tag name.
 *
 * @param tagName the tag name whose cached file count should be incremented
 */
public synchronized void incrementCategoryCount(TagName tagName) {
    categoryCounts.getUnchecked(tagName).increment();
}
/**
* decrement the cached value for the number of files with the given
* DhsImageCategory
* decrement the cached value for the number of files with the given tag
*
* @param cat the Category to decrement
* @param tagName the Category to decrement
*/
synchronized public void decrementCategoryCount(TagName tagName) {
categoryCounts.getUnchecked(tagName).decrement();

View File

@ -207,19 +207,19 @@ public final class DrawableDB {
*/
UNKNOWN,
/**
* Analyis (an ingest job or image gallery database rebuild) for the
* Analysis (an ingest job or image gallery database rebuild) for the
* data source is in progress.
*/
IN_PROGRESS,
/**
* Analyis (an ingest job or image gallery database rebuild) for the
* Analysis (an ingest job or image gallery database rebuild) for the
* data source has been completed and at least one file in the data
* source has a MIME type (ingest filters may have been applied, so some
* files may not have been typed).
*/
COMPLETE,
/**
* Analyis (an ingest job or image gallery database rebuild) for the
* Analysis (an ingest job or image gallery database rebuild) for the
* data source has been completed, but the files for the data source
* were not assigned a MIME type (file typing was not enabled).
*/

View File

@ -187,7 +187,7 @@ Server.query.exception.msg=Error running query: {0}
Server.query2.exception.msg=Error running query: {0}
Server.queryTerms.exception.msg=Error running terms query: {0}
Server.connect.exception.msg=Failed to connect to Solr server: {0}
Server.openCore.exception.msg=Keyword search service not yet running
Server.openCore.exception.msg=Local keyword search service not yet running
Server.openCore.exception.cantOpen.msg=Could not create or open index
Server.openCore.exception.noIndexDir.msg=Index directory could not be created or is missing
Server.request.exception.exception.msg=Could not issue Solr request

View File

@ -18,6 +18,9 @@
*/
package org.sleuthkit.autopsy.keywordsearch;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import org.apache.solr.client.solrj.SolrServerException;
import org.openide.modules.ModuleInstall;
@ -39,6 +42,7 @@ class Installer extends ModuleInstall {
private static final Logger logger = Logger.getLogger(Installer.class.getName());
private static final long serialVersionUID = 1L;
private static final String KWS_START_THREAD_NAME = "KWS-server-start-%d";
@Override
public void restored() {
@ -46,6 +50,10 @@ class Installer extends ModuleInstall {
KeywordSearchSettings.setDefaults();
final Server server = KeywordSearch.getServer();
ExecutorService jobProcessingExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat(KWS_START_THREAD_NAME).build());
Runnable kwsStartTask = new Runnable() {
public void run() {
try {
server.start();
} catch (SolrServerNoPortException ex) {
@ -60,6 +68,12 @@ class Installer extends ModuleInstall {
reportInitError(ex.getMessage());
}
}
};
// start KWS service on the background thread. Currently all it does is start the embedded Solr server.
jobProcessingExecutor.submit(kwsStartTask);
jobProcessingExecutor.shutdown(); // tell executor no more work is coming
}
@Override
public boolean closing() {

View File

@ -31,6 +31,7 @@ import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.ConnectException;
import java.net.DatagramSocket;
import java.net.ServerSocket;
import java.net.SocketException;
import java.nio.charset.Charset;
@ -239,6 +240,8 @@ public class Server {
private static final String CORE_PROPERTIES = "core.properties";
private static final boolean DEBUG = false;//(Version.getBuildType() == Version.Type.DEVELOPMENT);
private static final int NUM_COLLECTION_CREATION_RETRIES = 5;
private static final int NUM_EMBEDDED_SERVER_RETRIES = 12; // attempt to connect to embedded Solr server for 1 minute
private static final int EMBEDDED_SERVER_RETRY_WAIT_SEC = 5;
public enum CORE_EVT_STATES {
@ -270,6 +273,8 @@ public class Server {
Server() {
initSettings();
localSolrServer = getSolrClient("http://localhost:" + localSolrServerPort + "/solr");
serverAction = new ServerAction();
File solr8Folder = InstalledFileLocator.getDefault().locate("solr", Server.class.getPackage().getName(), false); //NON-NLS
File solr4Folder = InstalledFileLocator.getDefault().locate("solr4", Server.class.getPackage().getName(), false); //NON-NLS
@ -664,11 +669,13 @@ public class Server {
*/
@NbBundle.Messages({
"Server.status.failed.msg=Local Solr server did not respond to status request. This may be because the server failed to start or is taking too long to initialize.",})
void startLocalSolr(SOLR_VERSION version) throws KeywordSearchModuleException, SolrServerNoPortException, SolrServerException {
synchronized void startLocalSolr(SOLR_VERSION version) throws KeywordSearchModuleException, SolrServerNoPortException, SolrServerException {
logger.log(Level.INFO, "Starting local Solr " + version + " server"); //NON-NLS
if (isLocalSolrRunning()) {
if (localServerVersion.equals(version)) {
// this version of local server is already running
logger.log(Level.INFO, "Local Solr " + version + " server is already running"); //NON-NLS
return;
} else {
// wrong version of local server is running, stop it
@ -723,9 +730,8 @@ public class Server {
}
// Wait for the Solr server to start and respond to a statusRequest request.
for (int numRetries = 0; numRetries < 6; numRetries++) {
for (int numRetries = 0; numRetries < NUM_EMBEDDED_SERVER_RETRIES; numRetries++) {
if (isLocalSolrRunning()) {
localSolrServer = getSolrClient("http://localhost:" + localSolrServerPort + "/solr");
final List<Long> pids = this.getSolrPIDs();
logger.log(Level.INFO, "New Solr process PID: {0}", pids); //NON-NLS
return;
@ -734,7 +740,7 @@ public class Server {
// Local Solr server did not respond so we sleep for
// 5 seconds before trying again.
try {
TimeUnit.SECONDS.sleep(5);
TimeUnit.SECONDS.sleep(EMBEDDED_SERVER_RETRY_WAIT_SEC);
} catch (InterruptedException ex) {
logger.log(Level.WARNING, "Timer interrupted"); //NON-NLS
}
@ -767,6 +773,23 @@ public class Server {
* @param port the port to check for availability
*/
static boolean isPortAvailable(int port) {
    // Dispatch to an OS-specific availability check: the OS X variant also
    // probes UDP, while the default variant checks TCP only.
    //
    // Fix: fetch the raw OS name first and lowercase it only inside the
    // null guard. The previous code called toLowerCase() on the result of
    // getOSName() before the null check, so a null OS name would have thrown
    // an NPE before the guard could ever run (and the name was then
    // redundantly lowercased a second time).
    final String osName = PlatformUtil.getOSName();
    if (osName != null && osName.toLowerCase().startsWith("mac")) {
        return isPortAvailableOSX(port);
    } else {
        return isPortAvailableDefault(port);
    }
}
/**
* Checks to see if a specific port is available.
*
* NOTE: This is used on non-OS X systems as of right now but could be
* replaced with the OS X version.
*
* @param port the port to check for availability
*/
static boolean isPortAvailableDefault(int port) {
ServerSocket ss = null;
try {
@ -792,6 +815,48 @@ public class Server {
return false;
}
/**
 * Checks to see if a specific port is available for both TCP and UDP.
 *
 * NOTE: This is only used on OSX for now, but could replace the default
 * implementation in the future.
 *
 * @param port The port to check for availability.
 *
 * @return True if both a ServerSocket and a DatagramSocket could be bound
 *         to the port; false if either bind failed.
 *
 * @throws IllegalArgumentException If port is outside range of possible ports.
 */
static boolean isPortAvailableOSX(int port) {
    // implementation taken from https://stackoverflow.com/a/435579
    if (port < 1 || port > 65535) {
        throw new IllegalArgumentException("Invalid start port: " + port);
    }
    // try-with-resources guarantees both sockets are closed on every path,
    // replacing the original's null-checked finally block and its empty
    // catch around ServerSocket.close().
    try (ServerSocket ss = new ServerSocket(port)) {
        ss.setReuseAddress(true);
        try (DatagramSocket ds = new DatagramSocket(port)) {
            ds.setReuseAddress(true);
            return true;
        }
    } catch (IOException e) {
        // A bind failure means the port is in use (or otherwise unavailable).
        return false;
    }
}
/**
* Changes the current solr server port. Only call this after available.
*
@ -1875,13 +1940,22 @@ public class Server {
* @throws IOException
*/
private void connectToEmbeddedSolrServer() throws SolrServerException, IOException {
HttpSolrClient solrServer = getSolrClient("http://localhost:" + localSolrServerPort + "/solr");
TimingMetric metric = HealthMonitor.getTimingMetric("Solr: Connectivity check");
CoreAdminRequest.getStatus(null, solrServer);
CoreAdminRequest.getStatus(null, localSolrServer);
HealthMonitor.submitTimingMetric(metric);
}
/**
* Attempts to connect to the given Solr server, which is running in
SolrCloud mode. This API does not work for the local Solr which is NOT
* running in SolrCloud mode.
*
* @param host Host name of the remote Solr server
* @param port Port of the remote Solr server
*
* @throws SolrServerException
* @throws IOException
*/
void connectToSolrServer(String host, String port) throws SolrServerException, IOException {
try (HttpSolrClient solrServer = getSolrClient("http://" + host + ":" + port + "/solr")) {
connectToSolrServer(solrServer);
@ -1947,46 +2021,6 @@ public class Server {
}
}
/* ELTODO leaving this for reference, will delete later
private boolean clusterStatusWithCollection(String collectionName) throws IOException, SolrServerException {
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString());
params.set("collection", collectionName);
SolrRequest request = new QueryRequest(params);
request.setPath("/admin/collections");
NamedList<Object> statusResponse;
try {
statusResponse = currentSolrServer.request(request);
} catch (RemoteSolrException ex) {
// collection doesn't exist
return false;
}
if (statusResponse == null) {
logger.log(Level.SEVERE, "Collections response should not be null"); //NON-NLS
return false;
}
NamedList<Object> cluster = (NamedList<Object>) statusResponse.get("cluster");
if (cluster == null) {
logger.log(Level.SEVERE, "Cluster should not be null"); //NON-NLS
return false;
}
NamedList<Object> collections = (NamedList<Object>) cluster.get("collections");
if (cluster == null) {
logger.log(Level.SEVERE, "Collections should not be null in cluster state"); //NON-NLS
return false;
}
if (collections.size() == 0) {
logger.log(Level.SEVERE, "Collections should not be empty in cluster state"); //NON-NLS
return false;
}
Object collection = collections.get(collectionName);
return (collection != null);
}*/
class Collection {
// handle to the collection in Solr

View File

@ -401,8 +401,10 @@ public class SolrSearchService implements KeywordSearchService, AutopsyService {
throw new AutopsyServiceException(String.format("Failed to close core for %s", context.getCase().getCaseDirectory()), ex);
}
if (context.getCase().getSleuthkitCase() != null) {
context.getCase().getSleuthkitCase().unregisterForEvents(this);
}
}
/**
* Event handler for ArtifactsPostedEvents from SleuthkitCase.

View File

@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
* Copyright 2012-2020 Basis Technology Corp.
* Copyright 2012-2021 Basis Technology Corp.
*
* Copyright 2012 42six Solutions.
*
@ -487,7 +487,7 @@ class Chromium extends Extract {
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL,
RecentActivityExtracterModuleFactory.getModuleName(),
((result.get("host_key").toString() != null) ? result.get("host_key").toString() : ""))); //NON-NLS
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME,
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED,
RecentActivityExtracterModuleFactory.getModuleName(),
(Long.valueOf(result.get("last_access_utc").toString()) / 1000000) - Long.valueOf("11644473600"))); //NON-NLS

View File

@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
* Copyright 2012-2019 Basis Technology Corp.
* Copyright 2012-2021 Basis Technology Corp.
*
* Copyright 2012 42six Solutions.
* Contact: aebadirad <at> 42six <dot> com
@ -325,18 +325,28 @@ abstract class Extract {
* @return List of BlackboarAttributes for the passed in attributes
*/
protected Collection<BlackboardAttribute> createCookieAttributes(String url,
Long creationTime, String name, String value, String programName, String domain) {
Long creationTime, Long accessTime, Long endTime, String name, String value, String programName, String domain) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL,
RecentActivityExtracterModuleFactory.getModuleName(),
(url != null) ? url : "")); //NON-NLS
if (creationTime != null) {
bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME,
if (creationTime != null && creationTime != 0) {
bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED,
RecentActivityExtracterModuleFactory.getModuleName(), creationTime));
}
if (accessTime != null && accessTime != 0) {
bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED,
RecentActivityExtracterModuleFactory.getModuleName(), accessTime));
}
if(endTime != null && endTime != 0) {
bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_END,
RecentActivityExtracterModuleFactory.getModuleName(), endTime));
}
bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME,
RecentActivityExtracterModuleFactory.getModuleName(),
(name != null) ? name : "")); //NON-NLS

View File

@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
* Copyright 2019-2020 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -659,7 +659,7 @@ final class ExtractEdge extends Extract {
String url = flipDomain(domain);
BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE);
bbart.addAttributes(createCookieAttributes(url, ftime, name, value, this.getName(), NetworkUtils.extractDomain(url)));
bbart.addAttributes(createCookieAttributes(url, null, ftime, null, name, value, this.getName(), NetworkUtils.extractDomain(url)));
return bbart;
}

View File

@ -267,7 +267,7 @@ class ExtractIE extends Extract {
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL,
RecentActivityExtracterModuleFactory.getModuleName(), url));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME,
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED,
RecentActivityExtracterModuleFactory.getModuleName(), datetime));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME,
RecentActivityExtracterModuleFactory.getModuleName(), (name != null) ? name : ""));

View File

@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
* Copyright 2019 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -566,7 +566,7 @@ final class ExtractSafari extends Extract {
Cookie cookie = iter.next();
BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE);
bbart.addAttributes(createCookieAttributes(cookie.getURL(), cookie.getCreationDate(), cookie.getName(), cookie.getValue(), this.getName(), NetworkUtils.extractDomain(cookie.getURL())));
bbart.addAttributes(createCookieAttributes(cookie.getURL(), cookie.getCreationDate(), null, cookie.getExpirationDate(), cookie.getName(), cookie.getValue(), this.getName(), NetworkUtils.extractDomain(cookie.getURL())));
bbartifacts.add(bbart);
}
}

View File

@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
* Copyright 2012-2020 Basis Technology Corp.
* Copyright 2012-2021 Basis Technology Corp.
*
* Copyright 2012 42six Solutions.
* Contact: aebadirad <at> 42six <dot> com
@ -423,7 +423,7 @@ class Firefox extends Extract {
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL,
RecentActivityExtracterModuleFactory.getModuleName(),
((host != null) ? host : ""))); //NON-NLS
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME,
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED,
RecentActivityExtracterModuleFactory.getModuleName(),
(Long.valueOf(result.get("lastAccessed").toString())))); //NON-NLS
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME,

View File

@ -16,7 +16,25 @@ If you are experiencing an error, we encourage you to post on the forum (https:/
<li>If there were any errors in the \ref troubleshooting_logs "logs"
</ul>
\section troubleshooting_user_folder Deleting the Autopsy User Folder
\section troubleshooting_specific_issues Specific Issues
\subsection troubleshooting_fond_size Font Size Too Small in Windows
Make the following changes if the application is hard to navigate in High DPI systems:
<ol>
<li>Right-click on the application icon on your Desktop, Start Menu, etc.
<li>Choose Properties.
<li>Go to Compatibility tab.
<li>Click "Change high DPI settings" button.
<li>Select "Override high DPI scaling behavior".
<li>Change the "Scaling performed by:" drop down box to "System".
<li>Restart Autopsy.
</ol>
\section troubleshooting_general General Troubleshooting
\subsection troubleshooting_user_folder Deleting the Autopsy User Folder
If Autopsy starts behaving strangely, stops loading entirely, or menu items go missing, you probably need to delete your user folder. Doing so essentially gives you a fresh installation. On Windows the user folder is located in "C:\Users\(user name)\AppData\Roaming\autopsy".
@ -30,7 +48,7 @@ Note that if you delete this folder you will lose all your Autopsy settings incl
Alternately, you could copy the fresh user folder somewhere, move your old version back, and replace folders until it works again.
\section troubleshooting_logs Viewing the Logs
\subsection troubleshooting_logs Viewing the Logs
The logs are generally the most helpful in figuring out why an error is occurring. There are two sets of logs - the system logs and the case logs. There is an option in the UI to open the log folder:
@ -70,7 +88,7 @@ Caused by: java.sql.SQLException: ResultSet closed
If the error message doesn't help you solve the problem yourself, please post to the <a href="https://sleuthkit.discourse.group/">forum</a> including the full stack trace (if available).
\section troubleshooting_stack Creating a Thread Dump
\subsection troubleshooting_stack Creating a Thread Dump
You can also generate a thread dump of the current state. This is useful if an ingest module or other process seems to be stuck. To generate a thread dump, go to "Help" then "Thread Dump" in the UI.

View File

@ -0,0 +1,178 @@
# Sample module in the public domain. Feel free to use this as a template
# for your modules (and you can remove this header and take complete credit
# and liability)
#
# Contact: Brian Carrier [carrier <at> sleuthkit [dot] org]
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
# Simple data source-level ingest module for Autopsy.
# Used as part of Python tutorials from Basis Technology - August 2015
#
# Looks for files of a given name, opens then in SQLite, queries the DB,
# and makes artifacts
import jarray
import inspect
from java.lang import Class
from java.lang import System
from java.util.logging import Level
from java.util import ArrayList
from java.io import File
from org.sleuthkit.datamodel import SleuthkitCase
from org.sleuthkit.datamodel import AbstractFile
from org.sleuthkit.autopsy.ingest import IngestModule
from org.sleuthkit.autopsy.ingest.IngestModule import IngestModuleException
from org.sleuthkit.autopsy.ingest import DataSourceIngestModule
from org.sleuthkit.autopsy.ingest import IngestModuleFactoryAdapter
from org.sleuthkit.autopsy.ingest import IngestMessage
from org.sleuthkit.autopsy.ingest import IngestServices
from org.sleuthkit.autopsy.ingest import ModuleDataEvent
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.datamodel import TskCoreException
from org.sleuthkit.datamodel.Blackboard import BlackboardException
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
from org.sleuthkit.datamodel import Account
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
from java.sql import ResultSet
from java.sql import SQLException
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
# Factory class: tells Autopsy about this module and hands out instances of
# the data-source-level ingest module that performs the actual analysis.
class ContactsDbIngestModuleFactory(IngestModuleFactoryAdapter):

    # TODO - Replace with your module's name
    moduleName = "Contacts Db Analyzer"

    def getModuleDisplayName(self):
        return self.moduleName

    def getModuleDescription(self):
        return "Sample module that parses contacts.db"

    def getModuleVersionNumber(self):
        return "1.0"

    def isDataSourceIngestModuleFactory(self):
        return True

    def createDataSourceIngestModule(self, ingestOptions):
        return ContactsDbIngestModule()
# Data Source-level ingest module. One gets created per data source.
class ContactsDbIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(ContactsDbIngestModuleFactory.moduleName)

    def log(self, level, msg):
        # Log with the calling method's name (via the stack) for traceability.
        self._logger.logp(level, self.__class__.__name__, inspect.stack()[1][3], msg)

    def __init__(self):
        self.context = None

    # Where any setup and configuration is done
    # 'context' is an instance of org.sleuthkit.autopsy.ingest.IngestJobContext.
    # See: http://sleuthkit.org/autopsy/docs/api-docs/latest/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_ingest_job_context.html
    def startUp(self, context):
        self.context = context

    # Where the analysis is done.
    # The 'data_source' object being passed in is of type org.sleuthkit.datamodel.Content.
    # See: http://www.sleuthkit.org/sleuthkit/docs/jni-docs/latest/interfaceorg_1_1sleuthkit_1_1datamodel_1_1_content.html
    # 'progress_bar' is of type org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress
    # See: http://sleuthkit.org/autopsy/docs/api-docs/latest/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_data_source_ingest_module_progress.html
    def process(self, data_source, progress_bar):

        # we don't know how much work there is yet
        progress_bar.switchToIndeterminate()

        # Find files named contacts.db anywhere in the data source.
        # TODO - replace with your database name and parent path.
        app_databases = AppSQLiteDB.findAppDatabases(data_source, "contacts.db", True, "")

        num_databases = len(app_databases)
        progress_bar.switchToDeterminate(num_databases)
        databases_processed = 0

        try:
            # Iterate through all the database files returned
            for app_database in app_databases:

                # Check if the user pressed cancel while we were busy
                if self.context.isJobCancelled():
                    return IngestModule.ProcessResult.OK

                self.log(Level.INFO, "Processing file: " + app_database.getDBFile().getName())

                # Query the contacts table in the database and get all columns.
                try:
                    # TODO - replace with your query
                    result_set = app_database.runQuery("SELECT * FROM contacts")
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database for contacts table (" + e.getMessage() + ")")
                    return IngestModule.ProcessResult.OK

                try:
                    # Get the current case for the CommunicationArtifactsHelper.
                    current_case = Case.getCurrentCaseThrows()
                except NoCurrentCaseException as ex:
                    self.log(Level.INFO, "Case is closed (" + ex.getMessage() + ")")
                    return IngestModule.ProcessResult.OK

                # Create an instance of the helper class
                # TODO - Replace with your parser name and Account.Type
                helper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
                        ContactsDbIngestModuleFactory.moduleName, app_database.getDBFile(), Account.Type.DEVICE)

                # Iterate through each row and create artifacts
                while result_set.next():
                    try:
                        # TODO - Replace these calls with your column names and types
                        # Ex of other types: result_set.getInt("contact_type") or result_set.getLong("datetime")
                        name = result_set.getString("name")
                        email = result_set.getString("email")
                        phone = result_set.getString("phone")
                    except SQLException as e:
                        self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")
                        # BUG FIX: skip this row. The original fell through to
                        # addContact() with name/email/phone potentially unbound,
                        # which would raise a NameError on a bad first row.
                        continue
                    helper.addContact(name, phone, "", "", email)

                app_database.close()
                databases_processed += 1
                progress_bar.progress(databases_processed)
        except TskCoreException as e:
            self.log(Level.INFO, "Error inserting or reading from the Sleuthkit case (" + e.getMessage() + ")")
        except BlackboardException as e:
            self.log(Level.INFO, "Error posting artifact to the Blackboard (" + e.getMessage() + ")")

        # After all databases, post a message to the ingest messages in box.
        # TODO - update your module name here
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                ContactsDbIngestModuleFactory.moduleName, "Found %d files" % num_databases)
        IngestServices.getInstance().postMessage(message)

        return IngestModule.ProcessResult.OK

View File

@ -0,0 +1,284 @@
# Sample module in the public domain. Feel free to use this as a template
# for your modules (and you can remove this header and take complete credit
# and liability)
#
# Contact: Brian Carrier [carrier <at> sleuthkit [dot] org]
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
# Simple data source-level ingest module for Autopsy.
# Search for TODO for the things that you need to change
# See http://sleuthkit.org/autopsy/docs/api-docs/4.6.0/index.html for documentation
import inspect
import os
import shutil
import ntpath
from com.williballenthin.rejistry import RegistryHiveFile
from com.williballenthin.rejistry import RegistryKey
from com.williballenthin.rejistry import RegistryParseException
from com.williballenthin.rejistry import RegistryValue
from java.io import File
from java.lang import Class
from java.lang import System
from java.sql import DriverManager, SQLException
from java.util.logging import Level
from java.util import ArrayList
from org.sleuthkit.datamodel import SleuthkitCase
from org.sleuthkit.datamodel import AbstractFile
from org.sleuthkit.datamodel import ReadContentInputStream
from org.sleuthkit.datamodel import BlackboardArtifact
from org.sleuthkit.datamodel import BlackboardAttribute
from org.sleuthkit.datamodel import TskData
from org.sleuthkit.autopsy.ingest import IngestModule
from org.sleuthkit.autopsy.ingest.IngestModule import IngestModuleException
from org.sleuthkit.autopsy.ingest import DataSourceIngestModule
from org.sleuthkit.autopsy.ingest import IngestModuleFactoryAdapter
from org.sleuthkit.autopsy.ingest import IngestModuleIngestJobSettings
from org.sleuthkit.autopsy.ingest import IngestModuleIngestJobSettingsPanel
from org.sleuthkit.autopsy.ingest import IngestMessage
from org.sleuthkit.autopsy.ingest import IngestServices
from org.sleuthkit.autopsy.ingest import ModuleDataEvent
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.coreutils import PlatformUtil
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.casemodule.services import Services
from org.sleuthkit.autopsy.casemodule.services import FileManager
from org.sleuthkit.autopsy.datamodel import ContentUtils
from org.sleuthkit.autopsy.modules.interestingitems import FilesSetsManager
# Factory that defines the name and details of the module and allows Autopsy
# to create instances of the modules that will do the analysis.
class RegistryExampleIngestModuleFactory(IngestModuleFactoryAdapter):

    def __init__(self):
        self.settings = None

    # BUG FIX: corrected the user-visible typo "Registy" -> "Registry".
    moduleName = "Registry Example Module"

    def getModuleDisplayName(self):
        return self.moduleName

    def getModuleDescription(self):
        return "Extract Run Keys To Look For Interesting Items"

    def getModuleVersionNumber(self):
        return "1.0"

    def hasIngestJobSettingsPanel(self):
        return False

    def isDataSourceIngestModuleFactory(self):
        return True

    def createDataSourceIngestModule(self, ingestOptions):
        return RegistryExampleIngestModule(self.settings)
# Data Source-level ingest module. One gets created per data source.
class RegistryExampleIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(RegistryExampleIngestModuleFactory.moduleName)

    def log(self, level, msg):
        # Log with the calling method's name (via the stack) for easier tracing.
        self._logger.logp(level, self.__class__.__name__, inspect.stack()[1][3], msg)

    def __init__(self, settings):
        self.context = None

    # Where any setup and configuration is done.
    def startUp(self, context):
        self.context = context
        # Hive keys to parse; '/' is used as the separator because it is
        # easier to split on than '\\'.
        self.registryNTUserRunKeys = ('Software/Microsoft/Windows/CurrentVersion/Run', 'Software/Microsoft/Windows/CurrentVersion/RunOnce')
        self.registrySoftwareRunKeys = ('Microsoft/Windows/CurrentVersion/Run', 'Microsoft/Windows/CurrentVersion/RunOnce')
        # Accumulates [abstractFile, key location, value name, value data]
        # entries found across all processed hives.
        self.registryKeysFound = []

    # Where the analysis is done: extract SOFTWARE/NTUSER.DAT hives, parse
    # their Run/RunOnce keys, and post blackboard artifacts for each value.
    def process(self, dataSource, progressBar):
        # We don't know how much work there is yet.
        progressBar.switchToIndeterminate()

        # Hive files to extract.
        filesToExtract = ("NTUSER.DAT", "SOFTWARE")

        # Create a RegistryExample directory in the case temp directory; if
        # it already exists just continue processing.
        tempDir = os.path.join(Case.getCurrentCase().getTempDirectory(), "RegistryExample")
        self.log(Level.INFO, "create Directory " + tempDir)
        try:
            os.mkdir(tempDir)
        except:
            self.log(Level.INFO, "ExampleRegistry Directory already exists " + tempDir)

        skCase = Case.getCurrentCase().getSleuthkitCase()
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        # BUGFIX: blackboard was previously never defined, so indexArtifact()
        # below always raised (and was silently swallowed by the bare except).
        blackboard = Case.getCurrentCase().getServices().getBlackboard()

        # Look for the hive files to process.
        for fileName in filesToExtract:
            files = fileManager.findFiles(dataSource, fileName)
            for file in files:
                # Check if the user pressed cancel while we were busy.
                if self.context.isJobCancelled():
                    return IngestModule.ProcessResult.OK

                # Only process hive files from the expected locations.
                if ((file.getName() == 'SOFTWARE') and (file.getParentPath().upper() == '/WINDOWS/SYSTEM32/CONFIG/') and (file.getSize() > 0)):
                    # Save the file locally in the temp folder.
                    self.writeHiveFile(file, file.getName(), tempDir)
                    # Process this file looking through the run keys.
                    self.processSoftwareHive(os.path.join(tempDir, file.getName()), file)
                elif ((file.getName() == 'NTUSER.DAT') and ('/USERS' in file.getParentPath().upper()) and (file.getSize() > 0)):
                    # Found an NTUSER.DAT file in a user directory. The name
                    # may not be unique, so prefix it with the file id.
                    # BUGFIX: use a distinct local name instead of clobbering
                    # the outer loop variable 'fileName'.
                    hiveFileName = str(file.getId()) + "-" + file.getName()
                    # Save the file locally in the temp folder.
                    self.writeHiveFile(file, hiveFileName, tempDir)
                    # Process this file looking through the run keys.
                    self.processNTUserHive(os.path.join(tempDir, hiveFileName), file)

        # Set up artifact and attribute types; these may already exist from a
        # previous run, in which case the add calls throw and we just log.
        try:
            skCase.addArtifactType("TSK_REGISTRY_RUN_KEYS", "Registry Run Keys")
        except:
            self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
        # BUGFIX: resolve the artifact type id unconditionally. Previously the
        # id was only assigned (as 'artId') in the except branch, while the
        # try branch assigned 'artID', leaving 'artId' undefined on success.
        artId = skCase.getArtifactTypeID("TSK_REGISTRY_RUN_KEYS")
        try:
            skCase.addArtifactAttributeType("TSK_REG_RUN_KEY_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Run Key Name")
        except:
            self.log(Level.INFO, "Attributes Creation Error, TSK_REG_RUN_KEY_NAME, May already exist. ")
        try:
            skCase.addArtifactAttributeType("TSK_REG_RUN_KEY_VALUE", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Run Key Value")
        except:
            self.log(Level.INFO, "Attributes Creation Error, TSK_REG_RUN_KEY_VALUE, May already exist. ")
        try:
            skCase.addArtifactAttributeType("TSK_REG_KEY_LOCATION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Registry Key Location")
        except:
            self.log(Level.INFO, "Attributes Creation Error, TSK_REG_KEY_LOCATION, May already exist. ")

        attributeIdRunKeyName = skCase.getAttributeType("TSK_REG_RUN_KEY_NAME")
        attributeIdRunKeyValue = skCase.getAttributeType("TSK_REG_RUN_KEY_VALUE")
        attributeIdRegKeyLoc = skCase.getAttributeType("TSK_REG_KEY_LOCATION")

        moduleName = RegistryExampleIngestModuleFactory.moduleName

        # registryKeysFound is a list of lists with the following records:
        # abstractFile, registry key location, key name, key value.
        for registryKey in self.registryKeysFound:
            attributes = ArrayList()
            art = registryKey[0].newArtifact(artId)
            attributes.add(BlackboardAttribute(attributeIdRegKeyLoc, moduleName, registryKey[1]))
            attributes.add(BlackboardAttribute(attributeIdRunKeyName, moduleName, registryKey[2]))
            attributes.add(BlackboardAttribute(attributeIdRunKeyValue, moduleName, registryKey[3]))
            art.addAttributes(attributes)
            # Index the artifact for keyword search.
            try:
                blackboard.indexArtifact(art)
            except:
                self._logger.log(Level.WARNING, "Error indexing artifact " + art.getDisplayName())

        # Clean up the RegistryExample directory and files.
        try:
            shutil.rmtree(tempDir)
        except:
            self.log(Level.INFO, "removal of directory tree failed " + tempDir)

        # After all hives, post a message to the ingest messages inbox.
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                              "RegistryExample", " RegistryExample Files Have Been Analyzed ")
        IngestServices.getInstance().postMessage(message)

        return IngestModule.ProcessResult.OK

    # Write the given abstract file's content to tempDir under fileName.
    def writeHiveFile(self, file, fileName, tempDir):
        filePath = os.path.join(tempDir, fileName)
        ContentUtils.writeToFile(file, File(filePath))

    # Parse the SOFTWARE hive, collecting Run/RunOnce values into
    # self.registryKeysFound.
    def processSoftwareHive(self, softwareHive, abstractFile):
        # Open the registry hive file.
        softwareRegFile = RegistryHiveFile(File(softwareHive))
        for runKey in self.registrySoftwareRunKeys:
            currentKey = self.findRegistryKey(softwareRegFile, runKey)
            # BUGFIX: findRegistryKey returns None when the key is missing;
            # guard before dereferencing.
            if currentKey is not None and len(currentKey.getValueList()) > 0:
                for skValue in currentKey.getValueList():
                    regKey = []
                    regKey.append(abstractFile)
                    regKey.append(runKey)
                    regKey.append(str(skValue.getName()))
                    regKey.append(skValue.getValue().getAsString())
                    self.registryKeysFound.append(regKey)

    # Parse an NTUSER.DAT hive, collecting Run/RunOnce values into
    # self.registryKeysFound.
    def processNTUserHive(self, ntuserHive, abstractFile):
        # Open the registry hive file.
        ntuserRegFile = RegistryHiveFile(File(ntuserHive))
        for runKey in self.registryNTUserRunKeys:
            currentKey = self.findRegistryKey(ntuserRegFile, runKey)
            # BUGFIX: guard against a missing key (None) before dereferencing.
            if currentKey is not None and len(currentKey.getValueList()) > 0:
                for skValue in currentKey.getValueList():
                    regKey = []
                    regKey.append(abstractFile)
                    regKey.append(runKey)
                    regKey.append(str(skValue.getName()))
                    regKey.append(skValue.getValue().getAsString())
                    self.registryKeysFound.append(regKey)

    # Walk the '/'-separated registryKey path down from the hive root.
    # Returns the subkey, or None when any path component is missing.
    def findRegistryKey(self, registryHiveFile, registryKey):
        rootKey = registryHiveFile.getRoot()
        regKeyList = registryKey.split('/')
        currentKey = rootKey
        try:
            for key in regKeyList:
                currentKey = currentKey.getSubkey(key)
            return currentKey
        except:
            # Key not found.
            # BUGFIX: was 'return null', which raises NameError in
            # Python/Jython; 'None' is the correct sentinel.
            return None

View File

@ -273,19 +273,24 @@ class TestRunner(object):
# Compare output with gold and display results
TestResultsDiffer.run_diff(test_data)
print("Html report passed: ", test_data.html_report_passed)
# NOTE: commented out html version items
# print("Html report passed: ", test_data.html_report_passed)
print("Errors diff passed: ", test_data.errors_diff_passed)
print("DB diff passed: ", test_data.db_diff_passed)
# run time test only for the specific jenkins test
if test_data.main_config.timing:
print("Run time test passed: ", test_data.run_time_passed)
test_data.overall_passed = (test_data.html_report_passed and
test_data.errors_diff_passed and test_data.db_diff_passed)
# NOTE: commented out html version items
#test_data.overall_passed = (test_data.html_report_passed and
#test_data.errors_diff_passed and test_data.db_diff_passed)
test_data.overall_passed = (test_data.errors_diff_passed and test_data.db_diff_passed)
# otherwise, do the usual
else:
test_data.overall_passed = (test_data.html_report_passed and
test_data.errors_diff_passed and test_data.db_diff_passed)
# NOTE: commented out html version items
#test_data.overall_passed = (test_data.html_report_passed and
#test_data.errors_diff_passed and test_data.db_diff_passed)
test_data.overall_passed = (test_data.errors_diff_passed and test_data.db_diff_passed)
Reports.generate_reports(test_data)
if(not test_data.overall_passed):
@ -1009,10 +1014,11 @@ class TestResultsDiffer(object):
test_data.errors_diff_passed = passed
# Compare html output
gold_report_path = test_data.get_html_report_path(DBType.GOLD)
output_report_path = test_data.get_html_report_path(DBType.OUTPUT)
passed = TestResultsDiffer._html_report_diff(test_data)
test_data.html_report_passed = passed
# NOTE: commented out html version items
# gold_report_path = test_data.get_html_report_path(DBType.GOLD)
# output_report_path = test_data.get_html_report_path(DBType.OUTPUT)
# passed = TestResultsDiffer._html_report_diff(test_data)
# test_data.html_report_passed = passed
# Compare time outputs
if test_data.main_config.timing:
@ -1070,51 +1076,52 @@ class TestResultsDiffer(object):
else:
return True
def _html_report_diff(test_data):
"""Compare the output and gold html reports. Diff util is used for this purpose.
Diff -r -N -x <non-textual files> --ignore-matching-lines <regex> <folder-location-1> <folder-location-2>
is executed.
Diff is recursively used to scan through the HTML report directories. Modify the <regex> to suit the needs.
Currently, the regex is set to match certain lines found on index.html and summary.html, and skip (read ignore)
them.
Diff returns 0 when there is no difference, 1 when there is difference, and 2 when there is trouble (trouble not
defined in the official documentation).
Args:
test_data TestData object which contains initialized report_paths.
Returns:
true, if the reports match, false otherwise.
"""
gold_report_path = test_data.get_html_report_path(DBType.GOLD)
output_report_path = test_data.get_html_report_path(DBType.OUTPUT)
try:
# Ensure gold is passed before output
(subprocess.check_output(["diff", '-r', '-N', '-x', '*.png', '-x', '*.ico', '--ignore-matching-lines',
'HTML Report Generated on \|Autopsy Report for case \|Case:\|Case Number:'
'\|Examiner:\|Unalloc_', gold_report_path, output_report_path]))
print_report("", "REPORT COMPARISON", "The test html reports matched the gold reports")
return True
except subprocess.CalledProcessError as e:
if e.returncode == 1:
Errors.print_error("Error Code: 1\nThe HTML reports did not match.")
diff_file = codecs.open(test_data.output_path + "\HTML-Report-Diff.txt", "wb", "utf_8")
diff_file.write(str(e.output.decode("utf-8")))
return False
if e.returncode == 2:
Errors.print_error("Error Code: 2\nTrouble executing the Diff Utility.")
diff_file = codecs.open(test_data.output_path + "\HTML-Report-Diff.txt", "wb", "utf_8")
diff_file.write(str(e.output.decode("utf-8")))
return False
except OSError as e:
Errors.print_error("Error: OSError while performing html report diff")
Errors.print_error(str(e) + "\n")
return False
except Exception as e:
Errors.print_error("Error: Unknown fatal error comparing reports.")
Errors.print_error(str(e) + "\n")
logging.critical(traceback.format_exc())
return False
# NOTE: commented out html version items
# def _html_report_diff(test_data):
# """Compare the output and gold html reports. Diff util is used for this purpose.
# Diff -r -N -x <non-textual files> --ignore-matching-lines <regex> <folder-location-1> <folder-location-2>
# is executed.
# Diff is recursively used to scan through the HTML report directories. Modify the <regex> to suit the needs.
# Currently, the regex is set to match certain lines found on index.html and summary.html, and skip (read ignore)
# them.
# Diff returns 0 when there is no difference, 1 when there is difference, and 2 when there is trouble (trouble not
# defined in the official documentation).
#
# Args:
# test_data TestData object which contains initialized report_paths.
#
# Returns:
# true, if the reports match, false otherwise.
# """
# gold_report_path = test_data.get_html_report_path(DBType.GOLD)
# output_report_path = test_data.get_html_report_path(DBType.OUTPUT)
# try:
# # Ensure gold is passed before output
# (subprocess.check_output(["diff", '-r', '-N', '-x', '*.png', '-x', '*.ico', '--ignore-matching-lines',
# 'HTML Report Generated on \|Autopsy Report for case \|Case:\|Case Number:'
# '\|Examiner:\|Unalloc_', gold_report_path, output_report_path]))
# print_report("", "REPORT COMPARISON", "The test html reports matched the gold reports")
# return True
# except subprocess.CalledProcessError as e:
# if e.returncode == 1:
# Errors.print_error("Error Code: 1\nThe HTML reports did not match.")
# diff_file = codecs.open(test_data.output_path + "\HTML-Report-Diff.txt", "wb", "utf_8")
# diff_file.write(str(e.output.decode("utf-8")))
# return False
# if e.returncode == 2:
# Errors.print_error("Error Code: 2\nTrouble executing the Diff Utility.")
# diff_file = codecs.open(test_data.output_path + "\HTML-Report-Diff.txt", "wb", "utf_8")
# diff_file.write(str(e.output.decode("utf-8")))
# return False
# except OSError as e:
# Errors.print_error("Error: OSError while performing html report diff")
# Errors.print_error(str(e) + "\n")
# return False
# except Exception as e:
# Errors.print_error("Error: Unknown fatal error comparing reports.")
# Errors.print_error(str(e) + "\n")
# logging.critical(traceback.format_exc())
# return False
def _run_time_diff(test_data, old_time_path):
""" Compare run times for this run, and the run previous.
@ -1371,7 +1378,8 @@ class Reports(object):
vars.append( str(len(search_log_set("autopsy", "Stopping ingest due to low disk space on disk", test_data))) )
vars.append( make_local_path("gold", test_data.image_name, DB_FILENAME) )
vars.append( make_local_path("gold", test_data.image_name, "standard.html") )
vars.append( str(test_data.html_report_passed) )
# NOTE: commented out html version items
# vars.append( str(test_data.html_report_passed) )
vars.append( test_data.ant_to_string() )
# Join it together with a ", "
output = "|".join(vars)

View File

@ -469,9 +469,9 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
# Ignore TIFF size and hash if extracted from PDFs.
# See JIRA-6951 for more details.
# index -1 = last element in the list, which is extension
# index -3 = 3rd from the end, which is the parent path.
if fields_list[-1] == "'tif'" and fields_list[-3].endswith(".pdf/'"):
# index -3 = 3rd from the end, which is extension
# index -5 = 5th from the end, which is the parent path.
if fields_list[-3] == "'tif'" and fields_list[-5].endswith(".pdf/'"):
fields_list[15] = "'SIZE_IGNORED'"
fields_list[23] = "'MD5_IGNORED'"
fields_list[24] = "'SHA256_IGNORED'"

Binary file not shown.

View File

@ -19,7 +19,7 @@
<target name="get-thirdparty-jars" description="get third-party jar dependencies">
<mkdir dir="${ext.dir}"/>
<copy file="${thirdparty.dir}/java-libpst/java-libpst-1.0-SNAPSHOT.jar" todir="${ext.dir}" />
<copy file="${thirdparty.dir}/java-libpst/java-libpst-0.9.5-SNAPSHOT.jar" todir="${ext.dir}" />
<copy file="${thirdparty.dir}/apache-mime4j/apache-mime4j-core-0.8.0-SNAPSHOT.jar" todir="${ext.dir}" />
<copy file="${thirdparty.dir}/apache-mime4j/apache-mime4j-dom-0.8.0-SNAPSHOT.jar" todir="${ext.dir}" />
<copy file="${thirdparty.dir}/apache-mime4j/apache-mime4j-mbox-iterator-0.8.0-SNAPSHOT.jar" todir="${ext.dir}" />

View File

@ -7,14 +7,16 @@ file.reference.apache-mime4j-dom-0.8.0.jar=release/modules/ext/apache-mime4j-dom
file.reference.apache-mime4j-mbox-iterator-0.8.0.jar=release/modules/ext/apache-mime4j-mbox-iterator-0.8.0-SNAPSHOT.jar
file.reference.commons-validator-1.6.jar=release/modules/ext/commons-validator-1.6.jar
file.reference.guava-19.0.jar=release/modules/ext/guava-19.0.jar
file.reference.java-libpst-1.0-SNAPSHOT.jar=release/modules/ext/java-libpst-1.0-SNAPSHOT.jar
file.reference.ez-vcard-0.10.5.jar=release/modules/ext/ez-vcard-0.10.5.jar
file.reference.java-libpst-0.9.5-SNAPSHOT.jar=release/modules/ext/java-libpst-0.9.5-SNAPSHOT.jar
file.reference.vinnie-2.0.2.jar=release/modules/ext/vinnie-2.0.2.jar
javac.source=1.8
javac.compilerargs=-Xlint -Xlint:-serial
javadoc.reference.guava-19.0.jar=release/modules/ext/guava-19.0-javadoc.jar
javadoc.reference.java-libpst-0.9.5-SNAPSHOT.jar=release/modules/ext/java-libpst-0.9.5-SNAPSHOT.jar
license.file=../LICENSE-2.0.txt
nbm.homepage=http://www.sleuthkit.org/autopsy/
nbm.needs.restart=true
source.reference.guava-19.0.jar=release/modules/ext/guava-19.0-sources.jar
source.reference.java-libpst-0.9.5-SNAPSHOT.jar=release/modules/ext/java-libpst-0.9.5-SNAPSHOT.jar
spec.version.base=4.0

View File

@ -84,14 +84,14 @@
<runtime-relative-path>ext/apache-mime4j-core-0.8.0-SNAPSHOT.jar</runtime-relative-path>
<binary-origin>release/modules/ext/apache-mime4j-core-0.8.0-SNAPSHOT.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/java-libpst-1.0-SNAPSHOT.jar</runtime-relative-path>
<binary-origin>release/modules/ext/java-libpst-1.0-SNAPSHOT.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/guava-19.0.jar</runtime-relative-path>
<binary-origin>release/modules/ext/guava-19.0.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/java-libpst-0.9.5-SNAPSHOT.jar</runtime-relative-path>
<binary-origin>release/modules/ext/java-libpst-0.9.5-SNAPSHOT.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/apache-mime4j-dom-0.8.0-SNAPSHOT.jar</runtime-relative-path>
<binary-origin>release/modules/ext/apache-mime4j-dom-0.8.0-SNAPSHOT.jar</binary-origin>

View File

@ -43,7 +43,7 @@ class EmailMessage {
private String localPath = "";
private boolean hasAttachment = false;
private long sentDate = 0L;
private List<Attachment> attachments = new ArrayList<>();
private final List<Attachment> attachments = new ArrayList<>();
private long id = -1L;
private String messageID = "";
private String inReplyToID = "";
@ -410,4 +410,16 @@ class EmailMessage {
}
}
static class AttachedEmailMessage extends Attachment {
private final EmailMessage emailMessage;
AttachedEmailMessage(EmailMessage emailMessage) {
this.emailMessage = emailMessage;
}
EmailMessage getEmailMessage() {
return emailMessage;
}
}
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019 Basis Technology Corp.
* Copyright 2019-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -26,12 +26,11 @@ import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.logging.Level;
import org.apache.james.mime4j.dom.BinaryBody;
import org.apache.james.mime4j.dom.Body;
import org.apache.james.mime4j.dom.Entity;
import org.apache.james.mime4j.dom.Message;
import org.apache.james.mime4j.dom.MessageWriter;
import org.apache.james.mime4j.dom.Multipart;
import org.apache.james.mime4j.dom.SingleBody;
import org.apache.james.mime4j.dom.TextBody;
import org.apache.james.mime4j.dom.address.AddressList;
import org.apache.james.mime4j.dom.address.Mailbox;
@ -39,6 +38,7 @@ import org.apache.james.mime4j.dom.address.MailboxList;
import org.apache.james.mime4j.dom.field.ContentDispositionField;
import org.apache.james.mime4j.dom.field.ContentTypeField;
import org.apache.james.mime4j.message.DefaultMessageBuilder;
import org.apache.james.mime4j.message.DefaultMessageWriter;
import org.apache.james.mime4j.stream.Field;
import org.apache.james.mime4j.stream.MimeConfig;
import org.openide.util.NbBundle;
@ -293,7 +293,7 @@ class MimeJ4MessageParser implements AutoCloseable{
* @param e
*/
@NbBundle.Messages({"MimeJ4MessageParser.handleAttch.noOpenCase.errMsg=Exception while getting open case."})
private static void handleAttachment(EmailMessage email, Entity e, long fileID, int index) {
private void handleAttachment(EmailMessage email, Entity e, long fileID, int index) {
String outputDirPath;
String relModuleOutputPath;
try {
@ -322,25 +322,31 @@ class MimeJ4MessageParser implements AutoCloseable{
String outPath = outputDirPath + uniqueFilename;
Body body = e.getBody();
if (body instanceof SingleBody) {
if (body != null) {
long fileLength;
try (EncodedFileOutputStream fos = new EncodedFileOutputStream(new FileOutputStream(outPath), TskData.EncodingType.XOR1)) {
((SingleBody) body).writeTo(fos);
fileLength = fos.getBytesWritten();
} catch (IOException ex) {
logger.log(Level.WARNING, "Failed to create file output stream for: " + outPath, ex); //NON-NLS
return;
}
EmailMessage.Attachment attach = new EmailMessage.Attachment();
EmailMessage.Attachment attach;
MessageWriter msgWriter = new DefaultMessageWriter();
if(body instanceof Message) {
msgWriter.writeMessage((Message)body, fos);
attach = new EmailMessage.AttachedEmailMessage(extractEmail((Message)body, email.getLocalPath(), fileID));
} else {
msgWriter.writeBody(body, fos);
attach = new EmailMessage.Attachment();
}
fileLength = fos.getBytesWritten();
attach.setName(filename);
attach.setLocalPath(relModuleOutputPath + uniqueFilename);
attach.setSize(fileLength);
attach.setEncodingType(TskData.EncodingType.XOR1);
email.addAttachment(attach);
} catch (IOException ex) {
logger.log(Level.WARNING, "Failed to create file output stream for: " + outPath, ex); //NON-NLS
}
}
}
/**

View File

@ -277,16 +277,30 @@ class PstParser implements AutoCloseable{
*/
private EmailMessage extractEmailMessage(PSTMessage msg, String localPath, long fileID) {
EmailMessage email = new EmailMessage();
email.setRecipients(msg.getDisplayTo());
email.setCc(msg.getDisplayCC());
email.setBcc(msg.getDisplayBCC());
email.setSender(getSender(msg.getSenderName(), msg.getSenderEmailAddress()));
String toAddress = msg.getDisplayTo();
String ccAddress = msg.getDisplayCC();
String bccAddress = msg.getDisplayBCC();
String receivedByName = msg.getReceivedByName();
String receivedBySMTPAddress = msg.getReceivedBySMTPAddress();
if (toAddress.contains(receivedByName)) {
toAddress = toAddress.replace(receivedByName, receivedBySMTPAddress);
}
if (ccAddress.contains(receivedByName)) {
ccAddress = ccAddress.replace(receivedByName, receivedBySMTPAddress);
}
if (bccAddress.contains(receivedByName)) {
bccAddress = bccAddress.replace(receivedByName, receivedBySMTPAddress);
}
email.setRecipients(toAddress);
email.setCc(ccAddress);
email.setBcc(bccAddress);
email.setSender(getSender(msg.getSenderName(), msg.getSentRepresentingSMTPAddress()));
email.setSentDate(msg.getMessageDeliveryTime());
email.setTextBody(msg.getBody());
if (false == msg.getTransportMessageHeaders().isEmpty()) {
email.setHeaders("\n-----HEADERS-----\n\n" + msg.getTransportMessageHeaders() + "\n\n---END HEADERS--\n\n");
}
email.setHtmlBody(msg.getBodyHTML());
String rtf = "";
try {

View File

@ -48,6 +48,7 @@ import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
import org.sleuthkit.autopsy.ingest.IngestMonitor;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
import org.sleuthkit.autopsy.thunderbirdparser.EmailMessage.AttachedEmailMessage;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.AccountFileInstance;
@ -72,6 +73,7 @@ import org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments.Fil
* structure and metadata.
*/
public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
private static final Logger logger = Logger.getLogger(ThunderbirdMboxFileIngestModule.class.getName());
private final IngestServices services = IngestServices.getInstance();
private FileManager fileManager;
@ -112,8 +114,8 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
}
//skip unalloc
if ((abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) ||
(abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK))) {
if ((abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS))
|| (abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK))) {
return ProcessResult.OK;
}
@ -374,8 +376,8 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
int len = in.read(buffer);
while (len != -1) {
len = in.read(buffer);
if (buffer[0] == 13 && buffer[1] == 10 && buffer[2] == 70 && buffer[3] == 114 &&
buffer[4] == 111 && buffer[5] == 109 && buffer[6] == 32) {
if (buffer[0] == 13 && buffer[1] == 10 && buffer[2] == 70 && buffer[3] == 114
&& buffer[4] == 111 && buffer[5] == 109 && buffer[6] == 32) {
mboxSplitOffset.add(in.getCurPosition() - 5);
in.skip(MBOX_SIZE_TO_SPLIT);
}
@ -385,7 +387,6 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
}
private void processMboxFile(File file, AbstractFile abstractFile, String emailFolder) {
try (MboxParser emailIterator = MboxParser.getEmailIterator(emailFolder, file, abstractFile.getId())) {
@ -450,13 +451,9 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
List<AbstractFile> derivedFiles = new ArrayList<>();
AccountFileInstanceCache accountFileInstanceCache = new AccountFileInstanceCache(abstractFile, currentCase);
BlackboardArtifact msgArtifact = addEmailArtifact(message, abstractFile, accountFileInstanceCache);
createEmailArtifact(message, abstractFile, accountFileInstanceCache, derivedFiles);
accountFileInstanceCache.clear();
if ((msgArtifact != null) && (message.hasAttachment())) {
derivedFiles.addAll(handleAttachments(message.getAttachments(), abstractFile, msgArtifact));
}
if (derivedFiles.isEmpty() == false) {
for (AbstractFile derived : derivedFiles) {
services.fireModuleContentEvent(new ModuleContentEvent(derived));
@ -558,17 +555,12 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
if (partialEmailsForThreading.size() > msgCnt) {
EmailMessage threaded = partialEmailsForThreading.get(msgCnt++);
if(threaded.getMessageID().equals(current.getMessageID()) &&
threaded.getSubject().equals(current.getSubject())) {
if (threaded.getMessageID().equals(current.getMessageID())
&& threaded.getSubject().equals(current.getSubject())) {
current.setMessageThreadID(threaded.getMessageThreadID());
}
}
BlackboardArtifact msgArtifact = addEmailArtifact(current, abstractFile, accountFileInstanceCache);
if ((msgArtifact != null) && (current.hasAttachment())) {
derivedFiles.addAll(handleAttachments(current.getAttachments(), abstractFile, msgArtifact ));
}
createEmailArtifact(current, abstractFile, accountFileInstanceCache, derivedFiles);
}
if (derivedFiles.isEmpty() == false) {
@ -581,6 +573,21 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
}
context.addFilesToJob(derivedFiles);
}
void createEmailArtifact(EmailMessage email, AbstractFile abstractFile, AccountFileInstanceCache accountFileInstanceCache, List<AbstractFile> derivedFiles) {
BlackboardArtifact msgArtifact = addEmailArtifact(email, abstractFile, accountFileInstanceCache);
if ((msgArtifact != null) && (email.hasAttachment())) {
derivedFiles.addAll(handleAttachments(email.getAttachments(), abstractFile, msgArtifact));
for (EmailMessage.Attachment attach : email.getAttachments()) {
if (attach instanceof AttachedEmailMessage) {
createEmailArtifact(((AttachedEmailMessage) attach).getEmailMessage(), abstractFile, accountFileInstanceCache, derivedFiles);
}
}
}
}
/**
* Add the given attachments as derived files and reschedule them for
* ingest.
@ -627,7 +634,6 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
}
}
try {
communicationArtifactsHelper.addAttachments(messageArtifact, new MessageAttachments(fileAttachments, Collections.emptyList()));
} catch (TskCoreException ex) {
@ -654,7 +660,8 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
}
/**
* Finds and returns a set of unique email addresses found in the input string
* Finds and returns a set of unique email addresses found in the input
* string
*
* @param input - input string, like the To/CC line from an email header
*
@ -712,12 +719,10 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
senderAddress = senderAddressList.get(0);
try {
senderAccountInstance = accountFileInstanceCache.getAccountInstance(senderAddress);
}
catch(TskCoreException ex) {
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Failed to create account for email address " + senderAddress, ex); //NON-NLS
}
}
else {
} else {
logger.log(Level.WARNING, "Failed to find sender address, from = {0}", from); //NON-NLS
}
@ -738,8 +743,7 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
try {
AccountFileInstance recipientAccountInstance = accountFileInstanceCache.getAccountInstance(addr);
recipientAccountInstances.add(recipientAccountInstance);
}
catch(TskCoreException ex) {
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Failed to create account for email address " + addr, ex); //NON-NLS
}
}
@ -765,7 +769,6 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
addArtifactAttribute(rtf, ATTRIBUTE_TYPE.TSK_EMAIL_CONTENT_RTF, bbattributes);
addArtifactAttribute(threadID, ATTRIBUTE_TYPE.TSK_THREAD_ID, bbattributes);
try {
if (context.fileIngestIsCancelled()) {
return null;
@ -839,17 +842,19 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
}
/**
* Cache for storing AccountFileInstance.
* The idea is that emails will be used multiple times in a file and
* we shouldn't do a database lookup each time.
* Cache for storing AccountFileInstance. The idea is that emails will be
* used multiple times in a file and we shouldn't do a database lookup each
* time.
*/
static private class AccountFileInstanceCache {
private final Map<String, AccountFileInstance> cacheMap;
private final AbstractFile file;
private final Case currentCase;
/**
* Create a new cache. Caches are linked to a specific file.
*
* @param file
* @param currentCase
*/
@ -873,8 +878,8 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
return cacheMap.get(email);
}
AccountFileInstance accountInstance =
currentCase.getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(Account.Type.EMAIL, email,
AccountFileInstance accountInstance
= currentCase.getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(Account.Type.EMAIL, email,
EmailParserModuleFactory.getModuleName(), file);
cacheMap.put(email, accountInstance);
return accountInstance;

View File

@ -76,6 +76,9 @@ fi
chmod u+x autopsy/markmckinnon/Export*
chmod u+x autopsy/markmckinnon/parse*
# allow solr dependencies to execute
chmod -R u+x autopsy/solr/bin
# make sure it is executable
chmod u+x bin/autopsy