Mirror of https://github.com/overcuriousity/autopsy-flatpak.git
Synced 2025-07-12 07:56:16 +00:00

Commit 26d2538c4b
Merge remote-tracking branch 'upstream/develop' into 7282_portCaseHost

.gitattributes (vendored): 6 lines changed
@@ -13,3 +13,9 @@ Doxyfile text
*.py text diff=python
*.pl text

+ # ensure solr scripts that are bash scripts not ending with.sh are lf instead of crlf
+ /KeywordSearch/solr/bin/autopsy-solr eol=lf
+ /KeywordSearch/solr/bin/init.d/solr eol=lf
+ /KeywordSearch/solr/bin/post eol=lf
+ /KeywordSearch/solr/bin/solr eol=lf
@@ -61,19 +61,19 @@ final public class TagNameDefinition implements Comparable<TagNameDefinition> {
private final TskData.FileKnown knownStatus;

private static final List<TagNameDefinition> STANDARD_TAGS_DEFINITIONS = new ArrayList<>();
- private static final List<String> OLD_CATEGORY_TAG_NAMES = new ArrayList<>();
+ private static final List<String> PROJECT_VIC_NAMES_NO_LONGER_USED = new ArrayList<>();

static {
STANDARD_TAGS_DEFINITIONS.add(new TagNameDefinition(Bundle.TagNameDefinition_predefTagNames_bookmark_text(), "", TagName.HTML_COLOR.NONE, TskData.FileKnown.UNKNOWN));
STANDARD_TAGS_DEFINITIONS.add(new TagNameDefinition(Bundle.TagNameDefinition_predefTagNames_followUp_text(), "", TagName.HTML_COLOR.NONE, TskData.FileKnown.UNKNOWN));
STANDARD_TAGS_DEFINITIONS.add(new TagNameDefinition(Bundle.TagNameDefinition_predefTagNames_notableItem_text(), "", TagName.HTML_COLOR.NONE, TskData.FileKnown.BAD));

- OLD_CATEGORY_TAG_NAMES.add("CAT-1: Child Exploitation (Illegal)");
- OLD_CATEGORY_TAG_NAMES.add("CAT-2: Child Exploitation (Non-Illegal/Age Difficult)");
- OLD_CATEGORY_TAG_NAMES.add("CAT-3: CGI/Animation (Child Exploitive)");
- OLD_CATEGORY_TAG_NAMES.add("CAT-4: Exemplar/Comparison (Internal Use Only)");
- OLD_CATEGORY_TAG_NAMES.add("CAT-5: Non-pertinent");
- OLD_CATEGORY_TAG_NAMES.add("CAT-0: Uncategorized");
+ PROJECT_VIC_NAMES_NO_LONGER_USED.add("CAT-1: Child Exploitation (Illegal)");
+ PROJECT_VIC_NAMES_NO_LONGER_USED.add("CAT-2: Child Exploitation (Non-Illegal/Age Difficult)");
+ PROJECT_VIC_NAMES_NO_LONGER_USED.add("CAT-3: CGI/Animation (Child Exploitive)");
+ PROJECT_VIC_NAMES_NO_LONGER_USED.add("CAT-4: Exemplar/Comparison (Internal Use Only)");
+ PROJECT_VIC_NAMES_NO_LONGER_USED.add("CAT-5: Non-pertinent");
+ PROJECT_VIC_NAMES_NO_LONGER_USED.add("CAT-0: Uncategorized");
}

/**

@@ -259,7 +259,7 @@ final public class TagNameDefinition implements Comparable<TagNameDefinition> {
*/
static synchronized Set<TagNameDefinition> getTagNameDefinitions() {
if (needsVersionUpdate()) {
- updateTagDefinitions();
+ updatePropertyFile();
}

String tagsProperty = ModuleSettings.getConfigSetting(TAGS_SETTINGS_NAME, TAG_NAMES_SETTING_KEY);

@@ -311,7 +311,7 @@ final public class TagNameDefinition implements Comparable<TagNameDefinition> {
/**
* Updates the Tag Definition file to the current format.
*/
- private static void updateTagDefinitions() {
+ private static void updatePropertyFile() {
Integer version = getPropertyFileVersion();
List<TagNameDefinition> definitions = new ArrayList<>();

@@ -355,18 +355,18 @@ final public class TagNameDefinition implements Comparable<TagNameDefinition> {
}

// Remove the standard and Project VIC tags from the list
- List<String> tagStrings = new ArrayList<>();
+ List<String> tagStringsToKeep = new ArrayList<>();
List<String> standardTags = getStandardTagNames();
for (TagNameDefinition def : definitions) {
if (!standardTags.contains(def.getDisplayName())
- && !OLD_CATEGORY_TAG_NAMES.contains(def.getDisplayName())) {
- tagStrings.add(def.toSettingsFormat());
+ && !PROJECT_VIC_NAMES_NO_LONGER_USED.contains(def.getDisplayName())) {
+ tagStringsToKeep.add(def.toSettingsFormat());
}
}

// Write out the version and the new tag list.
ModuleSettings.setConfigSetting(TAGS_SETTINGS_NAME, TAG_SETTING_VERSION_KEY, Integer.toString(TAG_SETTINGS_VERSION));
- ModuleSettings.setConfigSetting(TAGS_SETTINGS_NAME, TAG_NAMES_SETTING_KEY, String.join(";", tagStrings));
+ ModuleSettings.setConfigSetting(TAGS_SETTINGS_NAME, TAG_NAMES_SETTING_KEY, String.join(";", tagStringsToKeep));
}

/**
@@ -88,7 +88,7 @@ final public class TagSetDefinition {
}

/**
- * Returns a list of the defined TagSet objects.
+ * Returns a list of configured TagSets (from the user's config folder)
*
* @return A list of TagSetDefinition objects or empty list if none were
* found.
@@ -55,7 +55,9 @@ public class TagsManager implements Closeable {
private static final Logger LOGGER = Logger.getLogger(TagsManager.class.getName());
private final SleuthkitCase caseDb;

- private static String DEFAULT_TAG_SET_NAME = "Project VIC";
+ // NOTE: This name is also hard coded in Image Gallery and Projet Vic module.
+ // They need to stay in sync
+ private static String PROJECT_VIC_TAG_SET_NAME = "Project VIC";

private static final Object lock = new Object();

@@ -196,7 +198,7 @@ public class TagsManager implements Closeable {
try {
List<TagSet> tagSetList = Case.getCurrentCaseThrows().getSleuthkitCase().getTaggingManager().getTagSets();
for (TagSet tagSet : tagSetList) {
- if (tagSet.getName().equals(DEFAULT_TAG_SET_NAME)) {
+ if (tagSet.getName().equals(PROJECT_VIC_TAG_SET_NAME)) {
for (TagName tagName : tagSet.getTagNames()) {
tagList.add(tagName.getDisplayName());
}

@@ -237,7 +239,7 @@ public class TagsManager implements Closeable {
}

/**
- * Creates a new TagSetDefinition file.
+ * Creates a new TagSetDefinition file that will be used for future cases
*
* @param tagSetDef The tag set definition.
*

@@ -258,23 +260,26 @@ public class TagsManager implements Closeable {
TagsManager(SleuthkitCase caseDb) {
this.caseDb = caseDb;

- // Add standard tags and the Project VIC default tag set and tags.
+ // Add standard tags and any configured tag sets.
TaggingManager taggingMgr = caseDb.getTaggingManager();
try {
- List<TagSet> setList = taggingMgr.getTagSets();
- if (setList.isEmpty()) {
+ List<TagSet> tagSetsInCase = taggingMgr.getTagSets();
+ if (tagSetsInCase.isEmpty()) {

// add the standard tag names
for (TagNameDefinition def : TagNameDefinition.getStandardTagNameDefinitions()) {
caseDb.addOrUpdateTagName(def.getDisplayName(), def.getDescription(), def.getColor(), def.getKnownStatus());
}
- //Assume new case and add tag sets

+ //Assume new case and add all tag sets
for (TagSetDefinition setDef : TagSetDefinition.readTagSetDefinitions()) {
- List<TagName> tagNameList = new ArrayList<>();
+ List<TagName> tagNamesInSet = new ArrayList<>();
for (TagNameDefinition tagNameDef : setDef.getTagNameDefinitions()) {
- tagNameList.add(caseDb.addOrUpdateTagName(tagNameDef.getDisplayName(), tagNameDef.getDescription(), tagNameDef.getColor(), tagNameDef.getKnownStatus()));
+ tagNamesInSet.add(caseDb.addOrUpdateTagName(tagNameDef.getDisplayName(), tagNameDef.getDescription(), tagNameDef.getColor(), tagNameDef.getKnownStatus()));
}

- if (!tagNameList.isEmpty()) {
- taggingMgr.addTagSet(setDef.getName(), tagNameList);
+ if (!tagNamesInSet.isEmpty()) {
+ taggingMgr.addTagSet(setDef.getName(), tagNamesInSet);
}
}
}
@@ -27,6 +27,7 @@ import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.validator.routines.DomainValidator;
import org.apache.commons.validator.routines.EmailValidator;
+ import org.sleuthkit.autopsy.coreutils.NetworkUtils;

/**
* Provides functions for normalizing data by attribute type before insertion or

@@ -144,11 +145,11 @@ final public class CorrelationAttributeNormalizer {
private static String normalizeDomain(String data) throws CorrelationAttributeNormalizationException {
DomainValidator validator = DomainValidator.getInstance(true);
if (validator.isValid(data)) {
- return data.toLowerCase();
+ return NetworkUtils.extractDomain(data.toLowerCase());
} else {
final String validIpAddressRegex = "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$";
if (data.matches(validIpAddressRegex)) {
- return data;
+ return NetworkUtils.extractDomain(data);
} else {
throw new CorrelationAttributeNormalizationException(String.format("Data was expected to be a valid domain: %s", data));
}
@@ -28,6 +28,7 @@ import org.openide.util.NbPreferences;
import org.python.icu.util.TimeZone;
import org.sleuthkit.autopsy.machinesettings.UserMachinePreferences;
import org.sleuthkit.autopsy.coreutils.ModuleSettings;
+ import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.coreutils.TextConverterException;
import org.sleuthkit.autopsy.coreutils.Version;
import org.sleuthkit.datamodel.CaseDbConnectionInfo;

@@ -84,7 +85,8 @@ public final class UserPreferences {
private static final boolean DISPLAY_TRANSLATED_NAMES_DEFAULT = true;
public static final String EXTERNAL_HEX_EDITOR_PATH = "ExternalHexEditorPath";
public static final String SOLR_MAX_JVM_SIZE = "SolrMaxJVMSize";
- private static final int DEFAULT_SOLR_HEAP_SIZE_MB = 2048;
+ private static final int DEFAULT_SOLR_HEAP_SIZE_MB_64BIT_PLATFORM = 2048;
+ private static final int DEFAULT_SOLR_HEAP_SIZE_MB_32BIT_PLATFORM = 512;
public static final String RESULTS_TABLE_PAGE_SIZE = "ResultsTablePageSize";
private static final String GEO_TILE_OPTION = "GeolocationTileOption";
private static final String GEO_OSM_TILE_ZIP_PATH = "GeolocationOsmZipPath";

@@ -534,12 +536,17 @@ public final class UserPreferences {
}

/**
- * Get the maximum JVM heap size (in MB) for the embedded Solr server.
+ * Get the maximum JVM heap size (in MB) for the embedded Solr server. The returned value
+ * depends on the platform (64bit vs 32bit).
*
- * @return Saved value or default (2 GB)
+ * @return Saved value or default (2 GB for 64bit platforms, 512MB for 32bit)
*/
public static int getMaxSolrVMSize() {
- return preferences.getInt(SOLR_MAX_JVM_SIZE, DEFAULT_SOLR_HEAP_SIZE_MB);
+ if (PlatformUtil.is64BitJVM()) {
+ return preferences.getInt(SOLR_MAX_JVM_SIZE, DEFAULT_SOLR_HEAP_SIZE_MB_64BIT_PLATFORM);
+ } else {
+ return preferences.getInt(SOLR_MAX_JVM_SIZE, DEFAULT_SOLR_HEAP_SIZE_MB_32BIT_PLATFORM);
+ }
}

/**
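A note on usage: the sketch below shows how a caller might consume the now platform-aware value when assembling the embedded Solr JVM arguments. Only getMaxSolrVMSize() comes from the change above; the flag formatting and variable names are illustrative assumptions, not code from this commit.

// Hypothetical caller: turn the configured/derived heap ceiling into a JVM flag.
int solrHeapMb = UserPreferences.getMaxSolrVMSize();        // defaults to 2048 MB on 64-bit JVMs, 512 MB on 32-bit
String solrHeapArg = String.format("-Xmx%dm", solrHeapMb);  // e.g. "-Xmx2048m"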
@@ -30,6 +30,7 @@ import org.openide.util.NbBundle;

/**
* Enum to represent the six categories in the DHS image categorization scheme.
+ * NOTE: This appears to not be used anywhere anymore after the ImageGallery refactoring
*/
@NbBundle.Messages({
"Category.one=CAT-1: Child Exploitation (Illegal)",
@@ -389,11 +389,6 @@ public final class FileTypes implements AutopsyVisitableItem {
return content.newArtifact(artifactTypeID);
}

- @Override
- public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection<BlackboardAttribute> attributesList) throws TskCoreException {
- return content.newAnalysisResult(artifactType, score, conclusion, configuration, justification, attributesList);
- }
-
@Override
public BlackboardArtifact newArtifact(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException {
return content.newArtifact(type);

@@ -434,15 +429,6 @@ public final class FileTypes implements AutopsyVisitableItem {
return content.getAllArtifacts();
}

- @Override
- public List<AnalysisResult> getAllAnalysisResults() throws TskCoreException {
- return content.getAllAnalysisResults();
- }
-
- public List<AnalysisResult> getAnalysisResults(BlackboardArtifact.Type artifactType) throws TskCoreException {
- return content.getAnalysisResults(artifactType);
- }
-
@Override
public Set<String> getHashSetNames() throws TskCoreException {
return content.getHashSetNames();

@@ -468,9 +454,24 @@ public final class FileTypes implements AutopsyVisitableItem {
return content.getAllArtifactsCount();
}

@Override
public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type type, Score score, String string, String string1, String string2, Collection<BlackboardAttribute> clctn) throws TskCoreException {
return content.newAnalysisResult(type, score, string, string1, string2, clctn);
}

+ @Override
+ public Score getAggregateScore() throws TskCoreException {
+ return content.getAggregateScore();
+ }
+
+ @Override
+ public List<AnalysisResult> getAnalysisResults(BlackboardArtifact.Type type) throws TskCoreException {
+ return content.getAnalysisResults(type);
+ }
+
+ @Override
+ public List<AnalysisResult> getAllAnalysisResults() throws TskCoreException {
+ return content.getAllAnalysisResults();
+ }
}
}
@@ -477,6 +477,9 @@ public final class IngestJobSettings {
case "Exif Parser": //NON-NLS
moduleNames.add("Picture Analyzer"); //NON-NLS
break;
+ case "Drone Analyzer":
+ moduleNames.add("DJI Drone Analyzer");
+ break;
default:
moduleNames.add(name);
}
@@ -41,8 +41,6 @@ import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.HashDb;
import org.sleuthkit.datamodel.AbstractFile;
- import org.sleuthkit.datamodel.AnalysisResult;
- import org.sleuthkit.datamodel.AnalysisResultAdded;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;

@@ -50,7 +48,6 @@ import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.HashHitInfo;
import org.sleuthkit.datamodel.HashUtility;
- import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;

@@ -519,7 +516,7 @@ public class HashDbIngestModule implements FileIngestModule {
private void postHashSetHitToBlackboard(AbstractFile abstractFile, String md5Hash, String hashSetName, String comment, boolean showInboxMessage) {
try {
String moduleName = HashLookupModuleFactory.getModuleName();
- //BlackboardArtifact badFile = abstractFile.newArtifact(ARTIFACT_TYPE.TSK_HASHSET_HIT);
+ BlackboardArtifact badFile = abstractFile.newArtifact(ARTIFACT_TYPE.TSK_HASHSET_HIT);
Collection<BlackboardAttribute> attributes = new ArrayList<>();
//TODO Revisit usage of deprecated constructor as per TSK-583
//BlackboardAttribute att2 = new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), MODULE_NAME, "Known Bad", hashSetName);

@@ -527,22 +524,14 @@ public class HashDbIngestModule implements FileIngestModule {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_HASH_MD5, moduleName, md5Hash));
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT, moduleName, comment));

- SleuthkitCase.CaseDbTransaction trans = this.skCase.beginTransaction();
-
- AnalysisResultAdded resultAdded = blackboard.newAnalysisResult(new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_HASHSET_HIT), abstractFile.getId(), abstractFile.getDataSourceObjectId(), new Score(Score.Significance.MEDIUM, Score.Confidence.HIGH), moduleName, comment, hashSetName, attributes, trans);
- AnalysisResult badFile = resultAdded.getAnalysisResult();
- trans.commit();
+ badFile.addAttributes(attributes);

try {
/*
* post the artifact which will index the artifact for keyword
* search, and fire an event to notify UI of this new artifact
*/

blackboard.postArtifact(badFile, moduleName);

} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + badFile.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(

@@ -585,7 +574,7 @@
abstractFile.getName() + md5Hash,
badFile));
}
- } catch (TskException | Blackboard.BlackboardException ex) {
+ } catch (TskException ex) {
logger.log(Level.WARNING, "Error creating blackboard artifact", ex); //NON-NLS
}
}
@@ -92,6 +92,10 @@ public class ALeappAnalyzerIngestModule implements DataSourceIngestModule {
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;

+ if (false == PlatformUtil.is64BitOS()) {
+ throw new IngestModuleException(NbBundle.getMessage(this.getClass(), "AleappAnalyzerIngestModule.not.64.bit.os"));
+ }
+
if (false == PlatformUtil.isWindowsOS()) {
throw new IngestModuleException(Bundle.ALeappAnalyzerIngestModule_requires_windows());
}
@@ -2,7 +2,9 @@ ILeappAnalyzerIngestModule.init.exception.msg=Unable to find {0}.
ILeappAnalyzerIngestModule.processing.file=Processing file {0}
ILeappAnalyzerIngestModule.parsing.file=Parsing file {0}
ILeappAnalyzerIngestModule.processing.filesystem=Processing filesystem
+ IleappAnalyzerIngestModule.not.64.bit.os=iLeapp will not run on a 32bit operating system
ALeappAnalyzerIngestModule.init.exception.msg=Unable to find {0}.
ALeappAnalyzerIngestModule.processing.file=Processing file {0}
ALeappAnalyzerIngestModule.parsing.file=Parsing file {0}
ALeappAnalyzerIngestModule.processing.filesystem=Processing filesystem
+ AleappAnalyzerIngestModule.not.64.bit.os=aLeapp will not run on a 32bit operating system

@@ -22,10 +22,12 @@ ILeappAnalyzerIngestModule.init.exception.msg=Unable to find {0}.
ILeappAnalyzerIngestModule.processing.file=Processing file {0}
ILeappAnalyzerIngestModule.parsing.file=Parsing file {0}
ILeappAnalyzerIngestModule.processing.filesystem=Processing filesystem
+ IleappAnalyzerIngestModule.not.64.bit.os=iLeapp will not run on 32bit operating system
ALeappAnalyzerIngestModule.init.exception.msg=Unable to find {0}.
ALeappAnalyzerIngestModule.processing.file=Processing file {0}
ALeappAnalyzerIngestModule.parsing.file=Parsing file {0}
ALeappAnalyzerIngestModule.processing.filesystem=Processing filesystem
+ AleappAnalyzerIngestModule.not.64.bit.os=aLeapp will not run on 32bit operating system
ILeappAnalyzerIngestModule.report.name=iLeapp Html Report
ILeappAnalyzerIngestModule.requires.windows=iLeapp module requires windows.
ILeappAnalyzerIngestModule.running.iLeapp=Running iLeapp
@@ -92,6 +92,10 @@ public class ILeappAnalyzerIngestModule implements DataSourceIngestModule {
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;

+ if (false == PlatformUtil.is64BitOS()) {
+ throw new IngestModuleException(NbBundle.getMessage(this.getClass(), "IleappAnalyzerIngestModule.not.64.bit.os"));
+ }
+
if (false == PlatformUtil.isWindowsOS()) {
throw new IngestModuleException(Bundle.ILeappAnalyzerIngestModule_requires_windows());
}
@@ -59,6 +59,7 @@ import org.sleuthkit.autopsy.casemodule.Case;
import static org.sleuthkit.autopsy.casemodule.Case.getCurrentCase;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
+ import org.sleuthkit.autopsy.coreutils.NetworkUtils;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException;

@@ -379,7 +380,9 @@ public final class LeappFileProcessor {
return Collections.emptyList();
}

- BlackboardAttribute attr = (value == null) ? null : getAttribute(colAttr.getAttributeType(), value, fileName);
+ String formattedValue = formatValueBasedOnAttrType(colAttr, value);
+
+ BlackboardAttribute attr = (value == null) ? null : getAttribute(colAttr.getAttributeType(), formattedValue, fileName);
if (attr == null) {
logger.log(Level.WARNING, String.format("Blackboard attribute could not be parsed column %s at line %d in file %s. Omitting row.", colAttr.getColumnName(), lineNum, fileName));
return Collections.emptyList();

@@ -394,6 +397,21 @@ public final class LeappFileProcessor {
return attrsToRet;
}

+ /**
+ * Check type of attribute and possibly format string based on it.
+ *
+ * @param colAttr Column Attribute information
+ * @param value string to be formatted
+ * @return formatted string based on attribute type if no attribute type found then return original string
+ */
+ private String formatValueBasedOnAttrType(TsvColumn colAttr, String value) {
+ if (colAttr.getAttributeType().getTypeName().equals("TSK_DOMAIN")) {
+ return NetworkUtils.extractDomain(value);
+ }
+
+ return value;
+ }
+
/**
* The format of time stamps in tsv.
*/
@@ -14,7 +14,7 @@

<!-- for viewers -->
<dependency conf="autopsy_core->*" org="org.freedesktop.gstreamer" name="gst1-java-core" rev="1.0.0"/>
- <dependency conf="autopsy_core->*" org="net.java.dev.jna" name="jna-platform" rev="5.6.0"/>
+ <dependency conf="autopsy_core->*" org="net.java.dev.jna" name="jna-platform" rev="5.7.0"/>

<!-- for file search -->
<dependency conf="autopsy_core->*" org="com.github.lgooddatepicker" name="LGoodDatePicker" rev="10.3.1"/>
@@ -42,8 +42,8 @@ file.reference.javassist-3.12.1.GA.jar=release/modules/ext/javassist-3.12.1.GA.j
file.reference.jfxtras-common-8.0-r4.jar=release/modules/ext/jfxtras-common-8.0-r4.jar
file.reference.jfxtras-controls-8.0-r4.jar=release/modules/ext/jfxtras-controls-8.0-r4.jar
file.reference.jfxtras-fxml-8.0-r4.jar=release/modules/ext/jfxtras-fxml-8.0-r4.jar
- file.reference.jna-5.6.0.jar=release/modules/ext/jna-5.6.0.jar
- file.reference.jna-platform-5.6.0.jar=release/modules/ext/jna-platform-5.6.0.jar
+ file.reference.jna-5.7.0.jar=release/modules/ext/jna-5.7.0.jar
+ file.reference.jna-platform-5.7.0.jar=release/modules/ext/jna-platform-5.7.0.jar
file.reference.joda-time-2.4.jar=release/modules/ext/joda-time-2.4.jar
file.reference.jsr305-1.3.9.jar=release/modules/ext/jsr305-1.3.9.jar
file.reference.LGoodDatePicker-10.3.1.jar=release/modules/ext/LGoodDatePicker-10.3.1.jar
@@ -923,8 +923,8 @@
<binary-origin>release/modules/ext/commons-compress-1.18.jar</binary-origin>
</class-path-extension>
<class-path-extension>
- <runtime-relative-path>ext/jna-platform-5.6.0.jar</runtime-relative-path>
- <binary-origin>release\modules\ext\jna-platform-5.6.0.jar</binary-origin>
+ <runtime-relative-path>ext/jna-platform-5.7.0.jar</runtime-relative-path>
+ <binary-origin>release\modules\ext\jna-platform-5.7.0.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/opencv-248.jar</runtime-relative-path>

@@ -951,8 +951,8 @@
<binary-origin>release/modules/ext/imageio-bmp-3.2.jar</binary-origin>
</class-path-extension>
<class-path-extension>
- <runtime-relative-path>ext/jna-5.6.0.jar</runtime-relative-path>
- <binary-origin>release\modules\ext\jna-5.6.0.jar</binary-origin>
+ <runtime-relative-path>ext/jna-5.7.0.jar</runtime-relative-path>
+ <binary-origin>release\modules\ext\jna-5.7.0.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/commons-lang-2.6.jar</runtime-relative-path>
@@ -96,7 +96,6 @@ public final class ImageGalleryController {
private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestJobEvent.DATA_SOURCE_ANALYSIS_STARTED, IngestManager.IngestJobEvent.DATA_SOURCE_ANALYSIS_COMPLETED);
private static final Set<IngestManager.IngestModuleEvent> INGEST_MODULE_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestModuleEvent.DATA_ADDED, IngestManager.IngestModuleEvent.FILE_DONE);

- private static String DEFAULT_TAG_SET_NAME = "Project VIC";
/*
* The file limit for image gallery. If the selected data source (or all
* data sources, if that option is selected) has more than this many files

@@ -738,7 +737,7 @@
List<TagSet> tagSetList = getCaseDatabase().getTaggingManager().getTagSets();
if (tagSetList != null && !tagSetList.isEmpty()) {
for (TagSet set : tagSetList) {
- if (set.getName().equals(getCategoryTagSetName())) {
+ if (set.getName().equals(ImageGalleryService.PROJECT_VIC_TAG_SET_NAME)) {
return set;
}
}

@@ -749,14 +748,6 @@
}
}

- /**
- * Returns the name of the category tag set.
- *
- * @return Tagset name
- */
- static String getCategoryTagSetName() {
- return DEFAULT_TAG_SET_NAME;
- }
-
/**
* A listener for ingest module application events.

@@ -839,8 +830,11 @@
Content newDataSource = (Content) event.getNewValue();
if (isListeningEnabled()) {
try {
+ // If the data source already exists and has a status other than UNKNOWN, don't overwrite it.
+ if(drawableDB.getDataSourceDbBuildStatus(newDataSource.getId()) == DrawableDB.DrawableDbBuildStatusEnum.UNKNOWN) {
drawableDB.insertOrUpdateDataSource(newDataSource.getId(), DrawableDB.DrawableDbBuildStatusEnum.UNKNOWN);
- } catch (SQLException ex) {
+ }
+ } catch (SQLException | TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Error updating datasources table (data source object ID = %d, status = %s)", newDataSource.getId(), DrawableDB.DrawableDbBuildStatusEnum.UNKNOWN.toString()), ex); //NON-NLS
}
}
@@ -44,20 +44,32 @@ import org.sleuthkit.datamodel.TskData;
})
public class ImageGalleryService implements AutopsyService {

- private static final String CATEGORY_ONE_NAME = "Child Exploitation (Illegal)";
- private static final String CATEGORY_TWO_NAME = "Child Exploitation (Non-Illegal/Age Difficult)";
- private static final String CATEGORY_THREE_NAME = "CGI/Animation (Child Exploitive)";
- private static final String CATEGORY_FOUR_NAME = "Exemplar/Comparison (Internal Use Only)";
- private static final String CATEGORY_FIVE_NAME = "Non-pertinent";
+ /* Image Gallery has its own definition of Project VIC tag names because
+ * these will be used if the Project Vic module is not installed. These will
+ * get added when a case is opened if the tag set is not already defined.
+ *
+ * The following list of names must be kept in sync with the CountryManager
+ * code in the ProjectVic module.
+ *
+ * Autopsy Core Tag code and TSK DataModel upgrade code also have a
+ * references to the "Projet VIC" set name. Be careful changing any of these names.
+ */
+ static String PROJECT_VIC_TAG_SET_NAME = "Project VIC";
+ private static final String PV_US_CAT0 = "Non-Pertinent";
+ private static final String PV_US_CAT1 = "Child Abuse Material - (CAM)";
+ private static final String PV_US_CAT2 = "Child Exploitive (Non-CAM) Age Difficult";
+ private static final String PV_US_CAT3 = "CGI/Animation - Child Exploitive";
+ private static final String PV_US_CAT4 = "Comparison Images";

- private static final List<TagNameDefinition> DEFAULT_CATEGORY_DEFINITION = new ArrayList<>();
+ private static final List<TagNameDefinition> PROJECT_VIC_US_CATEGORIES = new ArrayList<>();

static {
- DEFAULT_CATEGORY_DEFINITION.add(new TagNameDefinition(CATEGORY_ONE_NAME, "", TagName.HTML_COLOR.RED, TskData.FileKnown.BAD));
- DEFAULT_CATEGORY_DEFINITION.add(new TagNameDefinition(CATEGORY_TWO_NAME, "", TagName.HTML_COLOR.LIME, TskData.FileKnown.BAD));
- DEFAULT_CATEGORY_DEFINITION.add(new TagNameDefinition(CATEGORY_THREE_NAME, "", TagName.HTML_COLOR.YELLOW, TskData.FileKnown.BAD));
- DEFAULT_CATEGORY_DEFINITION.add(new TagNameDefinition(CATEGORY_FOUR_NAME, "", TagName.HTML_COLOR.PURPLE, TskData.FileKnown.UNKNOWN));
- DEFAULT_CATEGORY_DEFINITION.add(new TagNameDefinition(CATEGORY_FIVE_NAME, "", TagName.HTML_COLOR.FUCHSIA, TskData.FileKnown.UNKNOWN));
+ // NOTE: The colors here are what will be shown in the border
+ PROJECT_VIC_US_CATEGORIES.add(new TagNameDefinition(PV_US_CAT0, "", TagName.HTML_COLOR.GREEN, TskData.FileKnown.UNKNOWN));
+ PROJECT_VIC_US_CATEGORIES.add(new TagNameDefinition(PV_US_CAT1, "", TagName.HTML_COLOR.RED, TskData.FileKnown.BAD));
+ PROJECT_VIC_US_CATEGORIES.add(new TagNameDefinition(PV_US_CAT2, "", TagName.HTML_COLOR.YELLOW, TskData.FileKnown.BAD));
+ PROJECT_VIC_US_CATEGORIES.add(new TagNameDefinition(PV_US_CAT3, "", TagName.HTML_COLOR.FUCHSIA, TskData.FileKnown.BAD));
+ PROJECT_VIC_US_CATEGORIES.add(new TagNameDefinition(PV_US_CAT4, "", TagName.HTML_COLOR.BLUE, TskData.FileKnown.UNKNOWN));
}

@Override

@@ -91,17 +103,17 @@ public class ImageGalleryService implements AutopsyService {

// Check to see if the Project VIC tag set exists, if not create a
// tag set using the default tags.
- boolean addDefaultTagSet = true;
+ boolean addProjVicTagSet = true;
List<TagSet> tagSets = context.getCase().getServices().getTagsManager().getAllTagSets();
for (TagSet set : tagSets) {
- if (set.getName().equals(ImageGalleryController.getCategoryTagSetName())) {
- addDefaultTagSet = false;
+ if (set.getName().equals(PROJECT_VIC_TAG_SET_NAME)) {
+ addProjVicTagSet = false;
break;
}
}

- if (addDefaultTagSet) {
- addDefaultTagSet(context.getCase());
+ if (addProjVicTagSet) {
+ addProjetVicTagSet(context.getCase());
}

ImageGalleryController.createController(context.getCase());

@@ -134,13 +146,11 @@
*
* @throws TskCoreException
*/
- private void addDefaultTagSet(Case currentCase) throws TskCoreException {
+ private void addProjetVicTagSet(Case currentCase) throws TskCoreException {
List<TagName> tagNames = new ArrayList<>();
- for (TagNameDefinition def : DEFAULT_CATEGORY_DEFINITION) {
+ for (TagNameDefinition def : PROJECT_VIC_US_CATEGORIES) {
tagNames.add(currentCase.getSleuthkitCase().addOrUpdateTagName(def.getDisplayName(), def.getDescription(), def.getColor(), def.getKnownStatus()));
}

- currentCase.getServices().getTagsManager().addTagSet(ImageGalleryController.getCategoryTagSetName(), tagNames);
+ currentCase.getServices().getTagsManager().addTagSet(PROJECT_VIC_TAG_SET_NAME, tagNames);
}

}
@@ -157,7 +157,10 @@ public final class OpenAction extends CallableSystemAction {
}
Platform.runLater(() -> {
+ ImageGalleryController controller;
+ // @@@ This call gets a lock. We shouldn't do this in the UI....
+ controller = ImageGalleryController.getController(currentCase);

// Display an error if we could not get the controller and return
if (controller == null) {
Alert errorDIalog = new Alert(Alert.AlertType.ERROR);
errorDIalog.initModality(Modality.APPLICATION_MODAL);

@@ -174,6 +177,7 @@
return;
}

+ // Make sure the user is aware of Single vs Multi-user behaviors
if (currentCase.getCaseType() == Case.CaseType.MULTI_USER_CASE
&& ImageGalleryPreferences.isMultiUserCaseInfoDialogDisabled() == false) {
Alert dialog = new Alert(Alert.AlertType.INFORMATION);
@@ -99,9 +99,9 @@ public class CategoryManager {
}

/**
- * get the number of file with the given {@link DhsImageCategory}
+ * get the number of file with the given tag
*
- * @param cat get the number of files with Category = cat
+ * @param tagName get the number of files with Category = tagName
*
* @return the number of files with the given Category
*/

@@ -110,20 +110,18 @@
}

/**
- * increment the cached value for the number of files with the given
- * {@link DhsImageCategory}
+ * increment the cached value for the number of files with the given tag
*
- * @param cat the Category to increment
+ * @param tagName the Category to increment
*/
synchronized public void incrementCategoryCount(TagName tagName) {
categoryCounts.getUnchecked(tagName).increment();
}

/**
- * decrement the cached value for the number of files with the given
- * DhsImageCategory
+ * decrement the cached value for the number of files with the given tag
*
- * @param cat the Category to decrement
+ * @param tagName the Category to decrement
*/
synchronized public void decrementCategoryCount(TagName tagName) {
categoryCounts.getUnchecked(tagName).decrement();
@@ -207,19 +207,19 @@ public final class DrawableDB {
*/
UNKNOWN,
/**
- * Analyis (an ingest job or image gallery database rebuild) for the
+ * Analysis (an ingest job or image gallery database rebuild) for the
* data source is in progress.
*/
IN_PROGRESS,
/**
- * Analyis (an ingest job or image gallery database rebuild) for the
+ * Analysis (an ingest job or image gallery database rebuild) for the
* data source has been completed and at least one file in the data
* source has a MIME type (ingest filters may have been applied, so some
* files may not have been typed).
*/
COMPLETE,
/**
- * Analyis (an ingest job or image gallery database rebuild) for the
+ * Analysis (an ingest job or image gallery database rebuild) for the
* data source has been completed, but the files for the data source
* were not assigned a MIME type (file typing was not enabled).
*/
@@ -187,7 +187,7 @@ Server.query.exception.msg=Error running query: {0}
Server.query2.exception.msg=Error running query: {0}
Server.queryTerms.exception.msg=Error running terms query: {0}
Server.connect.exception.msg=Failed to connect to Solr server: {0}
- Server.openCore.exception.msg=Keyword search service not yet running
+ Server.openCore.exception.msg=Local keyword search service not yet running
Server.openCore.exception.cantOpen.msg=Could not create or open index
Server.openCore.exception.noIndexDir.msg=Index directory could not be created or is missing
Server.request.exception.exception.msg=Could not issue Solr request
@@ -18,6 +18,9 @@
*/
package org.sleuthkit.autopsy.keywordsearch;

+ import com.google.common.util.concurrent.ThreadFactoryBuilder;
+ import java.util.concurrent.ExecutorService;
+ import java.util.concurrent.Executors;
import java.util.logging.Level;
import org.apache.solr.client.solrj.SolrServerException;
import org.openide.modules.ModuleInstall;

@@ -39,6 +42,7 @@ class Installer extends ModuleInstall {

private static final Logger logger = Logger.getLogger(Installer.class.getName());
private static final long serialVersionUID = 1L;
+ private static final String KWS_START_THREAD_NAME = "KWS-server-start-%d";

@Override
public void restored() {

@@ -46,6 +50,10 @@ class Installer extends ModuleInstall {
KeywordSearchSettings.setDefaults();

final Server server = KeywordSearch.getServer();

+ ExecutorService jobProcessingExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat(KWS_START_THREAD_NAME).build());
+ Runnable kwsStartTask = new Runnable() {
+ public void run() {
try {
server.start();
} catch (SolrServerNoPortException ex) {

@@ -60,6 +68,12 @@
reportInitError(ex.getMessage());
}
}
+ };
+
+ // start KWS service on the background thread. Currently all it does is start the embedded Solr server.
+ jobProcessingExecutor.submit(kwsStartTask);
+ jobProcessingExecutor.shutdown(); // tell executor no more work is coming
}

@Override
public boolean closing() {
@@ -31,6 +31,7 @@ import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.ConnectException;
+ import java.net.DatagramSocket;
import java.net.ServerSocket;
import java.net.SocketException;
import java.nio.charset.Charset;

@@ -239,6 +240,8 @@ public class Server {
private static final String CORE_PROPERTIES = "core.properties";
private static final boolean DEBUG = false;//(Version.getBuildType() == Version.Type.DEVELOPMENT);
private static final int NUM_COLLECTION_CREATION_RETRIES = 5;
+ private static final int NUM_EMBEDDED_SERVER_RETRIES = 12; // attempt to connect to embedded Solr server for 1 minute
+ private static final int EMBEDDED_SERVER_RETRY_WAIT_SEC = 5;

public enum CORE_EVT_STATES {

@@ -270,6 +273,8 @@
Server() {
initSettings();

+ localSolrServer = getSolrClient("http://localhost:" + localSolrServerPort + "/solr");
+
serverAction = new ServerAction();
File solr8Folder = InstalledFileLocator.getDefault().locate("solr", Server.class.getPackage().getName(), false); //NON-NLS
File solr4Folder = InstalledFileLocator.getDefault().locate("solr4", Server.class.getPackage().getName(), false); //NON-NLS

@@ -664,11 +669,13 @@
*/
@NbBundle.Messages({
"Server.status.failed.msg=Local Solr server did not respond to status request. This may be because the server failed to start or is taking too long to initialize.",})
- void startLocalSolr(SOLR_VERSION version) throws KeywordSearchModuleException, SolrServerNoPortException, SolrServerException {
+ synchronized void startLocalSolr(SOLR_VERSION version) throws KeywordSearchModuleException, SolrServerNoPortException, SolrServerException {

+ logger.log(Level.INFO, "Starting local Solr " + version + " server"); //NON-NLS
if (isLocalSolrRunning()) {
if (localServerVersion.equals(version)) {
// this version of local server is already running
logger.log(Level.INFO, "Local Solr " + version + " server is already running"); //NON-NLS
return;
} else {
// wrong version of local server is running, stop it

@@ -723,9 +730,8 @@
}

// Wait for the Solr server to start and respond to a statusRequest request.
- for (int numRetries = 0; numRetries < 6; numRetries++) {
+ for (int numRetries = 0; numRetries < NUM_EMBEDDED_SERVER_RETRIES; numRetries++) {
if (isLocalSolrRunning()) {
- localSolrServer = getSolrClient("http://localhost:" + localSolrServerPort + "/solr");
final List<Long> pids = this.getSolrPIDs();
logger.log(Level.INFO, "New Solr process PID: {0}", pids); //NON-NLS
return;

@@ -734,7 +740,7 @@
// Local Solr server did not respond so we sleep for
// 5 seconds before trying again.
try {
- TimeUnit.SECONDS.sleep(5);
+ TimeUnit.SECONDS.sleep(EMBEDDED_SERVER_RETRY_WAIT_SEC);
} catch (InterruptedException ex) {
logger.log(Level.WARNING, "Timer interrupted"); //NON-NLS
}

@@ -767,6 +773,23 @@
* @param port the port to check for availability
*/
static boolean isPortAvailable(int port) {
+ final String osName = PlatformUtil.getOSName().toLowerCase();
+ if (osName != null && osName.toLowerCase().startsWith("mac")) {
+ return isPortAvailableOSX(port);
+ } else {
+ return isPortAvailableDefault(port);
+ }
+ }
+
+ /**
+ * Checks to see if a specific port is available.
+ *
+ * NOTE: This is used on non-OS X systems as of right now but could be
+ * replaced with the OS X version.
+ *
+ * @param port the port to check for availability
+ */
+ static boolean isPortAvailableDefault(int port) {
ServerSocket ss = null;
try {

@@ -792,6 +815,48 @@
return false;
}

+ /**
+ * Checks to see if a specific port is available.
+ *
+ * NOTE: This is only used on OSX for now, but could replace default
+ * implementation in the future.
+ *
+ * @param port The port to check for availability.
+ * @throws IllegalArgumentException If port is outside range of possible ports.
+ */
+ static boolean isPortAvailableOSX(int port) {
+ // implementation taken from https://stackoverflow.com/a/435579
+ if (port < 1 || port > 65535) {
+ throw new IllegalArgumentException("Invalid start port: " + port);
+ }
+
+ ServerSocket ss = null;
+ DatagramSocket ds = null;
+ try {
+ ss = new ServerSocket(port);
+ ss.setReuseAddress(true);
+ ds = new DatagramSocket(port);
+ ds.setReuseAddress(true);
+ return true;
+ } catch (IOException e) {
+ } finally {
+ if (ds != null) {
+ ds.close();
+ }
+
+ if (ss != null) {
+ try {
+ ss.close();
+ } catch (IOException e) {
+ /* should not be thrown */
+ }
+ }
+ }
+
+ return false;
+ }
+
/**
* Changes the current solr server port. Only call this after available.
*
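For context, a minimal sketch of how the new OS-aware port check might be used to pick a free port before starting the embedded server. Only isPortAvailable(int) comes from the hunk above; the candidate range and the surrounding loop are assumptions for illustration (and the method is package-private, so a real caller would live in the same package).

// Hypothetical same-package caller: probe a small range and keep the first free port.
int chosenPort = -1;
for (int candidate = 23232; candidate < 23232 + 10; candidate++) { // range is illustrative
    if (Server.isPortAvailable(candidate)) {
        chosenPort = candidate;
        break;
    }
}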
@@ -1875,13 +1940,22 @@
* @throws IOException
*/
private void connectToEmbeddedSolrServer() throws SolrServerException, IOException {
- HttpSolrClient solrServer = getSolrClient("http://localhost:" + localSolrServerPort + "/solr");
TimingMetric metric = HealthMonitor.getTimingMetric("Solr: Connectivity check");
- CoreAdminRequest.getStatus(null, solrServer);
+ CoreAdminRequest.getStatus(null, localSolrServer);
HealthMonitor.submitTimingMetric(metric);
}

/**
* Attempts to connect to the given Solr server, which is running in
* SoulrCloud mode. This API does not work for the local Solr which is NOT
* running in SolrCloud mode.
*
* @param host Host name of the remote Solr server
* @param port Port of the remote Solr server
*
* @throws SolrServerException
* @throws IOException
*/
void connectToSolrServer(String host, String port) throws SolrServerException, IOException {
try (HttpSolrClient solrServer = getSolrClient("http://" + host + ":" + port + "/solr")) {
connectToSolrServer(solrServer);

@@ -1947,46 +2021,6 @@
}
}

- /* ELTODO leaving this for reference, will delete later
- private boolean clusterStatusWithCollection(String collectionName) throws IOException, SolrServerException {
- ModifiableSolrParams params = new ModifiableSolrParams();
- params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString());
- params.set("collection", collectionName);
- SolrRequest request = new QueryRequest(params);
- request.setPath("/admin/collections");
-
- NamedList<Object> statusResponse;
- try {
- statusResponse = currentSolrServer.request(request);
- } catch (RemoteSolrException ex) {
- // collection doesn't exist
- return false;
- }
-
- if (statusResponse == null) {
- logger.log(Level.SEVERE, "Collections response should not be null"); //NON-NLS
- return false;
- }
-
- NamedList<Object> cluster = (NamedList<Object>) statusResponse.get("cluster");
- if (cluster == null) {
- logger.log(Level.SEVERE, "Cluster should not be null"); //NON-NLS
- return false;
- }
- NamedList<Object> collections = (NamedList<Object>) cluster.get("collections");
- if (cluster == null) {
- logger.log(Level.SEVERE, "Collections should not be null in cluster state"); //NON-NLS
- return false;
- }
- if (collections.size() == 0) {
- logger.log(Level.SEVERE, "Collections should not be empty in cluster state"); //NON-NLS
- return false;
- }
-
- Object collection = collections.get(collectionName);
- return (collection != null);
- }*/

class Collection {

// handle to the collection in Solr
@@ -401,8 +401,10 @@ public class SolrSearchService implements KeywordSearchService, AutopsyService {
throw new AutopsyServiceException(String.format("Failed to close core for %s", context.getCase().getCaseDirectory()), ex);
}

+ if (context.getCase().getSleuthkitCase() != null) {
context.getCase().getSleuthkitCase().unregisterForEvents(this);
+ }
}

/**
* Event handler for ArtifactsPostedEvents from SleuthkitCase.
@@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
- * Copyright 2012-2020 Basis Technology Corp.
+ * Copyright 2012-2021 Basis Technology Corp.
*
* Copyright 2012 42six Solutions.
*

@@ -487,7 +487,7 @@ class Chromium extends Extract {
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL,
RecentActivityExtracterModuleFactory.getModuleName(),
((result.get("host_key").toString() != null) ? result.get("host_key").toString() : ""))); //NON-NLS
- bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME,
+ bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED,
RecentActivityExtracterModuleFactory.getModuleName(),
(Long.valueOf(result.get("last_access_utc").toString()) / 1000000) - Long.valueOf("11644473600"))); //NON-NLS
@@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
- * Copyright 2012-2019 Basis Technology Corp.
+ * Copyright 2012-2021 Basis Technology Corp.
*
* Copyright 2012 42six Solutions.
* Contact: aebadirad <at> 42six <dot> com

@@ -325,18 +325,28 @@ abstract class Extract {
* @return List of BlackboarAttributes for the passed in attributes
*/
protected Collection<BlackboardAttribute> createCookieAttributes(String url,
- Long creationTime, String name, String value, String programName, String domain) {
+ Long creationTime, Long accessTime, Long endTime, String name, String value, String programName, String domain) {

Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL,
RecentActivityExtracterModuleFactory.getModuleName(),
(url != null) ? url : "")); //NON-NLS

- if (creationTime != null) {
- bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME,
+ if (creationTime != null && creationTime != 0) {
+ bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED,
RecentActivityExtracterModuleFactory.getModuleName(), creationTime));
}

+ if (accessTime != null && accessTime != 0) {
+ bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED,
+ RecentActivityExtracterModuleFactory.getModuleName(), accessTime));
+ }
+
+ if(endTime != null && endTime != 0) {
+ bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_END,
+ RecentActivityExtracterModuleFactory.getModuleName(), endTime));
+ }
+
bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME,
RecentActivityExtracterModuleFactory.getModuleName(),
(name != null) ? name : "")); //NON-NLS
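For illustration, a hedged example of calling the widened helper with the two new timestamp parameters; the literal values are invented, and per the null/zero checks above an unknown time can simply be passed as null so that attribute is skipped.

// Hypothetical call site inside an Extract subclass; values are made up for illustration.
Collection<BlackboardAttribute> cookieAttrs = createCookieAttributes(
        "https://www.example.com/",  // url
        1609459200L,                 // creationTime
        null,                        // accessTime (unknown, attribute omitted)
        1640995200L,                 // endTime (expiration)
        "session_id",                // name
        "abc123",                    // value
        this.getName(),              // programName
        NetworkUtils.extractDomain("https://www.example.com/"));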
@@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
- * Copyright 2019-2020 Basis Technology Corp.
+ * Copyright 2019-2021 Basis Technology Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -659,7 +659,7 @@ final class ExtractEdge extends Extract {
String url = flipDomain(domain);

BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE);
- bbart.addAttributes(createCookieAttributes(url, ftime, name, value, this.getName(), NetworkUtils.extractDomain(url)));
+ bbart.addAttributes(createCookieAttributes(url, null, ftime, null, name, value, this.getName(), NetworkUtils.extractDomain(url)));
return bbart;
}
@@ -267,7 +267,7 @@ class ExtractIE extends Extract {
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL,
RecentActivityExtracterModuleFactory.getModuleName(), url));
- bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME,
+ bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED,
RecentActivityExtracterModuleFactory.getModuleName(), datetime));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME,
RecentActivityExtracterModuleFactory.getModuleName(), (name != null) ? name : ""));
@@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
- * Copyright 2019 Basis Technology Corp.
+ * Copyright 2019-2021 Basis Technology Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -566,7 +566,7 @@ final class ExtractSafari extends Extract {
Cookie cookie = iter.next();

BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE);
- bbart.addAttributes(createCookieAttributes(cookie.getURL(), cookie.getCreationDate(), cookie.getName(), cookie.getValue(), this.getName(), NetworkUtils.extractDomain(cookie.getURL())));
+ bbart.addAttributes(createCookieAttributes(cookie.getURL(), cookie.getCreationDate(), null, cookie.getExpirationDate(), cookie.getName(), cookie.getValue(), this.getName(), NetworkUtils.extractDomain(cookie.getURL())));
bbartifacts.add(bbart);
}
}
@@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
- * Copyright 2012-2020 Basis Technology Corp.
+ * Copyright 2012-2021 Basis Technology Corp.
*
* Copyright 2012 42six Solutions.
* Contact: aebadirad <at> 42six <dot> com

@@ -423,7 +423,7 @@ class Firefox extends Extract {
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL,
RecentActivityExtracterModuleFactory.getModuleName(),
((host != null) ? host : ""))); //NON-NLS
- bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME,
+ bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED,
RecentActivityExtracterModuleFactory.getModuleName(),
(Long.valueOf(result.get("lastAccessed").toString())))); //NON-NLS
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME,
@@ -43,7 +43,7 @@ class EmailMessage {
private String localPath = "";
private boolean hasAttachment = false;
private long sentDate = 0L;
- private List<Attachment> attachments = new ArrayList<>();
+ private final List<Attachment> attachments = new ArrayList<>();
private long id = -1L;
private String messageID = "";
private String inReplyToID = "";

@@ -410,4 +410,16 @@ class EmailMessage {
}

}

+ static class AttachedEmailMessage extends Attachment {
+ private final EmailMessage emailMessage;
+
+ AttachedEmailMessage(EmailMessage emailMessage) {
+ this.emailMessage = emailMessage;
+ }
+
+ EmailMessage getEmailMessage() {
+ return emailMessage;
+ }
+ }
}
@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
- * Copyright 2019 Basis Technology Corp.
+ * Copyright 2019-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");

@@ -26,12 +26,11 @@ import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.logging.Level;
- import org.apache.james.mime4j.dom.BinaryBody;
import org.apache.james.mime4j.dom.Body;
import org.apache.james.mime4j.dom.Entity;
import org.apache.james.mime4j.dom.Message;
+ import org.apache.james.mime4j.dom.MessageWriter;
import org.apache.james.mime4j.dom.Multipart;
- import org.apache.james.mime4j.dom.SingleBody;
import org.apache.james.mime4j.dom.TextBody;
import org.apache.james.mime4j.dom.address.AddressList;
import org.apache.james.mime4j.dom.address.Mailbox;

@@ -39,6 +38,7 @@ import org.apache.james.mime4j.dom.address.MailboxList;
import org.apache.james.mime4j.dom.field.ContentDispositionField;
import org.apache.james.mime4j.dom.field.ContentTypeField;
import org.apache.james.mime4j.message.DefaultMessageBuilder;
+ import org.apache.james.mime4j.message.DefaultMessageWriter;
import org.apache.james.mime4j.stream.Field;
import org.apache.james.mime4j.stream.MimeConfig;
import org.openide.util.NbBundle;

@@ -293,7 +293,7 @@ class MimeJ4MessageParser implements AutoCloseable{
* @param e
*/
@NbBundle.Messages({"MimeJ4MessageParser.handleAttch.noOpenCase.errMsg=Exception while getting open case."})
- private static void handleAttachment(EmailMessage email, Entity e, long fileID, int index) {
+ private void handleAttachment(EmailMessage email, Entity e, long fileID, int index) {
String outputDirPath;
String relModuleOutputPath;
try {

@@ -322,25 +322,31 @@
String outPath = outputDirPath + uniqueFilename;

Body body = e.getBody();
- if (body instanceof SingleBody) {
+ if (body != null) {
long fileLength;
try (EncodedFileOutputStream fos = new EncodedFileOutputStream(new FileOutputStream(outPath), TskData.EncodingType.XOR1)) {
- ((SingleBody) body).writeTo(fos);
- fileLength = fos.getBytesWritten();
- } catch (IOException ex) {
- logger.log(Level.WARNING, "Failed to create file output stream for: " + outPath, ex); //NON-NLS
- return;
- }
-
- EmailMessage.Attachment attach = new EmailMessage.Attachment();
+ EmailMessage.Attachment attach;
+ MessageWriter msgWriter = new DefaultMessageWriter();
+
+ if(body instanceof Message) {
+ msgWriter.writeMessage((Message)body, fos);
+ attach = new EmailMessage.AttachedEmailMessage(extractEmail((Message)body, email.getLocalPath(), fileID));
+ } else {
+ msgWriter.writeBody(body, fos);
+ attach = new EmailMessage.Attachment();
+ }
+ fileLength = fos.getBytesWritten();
attach.setName(filename);
attach.setLocalPath(relModuleOutputPath + uniqueFilename);
attach.setSize(fileLength);
attach.setEncodingType(TskData.EncodingType.XOR1);
email.addAttachment(attach);

+ } catch (IOException ex) {
+ logger.log(Level.WARNING, "Failed to create file output stream for: " + outPath, ex); //NON-NLS
+ }
}

}

/**
@ -48,6 +48,7 @@ import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
import org.sleuthkit.autopsy.ingest.IngestMonitor;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
import org.sleuthkit.autopsy.thunderbirdparser.EmailMessage.AttachedEmailMessage;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.AccountFileInstance;
@ -72,6 +73,7 @@ import org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments.Fil
* structure and metadata.
*/
public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {

private static final Logger logger = Logger.getLogger(ThunderbirdMboxFileIngestModule.class.getName());
private final IngestServices services = IngestServices.getInstance();
private FileManager fileManager;
@ -112,8 +114,8 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
}

//skip unalloc
if ((abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) ||
(abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK))) {
if ((abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS))
|| (abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK))) {
return ProcessResult.OK;
}

@ -374,8 +376,8 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
int len = in.read(buffer);
while (len != -1) {
len = in.read(buffer);
if (buffer[0] == 13 && buffer[1] == 10 && buffer[2] == 70 && buffer[3] == 114 &&
buffer[4] == 111 && buffer[5] == 109 && buffer[6] == 32) {
if (buffer[0] == 13 && buffer[1] == 10 && buffer[2] == 70 && buffer[3] == 114
&& buffer[4] == 111 && buffer[5] == 109 && buffer[6] == 32) {
mboxSplitOffset.add(in.getCurPosition() - 5);
in.skip(MBOX_SIZE_TO_SPLIT);
}
@ -385,7 +387,6 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {

}


private void processMboxFile(File file, AbstractFile abstractFile, String emailFolder) {

try (MboxParser emailIterator = MboxParser.getEmailIterator(emailFolder, file, abstractFile.getId())) {
@ -450,13 +451,9 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
List<AbstractFile> derivedFiles = new ArrayList<>();

AccountFileInstanceCache accountFileInstanceCache = new AccountFileInstanceCache(abstractFile, currentCase);
BlackboardArtifact msgArtifact = addEmailArtifact(message, abstractFile, accountFileInstanceCache);
createEmailArtifact(message, abstractFile, accountFileInstanceCache, derivedFiles);
accountFileInstanceCache.clear();

if ((msgArtifact != null) && (message.hasAttachment())) {
derivedFiles.addAll(handleAttachments(message.getAttachments(), abstractFile, msgArtifact));
}

if (derivedFiles.isEmpty() == false) {
for (AbstractFile derived : derivedFiles) {
services.fireModuleContentEvent(new ModuleContentEvent(derived));
@ -558,17 +555,12 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
if (partialEmailsForThreading.size() > msgCnt) {
EmailMessage threaded = partialEmailsForThreading.get(msgCnt++);

if(threaded.getMessageID().equals(current.getMessageID()) &&
threaded.getSubject().equals(current.getSubject())) {
if (threaded.getMessageID().equals(current.getMessageID())
&& threaded.getSubject().equals(current.getSubject())) {
current.setMessageThreadID(threaded.getMessageThreadID());
}
}

BlackboardArtifact msgArtifact = addEmailArtifact(current, abstractFile, accountFileInstanceCache);

if ((msgArtifact != null) && (current.hasAttachment())) {
derivedFiles.addAll(handleAttachments(current.getAttachments(), abstractFile, msgArtifact ));
}
createEmailArtifact(current, abstractFile, accountFileInstanceCache, derivedFiles);
}

if (derivedFiles.isEmpty() == false) {
@ -581,6 +573,21 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
}
context.addFilesToJob(derivedFiles);
}

void createEmailArtifact(EmailMessage email, AbstractFile abstractFile, AccountFileInstanceCache accountFileInstanceCache, List<AbstractFile> derivedFiles) {
BlackboardArtifact msgArtifact = addEmailArtifact(email, abstractFile, accountFileInstanceCache);

if ((msgArtifact != null) && (email.hasAttachment())) {
derivedFiles.addAll(handleAttachments(email.getAttachments(), abstractFile, msgArtifact));

for (EmailMessage.Attachment attach : email.getAttachments()) {
if (attach instanceof AttachedEmailMessage) {
createEmailArtifact(((AttachedEmailMessage) attach).getEmailMessage(), abstractFile, accountFileInstanceCache, derivedFiles);
}
}
}
}

/**
* Add the given attachments as derived files and reschedule them for
* ingest.
@ -627,7 +634,6 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
}
}


try {
communicationArtifactsHelper.addAttachments(messageArtifact, new MessageAttachments(fileAttachments, Collections.emptyList()));
} catch (TskCoreException ex) {
@ -654,7 +660,8 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
}

/**
* Finds and returns a set of unique email addresses found in the input string
* Finds and returns a set of unique email addresses found in the input
* string
*
* @param input - input string, like the To/CC line from an email header
*
@ -712,12 +719,10 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
senderAddress = senderAddressList.get(0);
try {
senderAccountInstance = accountFileInstanceCache.getAccountInstance(senderAddress);
}
catch(TskCoreException ex) {
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Failed to create account for email address " + senderAddress, ex); //NON-NLS
}
}
else {
} else {
logger.log(Level.WARNING, "Failed to find sender address, from = {0}", from); //NON-NLS
}

@ -738,8 +743,7 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
try {
AccountFileInstance recipientAccountInstance = accountFileInstanceCache.getAccountInstance(addr);
recipientAccountInstances.add(recipientAccountInstance);
}
catch(TskCoreException ex) {
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Failed to create account for email address " + addr, ex); //NON-NLS
}
}
@ -765,7 +769,6 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
addArtifactAttribute(rtf, ATTRIBUTE_TYPE.TSK_EMAIL_CONTENT_RTF, bbattributes);
addArtifactAttribute(threadID, ATTRIBUTE_TYPE.TSK_THREAD_ID, bbattributes);


try {
if (context.fileIngestIsCancelled()) {
return null;
@ -839,17 +842,19 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
}

/**
* Cache for storing AccountFileInstance.
* The idea is that emails will be used multiple times in a file and
* we shouldn't do a database lookup each time.
* Cache for storing AccountFileInstance. The idea is that emails will be
* used multiple times in a file and we shouldn't do a database lookup each
* time.
*/
static private class AccountFileInstanceCache {

private final Map<String, AccountFileInstance> cacheMap;
private final AbstractFile file;
private final Case currentCase;

/**
* Create a new cache. Caches are linked to a specific file.
*
* @param file
* @param currentCase
*/
@ -873,8 +878,8 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
return cacheMap.get(email);
}

AccountFileInstance accountInstance =
currentCase.getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(Account.Type.EMAIL, email,
AccountFileInstance accountInstance
= currentCase.getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(Account.Type.EMAIL, email,
EmailParserModuleFactory.getModuleName(), file);
cacheMap.put(email, accountInstance);
return accountInstance;

@ -76,6 +76,9 @@ fi
chmod u+x autopsy/markmckinnon/Export*
chmod u+x autopsy/markmckinnon/parse*

# allow solr dependencies to execute
chmod -R u+x autopsy/solr/bin

# make sure it is executable
chmod u+x bin/autopsy