Mirror of https://github.com/overcuriousity/autopsy-flatpak.git (synced 2025-07-06 21:00:22 +00:00)

Commit 0e94944723: Merge branch 'release-4.13.0' of https://github.com/sleuthkit/autopsy into 5437-fb-messenger

# Conflicts:
#	InternalPythonModules/android/imo.py
#	InternalPythonModules/android/module.py
@@ -0,0 +1,49 @@
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2019 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.casemodule;

import java.io.File;
import java.nio.file.Paths;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;

/**
 * Utility class for checking whether access to a feature should be limited.
 *
 */
final class AccessLimiterUtils {

    private final static String MULTI_USER_ACCESS_FILE_NAME = "mualimit"; // NON-NLS
    private final static String MULTI_USER_ACCESS_FILE_PATH = Paths.get(PlatformUtil.getUserConfigDirectory(), MULTI_USER_ACCESS_FILE_NAME).toString();

    /**
     * Check if privileges regarding multi-user cases should be restricted.
     *
     * @return True if privileges should be restricted, false otherwise.
     */
    static boolean limitMultiUserAccess() {
        return new File(MULTI_USER_ACCESS_FILE_PATH).exists();
    }

    /**
     * Private constructor for a utility class.
     */
    private AccessLimiterUtils() {
        //private constructor intentionally left empty
    }
}
@@ -1049,7 +1049,7 @@ public class Case {
            /*
             * Enable the case-specific actions.
             */
            CallableSystemAction.get(AddImageAction.class).setEnabled(true);
            CallableSystemAction.get(AddImageAction.class).setEnabled(Case.getCurrentCase().getMetadata().getCaseType() == CaseType.SINGLE_USER_CASE || !AccessLimiterUtils.limitMultiUserAccess());
            CallableSystemAction.get(CaseCloseAction.class).setEnabled(true);
            CallableSystemAction.get(CaseDetailsAction.class).setEnabled(true);
            CallableSystemAction.get(DataSourceSummaryAction.class).setEnabled(true);
@@ -61,7 +61,7 @@ final class NewCaseVisualPanel1 extends JPanel implements DocumentListener {
     */
    void readSettings() {
        caseNameTextField.setText("");
        if (UserPreferences.getIsMultiUserModeEnabled()) {
        if (UserPreferences.getIsMultiUserModeEnabled() && !AccessLimiterUtils.limitMultiUserAccess()) {
            multiUserCaseRadioButton.setEnabled(true);
            multiUserCaseRadioButton.setSelected(true);
        } else {
@@ -29,6 +29,7 @@ import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
@@ -63,6 +64,8 @@ final class AddLogicalImageTask implements Runnable {
    private final static String MODULE_NAME = "Logical Imager"; //NON-NLS
    private final static String ROOT_STR = "root"; // NON-NLS
    private final static String VHD_EXTENSION = ".vhd"; // NON-NLS
    private final static int REPORT_PROGRESS_INTERVAL = 100;
    private final static int POST_ARTIFACT_INTERVAL = 1000;
    private final String deviceId;
    private final String timeZone;
    private final File src;
@@ -144,7 +147,7 @@ final class AddLogicalImageTask implements Runnable {
            callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources);
            return;
        }

        // Add the SearchResults.txt and users.txt to the case report
        String resultsFilename;
        if (Paths.get(dest.toString(), SEARCH_RESULTS_TXT).toFile().exists()) {
@@ -198,6 +201,7 @@ final class AddLogicalImageTask implements Runnable {
        }

        List<Content> newDataSources = new ArrayList<>();
        Map<String, List<Long>> interestingFileMap = new HashMap<>();

        if (imagePaths.isEmpty()) {
            createVHD = false;
@@ -214,7 +218,7 @@ final class AddLogicalImageTask implements Runnable {

            try {
                progressMonitor.setProgressText(Bundle.AddLogicalImageTask_addingExtractedFiles());
                addExtractedFiles(dest, resultsPath, newDataSources);
                interestingFileMap = addExtractedFiles(dest, resultsPath, newDataSources);
                progressMonitor.setProgressText(Bundle.AddLogicalImageTask_doneAddingExtractedFiles());
            } catch (IOException | TskCoreException ex) {
                errorList.add(ex.getMessage());
@@ -239,8 +243,16 @@ final class AddLogicalImageTask implements Runnable {
                if (addMultipleImagesTask.getResult() == DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS) {
                    LOGGER.log(Level.SEVERE, "Failed to add VHD datasource"); // NON-NLS
                    callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, addMultipleImagesTask.getErrorMessages(), emptyDataSources);
                    return;
                }
                try {
                    interestingFileMap = getInterestingFileMapForVHD(Paths.get(dest.toString(), resultsFilename));
                } catch (TskCoreException | IOException ex) {
                    errorList.add(Bundle.AddLogicalImageTask_failedToAddInterestingFiles(ex.getMessage()));
                    LOGGER.log(Level.SEVERE, "Failed to add interesting files", ex); // NON-NLS
                    callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.NONCRITICAL_ERRORS, errorList, emptyDataSources);
                }

            } catch (NoCurrentCaseException ex) {
                String msg = Bundle.AddLogicalImageTask_noCurrentCase();
                errorList.add(msg);
@@ -251,7 +263,7 @@ final class AddLogicalImageTask implements Runnable {

        if (cancelled) {
            if (!createVHD) {
                // TODO: When 5453 is fixed, we should be able to delete it when adding VHD.
                deleteDestinationDirectory();
            }
            errorList.add(Bundle.AddLogicalImageTask_addImageCancelled());
@@ -261,7 +273,7 @@ final class AddLogicalImageTask implements Runnable {

        try {
            progressMonitor.setProgressText(Bundle.AddLogicalImageTask_addingInterestingFiles());
            addInterestingFiles(Paths.get(dest.toString(), resultsFilename), createVHD);
            addInterestingFiles(interestingFileMap);
            progressMonitor.setProgressText(Bundle.AddLogicalImageTask_doneAddingInterestingFiles());
            if (createVHD) {
                callback.done(addMultipleImagesTask.getResult(), addMultipleImagesTask.getErrorMessages(), addMultipleImagesTask.getNewDataSources());
@@ -332,13 +344,69 @@ final class AddLogicalImageTask implements Runnable {
        "# {0} - target image path", "AddLogicalImageTask.cannotFindDataSourceObjId=Cannot find obj_id in tsk_image_names for {0}",
        "# {0} - file number", "# {1} - total files", "AddLogicalImageTask.addingInterestingFile=Adding interesting files ({0}/{1})"
    })
    private void addInterestingFiles(Path resultsPath, boolean createVHD) throws IOException, TskCoreException {
    private void addInterestingFiles(Map<String, List<Long>> interestingFileMap) throws IOException, TskCoreException {
        int lineNumber = 0;
        List<BlackboardArtifact> artifacts = new ArrayList<>();

        Iterator<Map.Entry<String, List<Long>>> iterator = interestingFileMap.entrySet().iterator();
        while (iterator.hasNext()) {

            if (cancelled) {
                // Don't delete destination directory once we started adding interesting files.
                // At this point the database and destination directory are complete.
                break;
            }

            Map.Entry<String, List<Long>> entry = iterator.next();
            String key = entry.getKey();
            String ruleSetName;
            String ruleName;
            String[] split = key.split("\t");
            ruleSetName = split[0];
            ruleName = split[1];

            List<Long> fileIds = entry.getValue();
            for (Long fileId: fileIds) {
                if (cancelled) {
                    postArtifacts(artifacts);
                    return;
                }
                if (lineNumber % REPORT_PROGRESS_INTERVAL == 0) {
                    progressMonitor.setProgressText(Bundle.AddLogicalImageTask_addingInterestingFile(lineNumber, totalFiles));
                }
                if (lineNumber % POST_ARTIFACT_INTERVAL == 0) {
                    postArtifacts(artifacts);
                    artifacts.clear();
                }
                addInterestingFileToArtifacts(fileId, ruleSetName, ruleName, artifacts);
                lineNumber++;
            }
            iterator.remove();
        }
        postArtifacts(artifacts);
    }

    private void addInterestingFileToArtifacts(long fileId, String ruleSetName, String ruleName, List<BlackboardArtifact> artifacts) throws TskCoreException {
        Collection<BlackboardAttribute> attributes = new ArrayList<>();
        BlackboardAttribute setNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, ruleSetName);
        attributes.add(setNameAttribute);
        BlackboardAttribute ruleNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, MODULE_NAME, ruleName);
        attributes.add(ruleNameAttribute);
        BlackboardArtifact artifact = this.currentCase.getSleuthkitCase().newBlackboardArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, fileId);
        artifact.addAttributes(attributes);
        artifacts.add(artifact);
    }

    @Messages({
        "# {0} - file number", "# {1} - total files", "AddLogicalImageTask.searchingInterestingFile=Searching for interesting files ({0}/{1})"
    })
    private Map<String, List<Long>> getInterestingFileMapForVHD(Path resultsPath) throws TskCoreException, IOException {
        Map<Long, List<String>> objIdToimagePathsMap = currentCase.getSleuthkitCase().getImagePaths();
        imagePathToObjIdMap = imagePathsToDataSourceObjId(objIdToimagePathsMap);
        Map<String, List<Long>> interestingFileMap = new HashMap<>();

        try (BufferedReader br = new BufferedReader(new InputStreamReader(
                new FileInputStream(resultsPath.toFile()), "UTF8"))) { // NON-NLS
            List<BlackboardArtifact> artifacts = new ArrayList<>();
            String line;
            br.readLine(); // skip the header line
            int lineNumber = 2;
@@ -362,47 +430,44 @@ final class AddLogicalImageTask implements Runnable {
                String filename = fields[7];
                String parentPath = fields[8];

                if (lineNumber % 100 == 0) {
                    progressMonitor.setProgressText(Bundle.AddLogicalImageTask_addingInterestingFile(lineNumber, totalFiles));
                if (lineNumber % REPORT_PROGRESS_INTERVAL == 0) {
                    progressMonitor.setProgressText(Bundle.AddLogicalImageTask_searchingInterestingFile(lineNumber, totalFiles));
                }
                String query = makeQuery(createVHD, vhdFilename, fileMetaAddressStr, parentPath, filename);

                // TODO - findAllFilesWhere should SQL-escape the query
                String query = makeQuery(vhdFilename, fileMetaAddressStr, parentPath, filename);
                List<AbstractFile> matchedFiles = Case.getCurrentCase().getSleuthkitCase().findAllFilesWhere(query);
                List<Long> fileIds = new ArrayList<>();
                for (AbstractFile file : matchedFiles) {
                    addInterestingFileToArtifacts(file, ruleSetName, ruleName, artifacts);
                    fileIds.add(file.getId());
                }
                String key = String.format("%s\t%s", ruleSetName, ruleName);
                if (interestingFileMap.containsKey(key)) {
                    interestingFileMap.get(key).addAll(fileIds);
                } else {
                    interestingFileMap.put(key, fileIds);
                }
                lineNumber++;
            } // end reading file

            try {
                // index the artifact for keyword search
                blackboard.postArtifacts(artifacts, MODULE_NAME);
            } catch (Blackboard.BlackboardException ex) {
                LOGGER.log(Level.SEVERE, "Unable to post artifacts to blackboard", ex); //NON-NLS
            }
        }
        return interestingFileMap;
    }

    private void addInterestingFileToArtifacts(AbstractFile file, String ruleSetName, String ruleName, List<BlackboardArtifact> artifacts) throws TskCoreException {
        Collection<BlackboardAttribute> attributes = new ArrayList<>();
        BlackboardAttribute setNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, ruleSetName);
        attributes.add(setNameAttribute);
        BlackboardAttribute ruleNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, MODULE_NAME, ruleName);
        attributes.add(ruleNameAttribute);
        if (!blackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
            BlackboardArtifact artifact = this.currentCase.getSleuthkitCase().newBlackboardArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, file.getId());
            artifact.addAttributes(attributes);
            artifacts.add(artifact);
    private void postArtifacts(List<BlackboardArtifact> artifacts) {
        try {
            // index the artifact for keyword search
            blackboard.postArtifacts(artifacts, MODULE_NAME);
        } catch (Blackboard.BlackboardException ex) {
            LOGGER.log(Level.SEVERE, "Unable to post artifacts to blackboard", ex); //NON-NLS
        }
    }

    @Messages({
        "# {0} - file number", "# {1} - total files", "AddLogicalImageTask.addingExtractedFile=Adding extracted files ({0}/{1})"
    })
    private void addExtractedFiles(File src, Path resultsPath, List<Content> newDataSources) throws TskCoreException, IOException {
    private Map<String, List<Long>> addExtractedFiles(File src, Path resultsPath, List<Content> newDataSources) throws TskCoreException, IOException {
        SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase();
        SleuthkitCase.CaseDbTransaction trans = null;
        Map<String, List<Long>> interestingFileMap = new HashMap<>();

        try {
            trans = skCase.beginTransaction();
@@ -417,7 +482,7 @@ final class AddLogicalImageTask implements Runnable {
                while ((line = br.readLine()) != null) {
                    if (cancelled) {
                        rollbackTransaction(trans);
                        return;
                        return new HashMap<>();
                    }
                    String[] fields = line.split("\t", -1); // NON-NLS
                    if (fields.length != 14) {
@@ -428,8 +493,8 @@ final class AddLogicalImageTask implements Runnable {
                    // String fileSystemOffsetStr = fields[1];
                    // String fileMetaAddressStr = fields[2];
                    // String extractStatusStr = fields[3];
                    // String ruleSetName = fields[4];
                    // String ruleName = fields[5];
                    String ruleSetName = fields[4];
                    String ruleName = fields[5];
                    // String description = fields[6];
                    String filename = fields[7];
                    String parentPath = fields[8];
@@ -440,12 +505,12 @@ final class AddLogicalImageTask implements Runnable {
                    String ctime = fields[13];
                    parentPath = ROOT_STR + "/" + vhdFilename + "/" + parentPath;

                    if (lineNumber % 100 == 0) {
                    if (lineNumber % REPORT_PROGRESS_INTERVAL == 0) {
                        progressMonitor.setProgressText(Bundle.AddLogicalImageTask_addingExtractedFile(lineNumber, totalFiles));
                    }

                    //addLocalFile here
                    fileImporter.addLocalFile(
                    AbstractFile fileAdded = fileImporter.addLocalFile(
                            Paths.get(src.toString(), extractedFilePath).toFile(),
                            filename,
                            parentPath,
@@ -454,12 +519,19 @@ final class AddLogicalImageTask implements Runnable {
                            Long.parseLong(atime),
                            Long.parseLong(mtime),
                            localFilesDataSource);

                    String key = String.format("%s\t%s", ruleSetName, ruleName);
                    List<Long> value = new ArrayList<>();
                    if (interestingFileMap.containsKey(key)) {
                        value = interestingFileMap.get(key);
                    }
                    value.add(fileAdded.getId());
                    interestingFileMap.put(key, value);
                    lineNumber++;
                } // end reading file
            }
            trans.commit();
            newDataSources.add(localFilesDataSource);
            return interestingFileMap;

        } catch (NumberFormatException | TskCoreException ex) {
            LOGGER.log(Level.SEVERE, "Error adding extracted files", ex); // NON-NLS
@@ -489,21 +561,16 @@ final class AddLogicalImageTask implements Runnable {
        }
    }

    String makeQuery(boolean createVHD, String vhdFilename, String fileMetaAddressStr, String parentPath, String filename) throws TskCoreException {
    String makeQuery(String vhdFilename, String fileMetaAddressStr, String parentPath, String filename) throws TskCoreException {
        String query;
        if (createVHD) {
            String targetImagePath = Paths.get(dest.toString(), vhdFilename).toString();
            Long dataSourceObjId = imagePathToObjIdMap.get(targetImagePath);
            if (dataSourceObjId == null) {
                throw new TskCoreException(Bundle.AddLogicalImageTask_cannotFindDataSourceObjId(targetImagePath));
            }
            query = String.format("data_source_obj_id = '%s' AND meta_addr = '%s' AND name = '%s'", // NON-NLS
                    dataSourceObjId.toString(), fileMetaAddressStr, filename.replace("'", "''"));
        } else {
            String newParentPath = "/" + ROOT_STR + "/" + vhdFilename + "/" + parentPath;
            query = String.format("name = '%s' AND parent_path = '%s'", // NON-NLS
                    filename.replace("'", "''"), newParentPath.replace("'", "''"));
        String targetImagePath = Paths.get(dest.toString(), vhdFilename).toString();
        Long dataSourceObjId = imagePathToObjIdMap.get(targetImagePath);
        if (dataSourceObjId == null) {
            throw new TskCoreException(Bundle.AddLogicalImageTask_cannotFindDataSourceObjId(targetImagePath));
        }
        query = String.format("data_source_obj_id = '%s' AND meta_addr = '%s' AND name = '%s'", // NON-NLS
                dataSourceObjId.toString(), fileMetaAddressStr, filename.replace("'", "''"));
        // TODO - findAllFilesWhere should SQL-escape the query
        return query;
    }

@@ -46,6 +46,9 @@ AddLogicalImageTask.noCurrentCase=No current case
# {1} - fields length
# {2} - expected length
AddLogicalImageTask.notEnoughFields=File does not contain enough fields at line {0}, got {1}, expecting {2}
# {0} - file number
# {1} - total files
AddLogicalImageTask.searchingInterestingFile=Searching for interesting files ({0}/{1})
# {0} - imageFilePath
AddMultipleImagesTask.adding=Adding: {0}
# {0} - file
@@ -24,6 +24,9 @@ PortableCaseReportModule.generateReport.errorCopyingFiles=Error copying tagged f
PortableCaseReportModule.generateReport.errorCopyingInterestingFiles=Error copying interesting files
PortableCaseReportModule.generateReport.errorCopyingInterestingResults=Error copying interesting results
PortableCaseReportModule.generateReport.errorCopyingTags=Error copying tags
PortableCaseReportModule.generateReport.errorCreatingImageTagTable=Error creating image tags table
PortableCaseReportModule.generateReport.errorCreatingReportFolder=Could not make report folder
PortableCaseReportModule.generateReport.errorGeneratingUCOreport=Problem while generating CASE-UCO report
# {0} - attribute type name
PortableCaseReportModule.generateReport.errorLookingUpAttrType=Error looking up attribute type {0}
PortableCaseReportModule.generateReport.errorReadingSets=Error while reading interesting items sets from case database
@@ -770,8 +770,11 @@ final public class ViewFrame extends BorderPane {
                Notifications.create().owner(getScene().getWindow())
                        .text(Bundle.ViewFrame_pickerListener_errorMessage())
                        .showError();
                logger.log(Level.SEVERE, "Error responding to date/time picker change.", ex);
                logger.log(Level.WARNING, "Error responding to date/time picker change.", ex); //NON-NLS
            } catch (IllegalArgumentException ex) {
                logger.log(Level.INFO, "Timeline: User supplied invalid time range."); //NON-NLS
            }

            Platform.runLater(ViewFrame.this::refreshTimeUI);
        }
    }
InternalPythonModules/android/ResultSetIterator.py (new file, 35 lines)
@@ -0,0 +1,35 @@
"""
Autopsy Forensic Browser

Copyright 2019 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

class ResultSetIterator(object):
    """
    Generic base class for iterating through database records
    """

    def __init__(self, result_set):
        self.result_set = result_set

    def next(self):
        if self.result_set is None:
            return False
        return self.result_set.next()

    def close(self):
        if self.result_set is not None:
            self.result_set.close()
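The base class is deliberately thin: a concrete parser wraps the java.sql.ResultSet returned by AppSQLiteDB.runQuery() and exposes typed getters on top of it. A minimal sketch of how such a subclass is consumed; the ExampleParser name, the example database, and the "address"/"body" columns are hypothetical, not part of this commit:

from ResultSetIterator import ResultSetIterator

class ExampleParser(ResultSetIterator):
    # Hypothetical parser over a result set exposing "address" and "body" columns.
    def __init__(self, result_set):
        super(ExampleParser, self).__init__(result_set)

    def get_address(self):
        return self.result_set.getString("address")

    def get_body(self):
        return self.result_set.getString("body")

# Typical consumption loop, mirroring how the analyzers below drive their parsers:
#     parser = ExampleParser(example_db.runQuery("SELECT address, body FROM messages"))
#     while parser.next():
#         process(parser.get_address(), parser.get_body())
#     parser.close()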
InternalPythonModules/android/TskCallLogsParser.py (new file, 65 lines)
@@ -0,0 +1,65 @@
"""
Autopsy Forensic Browser

Copyright 2019 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from ResultSetIterator import ResultSetIterator
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CallMediaType
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
from org.sleuthkit.datamodel import Account

class TskCallLogsParser(ResultSetIterator):
    """
    Generic TSK_CALLLOG artifact template. Each of these methods
    will contain the extraction and transformation logic for
    converting raw database records to the expected TSK_CALLLOG
    format.

    A simple example of data transformation would be computing
    the end time of a call when the database only supplies the start
    time and duration.
    """

    def __init__(self, result_set):
        super(TskCallLogsParser, self).__init__(result_set)
        self._DEFAULT_STRING = ""
        self._DEFAULT_DIRECTION = CommunicationDirection.UNKNOWN
        self._DEFAULT_ADDRESS = None
        self._DEFAULT_CALL_TYPE = CallMediaType.UNKNOWN
        self._DEFAULT_LONG = -1L

        self.INCOMING_CALL = CommunicationDirection.INCOMING
        self.OUTGOING_CALL = CommunicationDirection.OUTGOING
        self.AUDIO_CALL = CallMediaType.AUDIO
        self.VIDEO_CALL = CallMediaType.VIDEO

    def get_call_direction(self):
        return self._DEFAULT_DIRECTION

    def get_phone_number_from(self):
        return self._DEFAULT_ADDRESS

    def get_phone_number_to(self):
        return self._DEFAULT_ADDRESS

    def get_call_start_date_time(self):
        return self._DEFAULT_LONG

    def get_call_end_date_time(self):
        return self._DEFAULT_LONG

    def get_call_type(self):
        return self._DEFAULT_CALL_TYPE
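The transformation mentioned in the docstring, deriving an end time when the source database only stores a start time and a duration, would look roughly like this. The subclass name and the "date"/"duration" columns are illustrative assumptions, not taken from any database handled in this commit:

class DurationCallLogsParser(TskCallLogsParser):
    # Hypothetical subclass for a table storing start time in milliseconds and duration in seconds.
    def __init__(self, result_set):
        super(DurationCallLogsParser, self).__init__(result_set)

    def get_call_start_date_time(self):
        # Convert milliseconds since epoch to the seconds TSK expects.
        return self.result_set.getLong("date") / 1000

    def get_call_end_date_time(self):
        # End time = start time + duration, both in seconds.
        return self.get_call_start_date_time() + self.result_set.getLong("duration")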
InternalPythonModules/android/TskContactsParser.py (new file, 49 lines)
@@ -0,0 +1,49 @@
"""
Autopsy Forensic Browser

Copyright 2019 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from ResultSetIterator import ResultSetIterator

class TskContactsParser(ResultSetIterator):
    """
    Generic TSK_CONTACT artifact template. Each of these methods
    will contain the extraction and transformation logic for
    converting raw database records to the expected TSK_CONTACT
    format.
    """

    def __init__(self, result_set):
        super(TskContactsParser, self).__init__(result_set)
        self._DEFAULT_VALUE = ""

    def get_account_name(self):
        return self._DEFAULT_VALUE

    def get_contact_name(self):
        return self._DEFAULT_VALUE

    def get_phone(self):
        return self._DEFAULT_VALUE

    def get_home_phone(self):
        return self._DEFAULT_VALUE

    def get_mobile_phone(self):
        return self._DEFAULT_VALUE

    def get_email(self):
        return self._DEFAULT_VALUE
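A concrete analyzer overrides only the getters its database can actually populate; anything left untouched falls back to self._DEFAULT_VALUE. A short sketch against a hypothetical contacts table with "display_name" and "number" columns (names are assumptions for illustration only):

class SimpleContactsParser(TskContactsParser):
    # Hypothetical subclass; only the columns the source database provides are mapped.
    def __init__(self, result_set):
        super(SimpleContactsParser, self).__init__(result_set)

    def get_contact_name(self):
        return self.result_set.getString("display_name")

    def get_phone(self):
        return self.result_set.getString("number")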
InternalPythonModules/android/TskMessagesParser.py (new file, 73 lines)
@@ -0,0 +1,73 @@
"""
Autopsy Forensic Browser

Copyright 2019 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from ResultSetIterator import ResultSetIterator
from org.sleuthkit.datamodel import Account
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection

class TskMessagesParser(ResultSetIterator):
    """
    Generic TSK_MESSAGE artifact template. Each of these methods
    will contain the extraction and transformation logic for
    converting raw database records to the expected TSK_MESSAGE
    format.

    An easy example of such a transformation would be converting
    message date time from milliseconds to seconds.
    """

    def __init__(self, result_set):
        super(TskMessagesParser, self).__init__(result_set)
        self._DEFAULT_TEXT = ""
        self._DEFAULT_LONG = -1L
        self._DEFAULT_MSG_READ_STATUS = MessageReadStatus.UNKNOWN
        self._DEFAULT_ACCOUNT_ADDRESS = None
        self._DEFAULT_COMMUNICATION_DIRECTION = CommunicationDirection.UNKNOWN

        self.INCOMING = CommunicationDirection.INCOMING
        self.OUTGOING = CommunicationDirection.OUTGOING
        self.READ = MessageReadStatus.READ
        self.UNREAD = MessageReadStatus.UNREAD

    def get_message_type(self):
        return self._DEFAULT_TEXT

    def get_message_direction(self):
        return self._DEFAULT_COMMUNICATION_DIRECTION

    def get_phone_number_from(self):
        return self._DEFAULT_ACCOUNT_ADDRESS

    def get_phone_number_to(self):
        return self._DEFAULT_ACCOUNT_ADDRESS

    def get_message_date_time(self):
        return self._DEFAULT_LONG

    def get_message_read_status(self):
        return self._DEFAULT_MSG_READ_STATUS

    def get_message_subject(self):
        return self._DEFAULT_TEXT

    def get_message_text(self):
        return self._DEFAULT_TEXT

    def get_thread_id(self):
        return self._DEFAULT_TEXT
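The milliseconds-to-seconds conversion the docstring mentions looks like this in practice. This sketch assumes a hypothetical "timestamp" column stored in milliseconds and a "read" flag column; neither is defined by this commit:

class MillisMessagesParser(TskMessagesParser):
    # Hypothetical subclass illustrating the date-time and read-status transformations.
    def __init__(self, result_set):
        super(MillisMessagesParser, self).__init__(result_set)

    def get_message_date_time(self):
        # TSK stores message times in seconds; the assumed source column is in milliseconds.
        return self.result_set.getLong("timestamp") / 1000

    def get_message_read_status(self):
        return self.READ if self.result_set.getInt("read") == 1 else self.UNREAD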
@@ -26,3 +26,14 @@ class AndroidComponentAnalyzer:
    # The Analyzer should implement this method
    def analyze(self, dataSource, fileManager, context):
        raise NotImplementedError

"""
A utility method to append a list of attachments to a message body.
"""
def appendAttachmentList(msgBody, attachmentsList):
    body = msgBody
    if attachmentsList:
        body = body + "\n\n------------Attachments------------\n"
        body = body + "\n".join(attachmentsList)

    return body
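For reference, how the new helper behaves with a couple of illustrative values (the sample body and paths are made up):

# appendAttachmentList("See photos", ["/sdcard/DCIM/img1.jpg", "/sdcard/DCIM/img2.jpg"]) produces:
#     See photos
#
#     ------------Attachments------------
#     /sdcard/DCIM/img1.jpg
#     /sdcard/DCIM/img2.jpg
# An empty or None attachment list leaves the body unchanged.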
@@ -29,6 +29,7 @@ from java.util.logging import Level
from java.util import ArrayList
from org.apache.commons.codec.binary import Base64
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
@@ -56,10 +57,14 @@ and adds artifacts to the case.
class IMOAnalyzer(general.AndroidComponentAnalyzer):
    def __init__(self):
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._PACKAGE_NAME = "com.imo.android.imous"
        self._PARSER_NAME = "IMO Parser"
        self._MESSAGE_TYPE = "IMO Message"
        self._VERSION = "9.8.0"

    def analyze(self, dataSource, fileManager, context):
        selfAccountAddress = None
        accountDbs = AppSQLiteDB.findAppDatabases(dataSource, "accountdb.db", True, "com.imo.android.imous")
        accountDbs = AppSQLiteDB.findAppDatabases(dataSource, "accountdb.db", True, self._PACKAGE_NAME)
        for accountDb in accountDbs:
            try:
                accountResultSet = accountDb.runQuery("SELECT uid, name FROM account")
@@ -71,16 +76,26 @@ class IMOAnalyzer(general.AndroidComponentAnalyzer):
                    selfAccountAddress = Account.Address(accountResultSet.getString("uid"), accountResultSet.getString("name"))

            except SQLException as ex:
                self._logger.log(Level.SEVERE, "Error processing query result for account", ex)
                self._logger.log(Level.WARNING, "Error processing query result for account", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            finally:
                accountDb.close()

        friendsDbs = AppSQLiteDB.findAppDatabases(dataSource, "imofriends.db", True, "com.imo.android.imous")
        friendsDbs = AppSQLiteDB.findAppDatabases(dataSource, "imofriends.db", True, self._PACKAGE_NAME)
        for friendsDb in friendsDbs:
            try:
                friendsDBHelper = CommunicationArtifactsHelper(Case.getCurrentCase().getSleuthkitCase(),
                                                               "IMO Parser", friendsDb.getDBFile(),
                current_case = Case.getCurrentCaseThrows()
                if selfAccountAddress is not None:
                    friendsDBHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
                                                                   self._PARSER_NAME,
                                                                   friendsDb.getDBFile(),
                                                                   Account.Type.IMO, Account.Type.IMO, selfAccountAddress)
                else:
                    friendsDBHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
                                                                   self._PARSER_NAME,
                                                                   friendsDb.getDBFile(),
                                                                   Account.Type.IMO)
                contactsResultSet = friendsDb.runQuery("SELECT buid, name FROM friends")
                if contactsResultSet is not None:
                    while contactsResultSet.next():
@@ -121,7 +136,7 @@ class IMOAnalyzer(general.AndroidComponentAnalyzer):


                        messageArtifact = friendsDBHelper.addMessage(
                                                            "IMO Message",
                                                            self._MESSAGE_TYPE,
                                                            direction,
                                                            fromAddress,
                                                            toAddress,
@@ -138,8 +153,14 @@ class IMOAnalyzer(general.AndroidComponentAnalyzer):
            except SQLException as ex:
                self._logger.log(Level.WARNING, "Error processing query result for IMO friends", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            except (TskCoreException, BlackboardException) as ex:
                self._logger.log(Level.WARNING, "Failed to message artifacts.", ex)
            except TskCoreException as ex:
                self._logger.log(Level.SEVERE, "Failed to add IMO message artifacts.", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
            except BlackboardException as ex:
                self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            except NoCurrentCaseException as ex:
                self._logger.log(Level.WARNING, "No case currently open.", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            finally:
                friendsDb.close()
InternalPythonModules/android/installedapps.py (new file, 93 lines)
@@ -0,0 +1,93 @@
"""
Autopsy Forensic Browser

Copyright 2019 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from java.io import File
from java.lang import Class
from java.lang import ClassNotFoundException
from java.lang import Long
from java.lang import String
from java.sql import ResultSet
from java.sql import SQLException
from java.sql import Statement
from java.util.logging import Level
from java.util import ArrayList
from org.apache.commons.codec.binary import Base64
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
from org.sleuthkit.autopsy.datamodel import ContentUtils
from org.sleuthkit.autopsy.ingest import IngestJobContext
from org.sleuthkit.datamodel import AbstractFile
from org.sleuthkit.datamodel import BlackboardArtifact
from org.sleuthkit.datamodel import BlackboardAttribute
from org.sleuthkit.datamodel import Content
from org.sleuthkit.datamodel import TskCoreException
from org.sleuthkit.datamodel.Blackboard import BlackboardException
from org.sleuthkit.datamodel.blackboardutils import ArtifactsHelper

import traceback
import general

"""
Finds the SQLite DB for Android installed applications, parses the DB,
and adds artifacts to the case.
"""
class InstalledApplicationsAnalyzer(general.AndroidComponentAnalyzer):

    moduleName = "Android Installed Applications Analyzer"

    def __init__(self):
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._PACKAGE_NAME = "com.android.vending"
        self._MODULE_NAME = "Android Installed Applications Analyzer"
        self._VERSION = "5.1.1" ## Android version
        self.current_case = None

    def analyze(self, dataSource, fileManager, context):
        libraryDbs = AppSQLiteDB.findAppDatabases(dataSource, "library.db", True, self._PACKAGE_NAME)
        for libraryDb in libraryDbs:
            try:
                current_case = Case.getCurrentCaseThrows()
                libraryDbHelper = ArtifactsHelper(current_case.getSleuthkitCase(),
                                                  self.moduleName, libraryDb.getDBFile())
                queryString = "SELECT doc_id, purchase_time FROM ownership"
                ownershipResultSet = libraryDb.runQuery(queryString)
                if ownershipResultSet is not None:
                    while ownershipResultSet.next():
                        purchase_time = ownershipResultSet.getLong("purchase_time") / 1000
                        libraryDbHelper.addInstalledProgram(ownershipResultSet.getString("doc_id"),
                                                            purchase_time)

            except SQLException as ex:
                self._logger.log(Level.WARNING, "Error processing query result for installed applications.", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            except TskCoreException as ex:
                self._logger.log(Level.SEVERE, "Failed to add installed application artifacts.", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
            except BlackboardException as ex:
                self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            except NoCurrentCaseException as ex:
                self._logger.log(Level.WARNING, "No case currently open.", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            finally:
                libraryDb.close()
InternalPythonModules/android/line.py (new file, 386 lines)
@@ -0,0 +1,386 @@
"""
Autopsy Forensic Browser

Copyright 2019 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from java.io import File
from java.lang import Class
from java.lang import ClassNotFoundException
from java.lang import Long
from java.lang import String
from java.sql import ResultSet
from java.sql import SQLException
from java.sql import Statement
from java.util.logging import Level
from java.util import ArrayList
from org.apache.commons.codec.binary import Base64
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB

from org.sleuthkit.autopsy.datamodel import ContentUtils
from org.sleuthkit.autopsy.ingest import IngestJobContext
from org.sleuthkit.datamodel import AbstractFile
from org.sleuthkit.datamodel import BlackboardArtifact
from org.sleuthkit.datamodel import BlackboardAttribute
from org.sleuthkit.datamodel import Content
from org.sleuthkit.datamodel import TskCoreException
from org.sleuthkit.datamodel.Blackboard import BlackboardException
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
from org.sleuthkit.datamodel import Account
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
from TskContactsParser import TskContactsParser
from TskMessagesParser import TskMessagesParser
from TskCallLogsParser import TskCallLogsParser

import traceback
import general

class LineAnalyzer(general.AndroidComponentAnalyzer):
    """
    Parses the Line App databases for TSK contacts & message artifacts.
    """

    def __init__(self):
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._LINE_PACKAGE_NAME = "jp.naver.line.android"
        self._PARSER_NAME = "Line Parser"
        self._VERSION = "9.15.1"

    def analyze(self, dataSource, fileManager, context):
        try:
            contact_and_message_dbs = AppSQLiteDB.findAppDatabases(dataSource,
                    "naver_line", True, self._LINE_PACKAGE_NAME)
            calllog_dbs = AppSQLiteDB.findAppDatabases(dataSource,
                    "call_history", True, self._LINE_PACKAGE_NAME)

            for contact_and_message_db in contact_and_message_dbs:
                current_case = Case.getCurrentCaseThrows()
                helper = CommunicationArtifactsHelper(
                        current_case.getSleuthkitCase(), self._PARSER_NAME,
                        contact_and_message_db.getDBFile(), Account.Type.LINE)
                self.parse_contacts(contact_and_message_db, helper)
                self.parse_messages(contact_and_message_db, helper)

            for calllog_db in calllog_dbs:
                current_case = Case.getCurrentCaseThrows()
                helper = CommunicationArtifactsHelper(
                        current_case.getSleuthkitCase(), self._PARSER_NAME,
                        calllog_db.getDBFile(), Account.Type.LINE)
                self.parse_calllogs(dataSource, calllog_db, helper)

        except NoCurrentCaseException as ex:
            # Error parsing Line databases.
            self._logger.log(Level.WARNING, "Error parsing the Line App Databases", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())

        for contact_and_message_db in contact_and_message_dbs:
            contact_and_message_db.close()

        for calllog_db in calllog_dbs:
            calllog_db.close()

    def parse_contacts(self, contacts_db, helper):
        try:
            contacts_parser = LineContactsParser(contacts_db)
            while contacts_parser.next():
                helper.addContact(
                    contacts_parser.get_account_name(),
                    contacts_parser.get_contact_name(),
                    contacts_parser.get_phone(),
                    contacts_parser.get_home_phone(),
                    contacts_parser.get_mobile_phone(),
                    contacts_parser.get_email()
                )
            contacts_parser.close()
        except SQLException as ex:
            self._logger.log(Level.WARNING, "Error parsing the Line App Database for contacts", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        except TskCoreException as ex:
            #Error adding artifact to case database... case is not complete.
            self._logger.log(Level.SEVERE,
                    "Error adding Line contact artifacts to the case database.", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
        except BlackboardException as ex:
            #Error posting notification to blackboard
            self._logger.log(Level.WARNING,
                    "Error posting Line contact artifacts to blackboard.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())

    def parse_calllogs(self, dataSource, calllogs_db, helper):
        try:
            calllogs_db.attachDatabase(
                    dataSource, "naver_line",
                    calllogs_db.getDBFile().getParentPath(), "naver")

            calllog_parser = LineCallLogsParser(calllogs_db)
            while calllog_parser.next():
                helper.addCalllog(
                    calllog_parser.get_call_direction(),
                    calllog_parser.get_phone_number_from(),
                    calllog_parser.get_phone_number_to(),
                    calllog_parser.get_call_start_date_time(),
                    calllog_parser.get_call_end_date_time(),
                    calllog_parser.get_call_type()
                )
            calllog_parser.close()
        except SQLException as ex:
            self._logger.log(Level.WARNING, "Error parsing the Line App Database for calllogs", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        except TskCoreException as ex:
            #Error adding artifact to case database... case is not complete.
            self._logger.log(Level.SEVERE,
                    "Error adding Line calllog artifacts to the case database.", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
        except BlackboardException as ex:
            #Error posting notification to blackboard
            self._logger.log(Level.WARNING,
                    "Error posting Line calllog artifacts to blackboard.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())

    def parse_messages(self, messages_db, helper):
        try:

            messages_parser = LineMessagesParser(messages_db)
            while messages_parser.next():
                helper.addMessage(
                    messages_parser.get_message_type(),
                    messages_parser.get_message_direction(),
                    messages_parser.get_phone_number_from(),
                    messages_parser.get_phone_number_to(),
                    messages_parser.get_message_date_time(),
                    messages_parser.get_message_read_status(),
                    messages_parser.get_message_subject(),
                    messages_parser.get_message_text(),
                    messages_parser.get_thread_id()
                )
            messages_parser.close()
        except SQLException as ex:
            self._logger.log(Level.WARNING, "Error parsing the Line App Database for messages.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        except TskCoreException as ex:
            #Error adding artifact to case database... case is not complete.
            self._logger.log(Level.SEVERE,
                    "Error adding Line message artifacts to the case database.", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
        except BlackboardException as ex:
            #Error posting notification to blackboard
            self._logger.log(Level.WARNING,
                    "Error posting Line message artifacts to blackboard.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())

class LineCallLogsParser(TskCallLogsParser):
    """
    Parses out TSK_CALLLOG information from the Line database.
    TSK_CALLLOG fields that are not in the line database are given
    a default value inherited from the super class.
    """

    def __init__(self, calllog_db):
        super(LineCallLogsParser, self).__init__(calllog_db.runQuery(
                 """
                 SELECT substr(CallH.call_type, -1) AS direction,
                        CallH.start_time            AS start_time,
                        CallH.end_time              AS end_time,
                        ConT.server_name            AS name,
                        CallH.voip_type             AS call_type,
                        ConT.m_id
                 FROM   call_history AS CallH
                        JOIN naver.contacts AS ConT
                          ON CallH.caller_mid = ConT.m_id
                 """
             )
        )
        self._OUTGOING_CALL_TYPE = "O"
        self._INCOMING_CALL_TYPE = "I"
        self._VIDEO_CALL_TYPE = "V"
        self._AUDIO_CALL_TYPE = "A"

    def get_call_direction(self):
        direction = self.result_set.getString("direction")
        if direction == self._OUTGOING_CALL_TYPE:
            return self.OUTGOING_CALL
        return self.INCOMING_CALL

    def get_call_start_date_time(self):
        try:
            return long(self.result_set.getString("start_time")) / 1000
        except ValueError as ve:
            return super(LineCallLogsParser, self).get_call_start_date_time()

    def get_call_end_date_time(self):
        try:
            return long(self.result_set.getString("end_time")) / 1000
        except ValueError as ve:
            return super(LineCallLogsParser, self).get_call_end_date_time()

    def get_phone_number_to(self):
        if self.get_call_direction() == self.OUTGOING_CALL:
            return Account.Address(self.result_set.getString("m_id"),
                                   self.result_set.getString("name"))
        return super(LineCallLogsParser, self).get_phone_number_to()

    def get_phone_number_from(self):
        if self.get_call_direction() == self.INCOMING_CALL:
            return Account.Address(self.result_set.getString("m_id"),
                                   self.result_set.getString("name"))
        return super(LineCallLogsParser, self).get_phone_number_from()

    def get_call_type(self):
        if self.result_set.getString("call_type") == self._VIDEO_CALL_TYPE:
            return self.VIDEO_CALL
        if self.result_set.getString("call_type") == self._AUDIO_CALL_TYPE:
            return self.AUDIO_CALL
        return super(LineCallLogsParser, self).get_call_type()

class LineContactsParser(TskContactsParser):
    """
    Parses out TSK_CONTACT information from the Line database.
    TSK_CONTACT fields that are not in the line database are given
    a default value inherited from the super class.
    """

    def __init__(self, contact_db):
        super(LineContactsParser, self).__init__(contact_db.runQuery(
                 """
                 SELECT m_id,
                        server_name
                 FROM   contacts
                 """
             )
        )
    def get_account_name(self):
        return self.result_set.getString("m_id")

    def get_contact_name(self):
        return self.result_set.getString("server_name")

class LineMessagesParser(TskMessagesParser):
    """
    Parse out TSK_MESSAGE information from the Line database.
    TSK_MESSAGE fields that are not in the line database are given
    a default value inherited from the super class.
    """

    def __init__(self, message_db):
        super(LineMessagesParser, self).__init__(message_db.runQuery(
                """
                SELECT contact_list_with_groups.name,
                       contact_list_with_groups.id,
                       contact_list_with_groups.members,
                       contact_list_with_groups.member_names,
                       CH.from_mid,
                       C.server_name AS from_name,
                       CH.content,
                       CH.created_time,
                       CH.attachement_type,
                       CH.attachement_local_uri,
                       CH.status
                FROM   (SELECT G.name,
                               group_members.id,
                               group_members.members,
                               group_members.member_names
                        FROM   (SELECT id,
                                       group_concat(M.m_id) AS members,
                                       group_concat(replace(C.server_name,
                                                            ",",
                                                            "")) as member_names
                                FROM   membership AS M
                                       JOIN contacts as C
                                         ON M.m_id = C.m_id
                                GROUP  BY id) AS group_members
                               JOIN groups AS G
                                 ON G.id = group_members.id
                        UNION
                        SELECT server_name,
                               m_id,
                               NULL,
                               NULL
                        FROM   contacts) AS contact_list_with_groups
                       JOIN chat_history AS CH
                         ON CH.chat_id = contact_list_with_groups.id
                       LEFT JOIN contacts as C
                              ON C.m_id = CH.from_mid
                WHERE  attachement_type != 6
                """
            )
        )
        self._LINE_MESSAGE_TYPE = "Line Message"
        #From the limited test data, incoming messages appeared to have
        #status 1, while statuses 3 and 7 appeared only on outgoing messages.
        self._INCOMING_MESSAGE_TYPE = 1

    def get_message_type(self):
        return self._LINE_MESSAGE_TYPE

    def get_message_date_time(self):
        created_time = self.result_set.getString("created_time")
        try:
            #Get time in seconds (created_time is stored in ms from epoch)
            return long(created_time) / 1000
        except ValueError as ve:
            return super(LineMessagesParser, self).get_message_date_time()

    def get_message_text(self):
        content = self.result_set.getString("content")
        attachment_uri = self.result_set.getString("attachement_local_uri")
        if attachment_uri is not None and content is not None:
            return general.appendAttachmentList(content, [attachment_uri])
        elif attachment_uri is not None and content is None:
            return general.appendAttachmentList("", [attachment_uri])
        return content

    def get_message_direction(self):
        if self.result_set.getInt("status") == self._INCOMING_MESSAGE_TYPE:
            return self.INCOMING
        return self.OUTGOING

    def get_phone_number_from(self):
        if self.get_message_direction() == self.INCOMING:
            from_mid = self.result_set.getString("from_mid")
            if from_mid is not None:
                return Account.Address(from_mid,
                                       self.result_set.getString("from_name"))
        return super(LineMessagesParser, self).get_phone_number_from()

    def get_phone_number_to(self):
        if self.get_message_direction() == self.OUTGOING:
            group = self.result_set.getString("members")
            if group is not None:
                group = group.split(",")
                names = self.result_set.getString("member_names").split(",")

                recipients = []

                for recipient_id, recipient_name in zip(group, names):
                    recipients.append(Account.Address(recipient_id, recipient_name))

                return recipients

            return Account.Address(self.result_set.getString("id"),
                                   self.result_set.getString("name"))

        return super(LineMessagesParser, self).get_phone_number_to()

    def get_thread_id(self):
        members = self.result_set.getString("members")
        if members is not None:
            return self.result_set.getString("id")
        return super(LineMessagesParser, self).get_thread_id()
@@ -47,8 +47,21 @@ import tangomessage
import textmessage
import wwfmessage
import imo
import xender
import zapya
import shareit
import viber
import skype
import line
import whatsapp
import textnow
import sbrowser
import operabrowser
import oruxmaps
import installedapps
import fbmessenger


class AndroidModuleFactory(IngestModuleFactoryAdapter):

    moduleName = general.MODULE_NAME
@@ -93,7 +106,13 @@ class AndroidIngestModule(DataSourceIngestModule):
                     tangomessage.TangoMessageAnalyzer(), wwfmessage.WWFMessageAnalyzer(),
                     googlemaplocation.GoogleMapLocationAnalyzer(), browserlocation.BrowserLocationAnalyzer(),
                     cachelocation.CacheLocationAnalyzer(), imo.IMOAnalyzer(),
                     fbmessenger.FBMessengerAnalyzer()]
                     xender.XenderAnalyzer(), zapya.ZapyaAnalyzer(), shareit.ShareItAnalyzer(),
                     line.LineAnalyzer(), whatsapp.WhatsAppAnalyzer(),
                     textnow.TextNowAnalyzer(), skype.SkypeAnalyzer(), viber.ViberAnalyzer(),
                     fbmessenger.FBMessengerAnalyzer(),
                     sbrowser.SBrowserAnalyzer(), operabrowser.OperaAnalyzer(),
                     oruxmaps.OruxMapsAnalyzer(),
                     installedapps.InstalledApplicationsAnalyzer()]
        self.log(Level.INFO, "running " + str(len(analyzers)) + " analyzers")
        progressBar.switchToDeterminate(len(analyzers))
240
InternalPythonModules/android/operabrowser.py
Normal file
240
InternalPythonModules/android/operabrowser.py
Normal file
@ -0,0 +1,240 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2019 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Long
|
||||
from java.lang import String
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.apache.commons.codec.binary import Base64
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.datamodel.blackboardutils import WebBrowserArtifactsHelper
|
||||
|
||||
import traceback
|
||||
import general
|
||||
|
||||
"""
|
||||
Finds the SQLite DBs for the Opera browser and parses them for cookies, web history,
downloads, autofill entries and web form addresses, adding artifacts to the case.
|
||||
"""
|
||||
class OperaAnalyzer(general.AndroidComponentAnalyzer):
|
||||
|
||||
moduleName = "Opera Parser"
|
||||
progName = "Opera"
|
||||
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._PACKAGE_NAME = "com.opera.browser"
|
||||
self._MODULE_NAME = "Opera Analyzer"
|
||||
self._PROGRAM_NAME = "Opera"
|
||||
self._VERSION = "53.1.2569"
|
||||
self.current_case = None
|
||||
|
||||
def analyzeCookies(self, dataSource, fileManager, context):
|
||||
cookiesDbs = AppSQLiteDB.findAppDatabases(dataSource, "Cookies", True, self._PACKAGE_NAME)
|
||||
for cookiesDb in cookiesDbs:
|
||||
try:
|
||||
cookiesDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self.moduleName, cookiesDb.getDBFile())
|
||||
cookiesResultSet = cookiesDb.runQuery("SELECT host_key, name, value, creation_utc FROM cookies")
|
||||
if cookiesResultSet is not None:
|
||||
while cookiesResultSet.next():
|
||||
createTime = cookiesResultSet.getLong("creation_utc") / 1000000 - 11644473600 # Webkit time
|
||||
cookiesDbHelper.addWebCookie( cookiesResultSet.getString("host_key"),
|
||||
createTime,
|
||||
cookiesResultSet.getString("name"),
|
||||
cookiesResultSet.getString("value"),
|
||||
self._PROGRAM_NAME)
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for Opera cookies.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add Opera cookie artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
cookiesDb.close()
|
||||
|
||||
|
||||
|
||||
def analyzeHistory(self, dataSource, fileManager, context):
|
||||
historyDbs = AppSQLiteDB.findAppDatabases(dataSource, "History", True, self._PACKAGE_NAME)
|
||||
for historyDb in historyDbs:
|
||||
try:
|
||||
historyDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self.moduleName, historyDb.getDBFile())
|
||||
historyResultSet = historyDb.runQuery("SELECT url, title, last_visit_time FROM urls")
|
||||
if historyResultSet is not None:
|
||||
while historyResultSet.next():
|
||||
accessTime = historyResultSet.getLong("last_visit_time") / 1000000 - 11644473600
|
||||
historyDbHelper.addWebHistory( historyResultSet.getString("url"),
|
||||
accessTime,
|
||||
"", # referrer
|
||||
historyResultSet.getString("title"),
|
||||
self._PROGRAM_NAME)
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for Opera history.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add Opera history artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
historyDb.close()
|
||||
|
||||
|
||||
|
||||
def analyzeDownloads(self, dataSource, fileManager, context):
|
||||
downloadsDbs = AppSQLiteDB.findAppDatabases(dataSource, "History", True, self._PACKAGE_NAME)
|
||||
for downloadsDb in downloadsDbs:
|
||||
try:
|
||||
downloadsDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self.moduleName, downloadsDb.getDBFile())
|
||||
queryString = "SELECT target_path, start_time, url FROM downloads"\
|
||||
" INNER JOIN downloads_url_chains ON downloads.id = downloads_url_chains.id"
|
||||
downloadsResultSet = downloadsDb.runQuery(queryString)
|
||||
if downloadsResultSet is not None:
|
||||
while downloadsResultSet.next():
|
||||
startTime = downloadsResultSet.getLong("start_time") / 1000000 - 11644473600 # Webkit time format
|
||||
downloadsDbHelper.addWebDownload( downloadsResultSet.getString("target_path"),
|
||||
startTime,
|
||||
downloadsResultSet.getString("url"),
|
||||
self._PROGRAM_NAME)
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for Opera downloads.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add Opera download artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
downloadsDb.close()
|
||||
|
||||
def analyzeAutofill(self, dataSource, fileManager, context):
|
||||
autofillDbs = AppSQLiteDB.findAppDatabases(dataSource, "Web Data", True, self._PACKAGE_NAME)
|
||||
for autofillDb in autofillDbs:
|
||||
try:
|
||||
autofillDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self.moduleName, autofillDb.getDBFile())
|
||||
autofillsResultSet = autofillDb.runQuery("SELECT name, value, count, date_created FROM autofill")
|
||||
if autofillsResultSet is not None:
|
||||
while autofillsResultSet.next():
|
||||
creationTime = autofillsResultSet.getLong("date_created") / 1000000 - 11644473600 #Webkit time format
|
||||
autofillDbHelper.addWebFormAutofill( autofillsResultSet.getString("name"),
|
||||
autofillsResultSet.getString("value"),
|
||||
creationTime,
|
||||
0,
|
||||
autofillsResultSet.getInt("count"))
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for Opera autofill.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add Opera autofill artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
autofillDb.close()
|
||||
|
||||
def analyzeWebFormAddress(self, dataSource, fileManager, context):
|
||||
webFormAddressDbs = AppSQLiteDB.findAppDatabases(dataSource, "Web Data", True, self._PACKAGE_NAME)
|
||||
for webFormAddressDb in webFormAddressDbs:
|
||||
try:
|
||||
webFormAddressDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self.moduleName, webFormAddressDb.getDBFile())
|
||||
queryString = "SELECT street_address, city, state, zipcode, country_code, date_modified, first_name, last_name, number, email FROM autofill_profiles "\
|
||||
" INNER JOIN autofill_profile_names"\
|
||||
" ON autofill_profiles.guid = autofill_profile_names.guid"\
|
||||
" INNER JOIN autofill_profile_phones"\
|
||||
" ON autofill_profiles.guid = autofill_profile_phones.guid"\
|
||||
" INNER JOIN autofill_profile_emails"\
|
||||
" ON autofill_profiles.guid = autofill_profile_emails.guid"
|
||||
webFormAddressResultSet = webFormAddressDb.runQuery(queryString)
|
||||
if webFormAddressResultSet is not None:
|
||||
while webFormAddressResultSet.next():
|
||||
personName = webFormAddressResultSet.getString("first_name") + " " + webFormAddressResultSet.getString("last_name")
|
||||
address = '\n'.join([ webFormAddressResultSet.getString("street_address"),
|
||||
webFormAddressResultSet.getString("city"),
|
||||
webFormAddressResultSet.getString("state") + " " + webFormAddressResultSet.getString("zipcode"),
|
||||
webFormAddressResultSet.getString("country_code") ])
|
||||
|
||||
creationTime = webFormAddressResultSet.getLong("date_modified") / 1000000 - 11644473600
|
||||
webFormAddressDbHelper.addWebFormAddress( personName,
|
||||
webFormAddressResultSet.getString("email"),
|
||||
webFormAddressResultSet.getString("number"),
|
||||
address,
|
||||
creationTime,
|
||||
0,
|
||||
0)
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for Opera web form addresses.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add Opera form address artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
webFormAddressDb.close()
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
|
||||
## open current case
|
||||
try:
|
||||
self.current_case = Case.getCurrentCaseThrows()
|
||||
except NoCurrentCaseException as ex:
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
return
|
||||
|
||||
self.analyzeCookies(dataSource, fileManager, context)
|
||||
self.analyzeHistory(dataSource, fileManager, context)
|
||||
self.analyzeDownloads(dataSource, fileManager, context)
|
||||
self.analyzeAutofill(dataSource, fileManager, context)
|
||||
self.analyzeWebFormAddress(dataSource, fileManager, context)
|
||||
|
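The Chromium-style databases parsed above (and in the SBrowser module further down) store creation_utc, last_visit_time, start_time and date_created as WebKit timestamps: microseconds since 1601-01-01 UTC. The recurring value / 1000000 - 11644473600 expression converts them to seconds since the Unix epoch. A minimal standalone sketch of that conversion, with a purely hypothetical sample value:

# Convert a WebKit/Chrome timestamp (microseconds since 1601-01-01 UTC)
# into seconds since the Unix epoch; 11644473600 is the number of seconds
# between 1601-01-01 and 1970-01-01.
def webkit_to_unix_seconds(webkit_usec):
    return webkit_usec / 1000000 - 11644473600

# Hypothetical example value; prints 1572899250 (early November 2019) under Jython/Python 2.
print(webkit_to_unix_seconds(13217372850000000))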
108
InternalPythonModules/android/oruxmaps.py
Normal file
@ -0,0 +1,108 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2016-2018 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Double
|
||||
from java.lang import Long
|
||||
from java.sql import Connection
|
||||
from java.sql import DriverManager
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.autopsy.casemodule.services import FileManager
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import Blackboard
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.datamodel.blackboardutils import ArtifactsHelper
|
||||
|
||||
import traceback
|
||||
import general
|
||||
|
||||
"""
|
||||
Analyzes database created by ORUX Maps.
|
||||
"""
|
||||
class OruxMapsAnalyzer(general.AndroidComponentAnalyzer):
|
||||
|
||||
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._PACKAGE_NAME = "oruxmaps"
|
||||
self._MODULE_NAME = "OruxMaps Analyzer"
|
||||
self._PROGRAM_NAME = "OruxMaps"
|
||||
self._VERSION = "7.5.7"
|
||||
self.current_case = None
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
oruxMapsTrackpointsDbs = AppSQLiteDB.findAppDatabases(dataSource, "oruxmapstracks.db", True, self._PACKAGE_NAME)
|
||||
for oruxMapsTrackpointsDb in oruxMapsTrackpointsDbs:
|
||||
try:
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
oruxDbHelper = ArtifactsHelper(current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, oruxMapsTrackpointsDb.getDBFile())
|
||||
|
||||
poiQueryString = "SELECT poilat, poilon, poitime, poiname FROM pois"
|
||||
poisResultSet = oruxMapsTrackpointsDb.runQuery(poiQueryString)
|
||||
if poisResultSet is not None:
|
||||
while poisResultSet.next():
|
||||
oruxDbHelper.addGPSLocation(
|
||||
poisResultSet.getDouble("poilat"),
|
||||
poisResultSet.getDouble("poilon"),
|
||||
poisResultSet.getLong("poitime") / 1000, # milliseconds since unix epoch
|
||||
poisResultSet.getString("poiname"),
|
||||
self._PROGRAM_NAME)
|
||||
|
||||
trackpointsQueryString = "SELECT trkptlat, trkptlon, trkpttime FROM trackpoints"
|
||||
trackpointsResultSet = oruxMapsTrackpointsDb.runQuery(trackpointsQueryString)
|
||||
if trackpointsResultSet is not None:
|
||||
while trackpointsResultSet.next():
|
||||
oruxDbHelper.addGPSLocation(
|
||||
trackpointsResultSet.getDouble("trkptlat"),
|
||||
trackpointsResultSet.getDouble("trkptlon"),
|
||||
trackpointsResultSet.getLong("trkpttime") / 1000, # milliseconds since unix epoch
|
||||
"",
|
||||
self._PROGRAM_NAME)
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query result for Orux Map trackpoints.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add Orux Map trackpoint artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except NoCurrentCaseException as ex:
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
oruxMapsTrackpointsDb.close()
|
266
InternalPythonModules/android/sbrowser.py
Normal file
@ -0,0 +1,266 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2019 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Long
|
||||
from java.lang import String
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.apache.commons.codec.binary import Base64
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.datamodel.blackboardutils import WebBrowserArtifactsHelper
|
||||
|
||||
import traceback
|
||||
import general
|
||||
|
||||
"""
|
||||
Finds the SQLite DBs for S-Browser and parses them for bookmarks, cookies, web history,
downloads, autofill entries and web form addresses, adding artifacts to the case.
|
||||
"""
|
||||
class SBrowserAnalyzer(general.AndroidComponentAnalyzer):
|
||||
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._PACKAGE_NAME = "com.sec.android.app.sbrowser"
|
||||
self._MODULE_NAME = "SBrowser Analyzer"
|
||||
self._PROGRAM_NAME = "SBrowser"
|
||||
self._VERSION = "10.1.00.27"
|
||||
self.current_case = None
|
||||
|
||||
def analyzeBookmarks(self, dataSource, fileManager, context):
|
||||
sbrowserDbs = AppSQLiteDB.findAppDatabases(dataSource, "sbrowser.db", True, self._PACKAGE_NAME)
|
||||
for sbrowserDb in sbrowserDbs:
|
||||
try:
|
||||
sbrowserDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, sbrowserDb.getDBFile())
|
||||
bookmarkResultSet = sbrowserDb.runQuery("SELECT url, title, created FROM bookmarks WHERE url IS NOT NULL")
|
||||
if bookmarkResultSet is not None:
|
||||
while bookmarkResultSet.next():
|
||||
createTime = bookmarkResultSet.getLong("created") / 1000
|
||||
sbrowserDbHelper.addWebBookmark( bookmarkResultSet.getString("url"),
|
||||
bookmarkResultSet.getString("title"),
|
||||
createTime,
|
||||
self._PROGRAM_NAME)
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for SBrowser bookmarks.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add SBrowser bookmark artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
sbrowserDb.close()
|
||||
|
||||
|
||||
|
||||
def analyzeCookies(self, dataSource, fileManager, context):
|
||||
cookiesDbs = AppSQLiteDB.findAppDatabases(dataSource, "Cookies", True, self._PACKAGE_NAME)
|
||||
for cookiesDb in cookiesDbs:
|
||||
try:
|
||||
cookiesDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, cookiesDb.getDBFile())
|
||||
cookiesResultSet = cookiesDb.runQuery("SELECT host_key, name, value, creation_utc FROM cookies")
|
||||
if cookiesResultSet is not None:
|
||||
while cookiesResultSet.next():
|
||||
createTime = cookiesResultSet.getLong("creation_utc") / 1000000 - 11644473600 # Webkit time
|
||||
cookiesDbHelper.addWebCookie( cookiesResultSet.getString("host_key"),
|
||||
createTime,
|
||||
cookiesResultSet.getString("name"),
|
||||
cookiesResultSet.getString("value"),
|
||||
self._PROGRAM_NAME)
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for SBrowser cookies.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add SBrowser cookie artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
cookiesDb.close()
|
||||
|
||||
|
||||
|
||||
def analyzeHistory(self, dataSource, fileManager, context):
|
||||
historyDbs = AppSQLiteDB.findAppDatabases(dataSource, "History", True, self._PACKAGE_NAME)
|
||||
for historyDb in historyDbs:
|
||||
try:
|
||||
historyDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, historyDb.getDBFile())
|
||||
historyResultSet = historyDb.runQuery("SELECT url, title, last_visit_time FROM urls")
|
||||
if historyResultSet is not None:
|
||||
while historyResultSet.next():
|
||||
accessTime = historyResultSet.getLong("last_visit_time") / 1000000 - 11644473600 # Webkit time
|
||||
historyDbHelper.addWebHistory( historyResultSet.getString("url"),
|
||||
accessTime,
|
||||
"", # referrer
|
||||
historyResultSet.getString("title"),
|
||||
self._PROGRAM_NAME)
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for SBrowser history.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add SBrowser history artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
historyDb.close()
|
||||
|
||||
|
||||
|
||||
def analyzeDownloads(self, dataSource, fileManager, context):
|
||||
downloadsDbs = AppSQLiteDB.findAppDatabases(dataSource, "History", True, self._PACKAGE_NAME)
|
||||
for downloadsDb in downloadsDbs:
|
||||
try:
|
||||
downloadsDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, downloadsDb.getDBFile())
|
||||
queryString = "SELECT target_path, start_time, url FROM downloads"\
|
||||
" INNER JOIN downloads_url_chains ON downloads.id = downloads_url_chains.id"
|
||||
downloadsResultSet = downloadsDb.runQuery(queryString)
|
||||
if downloadsResultSet is not None:
|
||||
while downloadsResultSet.next():
|
||||
startTime = downloadsResultSet.getLong("start_time") / 1000000 - 11644473600 # Webkit time
|
||||
downloadsDbHelper.addWebDownload( downloadsResultSet.getString("target_path"),
|
||||
startTime,
|
||||
downloadsResultSet.getString("url"),
|
||||
self._PROGRAM_NAME)
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for SBrowser downloads.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add SBrowser download artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
downloadsDb.close()
|
||||
|
||||
def analyzeAutofill(self, dataSource, fileManager, context):
|
||||
autofillDbs = AppSQLiteDB.findAppDatabases(dataSource, "Web Data", True, self._PACKAGE_NAME)
|
||||
for autofillDb in autofillDbs:
|
||||
try:
|
||||
autofillDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, autofillDb.getDBFile())
|
||||
autofillsResultSet = autofillDb.runQuery("SELECT name, value, count, date_created FROM autofill INNER JOIN autofill_dates ON autofill.pair_id = autofill_dates.pair_id")
|
||||
if autofillsResultSet is not None:
|
||||
while autofillsResultSet.next():
|
||||
creationTime = autofillsResultSet.getLong("date_created") / 1000000 - 11644473600 # Webkit time
|
||||
autofillDbHelper.addWebFormAutofill( autofillsResultSet.getString("name"),
|
||||
autofillsResultSet.getString("value"),
|
||||
creationTime,
|
||||
0,
|
||||
autofillsResultSet.getInt("count"))
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for SBrowser autofill.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add SBrowser autofill artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
autofillDb.close()
|
||||
|
||||
def analyzeWebFormAddress(self, dataSource, fileManager, context):
|
||||
webFormAddressDbs = AppSQLiteDB.findAppDatabases(dataSource, "Web Data", True, self._PACKAGE_NAME)
|
||||
for webFormAddressDb in webFormAddressDbs:
|
||||
try:
|
||||
webFormAddressDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, webFormAddressDb.getDBFile())
|
||||
queryString = "SELECT street_address, city, state, zipcode, country_code, date_modified, first_name, last_name, number, email FROM autofill_profiles "\
|
||||
" INNER JOIN autofill_profile_names"\
|
||||
" ON autofill_profiles.guid = autofill_profile_names.guid"\
|
||||
" INNER JOIN autofill_profile_phones"\
|
||||
" ON autofill_profiles.guid = autofill_profile_phones.guid"\
|
||||
" INNER JOIN autofill_profile_emails"\
|
||||
" ON autofill_profiles.guid = autofill_profile_emails.guid"
|
||||
webFormAddressResultSet = webFormAddressDb.runQuery(queryString)
|
||||
if webFormAddressResultSet is not None:
|
||||
while webFormAddressResultSet.next():
|
||||
personName = webFormAddressResultSet.getString("first_name") + " " + webFormAddressResultSet.getString("last_name")
|
||||
address = '\n'.join([ webFormAddressResultSet.getString("street_address"),
|
||||
webFormAddressResultSet.getString("city"),
|
||||
webFormAddressResultSet.getString("state") + " " + webFormAddressResultSet.getString("zipcode"),
|
||||
webFormAddressResultSet.getString("country_code") ])
|
||||
|
||||
creationTime = webFormAddressResultSet.getLong("date_modified") / 1000000 - 11644473600 # Webkit time
|
||||
webFormAddressDbHelper.addWebFormAddress( personName,
|
||||
webFormAddressResultSet.getString("email"),
|
||||
webFormAddressResultSet.getString("number"),
|
||||
address,
|
||||
creationTime,
|
||||
0,
|
||||
0)
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for SBrowser form addresses.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add SBrowser form address artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
webFormAddressDb.close()
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
## open current case
|
||||
try:
|
||||
self.current_case = Case.getCurrentCaseThrows()
|
||||
except NoCurrentCaseException as ex:
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
return
|
||||
|
||||
|
||||
self.analyzeBookmarks(dataSource, fileManager, context)
|
||||
self.analyzeCookies(dataSource, fileManager, context)
|
||||
self.analyzeHistory(dataSource, fileManager, context)
|
||||
self.analyzeDownloads(dataSource, fileManager, context)
|
||||
self.analyzeAutofill(dataSource, fileManager, context)
|
||||
self.analyzeWebFormAddress(dataSource, fileManager, context)
|
||||
|
123
InternalPythonModules/android/shareit.py
Normal file
@ -0,0 +1,123 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2019 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Long
|
||||
from java.lang import String
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.apache.commons.codec.binary import Base64
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.datamodel import Account
|
||||
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
|
||||
|
||||
import traceback
|
||||
import general
|
||||
|
||||
"""
|
||||
Finds the SQLite DB for ShareIt, parses the DB for contacts & messages,
|
||||
and adds artifacts to the case.
|
||||
"""
|
||||
class ShareItAnalyzer(general.AndroidComponentAnalyzer):
|
||||
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._PACKAGE_NAME = "com.lenovo.anyshare.gps"
|
||||
self._MODULE_NAME = "ShareIt Analyzer"
|
||||
self._MESSAGE_TYPE = "ShareIt Message"
|
||||
self._VERSION = "5.0.28_ww"
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
historyDbs = AppSQLiteDB.findAppDatabases(dataSource, "history.db", True, self._PACKAGE_NAME)
|
||||
for historyDb in historyDbs:
|
||||
try:
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
historyDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, historyDb.getDBFile(),
|
||||
Account.Type.SHAREIT)
|
||||
|
||||
queryString = "SELECT history_type, device_id, device_name, description, timestamp, import_path FROM history"
|
||||
historyResultSet = historyDb.runQuery(queryString)
|
||||
if historyResultSet is not None:
|
||||
while historyResultSet.next():
|
||||
direction = ""
|
||||
fromAddress = None
|
||||
toAddress = None
|
||||
|
||||
if (historyResultSet.getInt("history_type") == 1):
|
||||
direction = CommunicationDirection.OUTGOING
|
||||
toAddress = Account.Address(historyResultSet.getString("device_id"), historyResultSet.getString("device_name") )
|
||||
else:
|
||||
direction = CommunicationDirection.INCOMING
|
||||
fromAddress = Account.Address(historyResultSet.getString("device_id"), historyResultSet.getString("device_name") )
|
||||
|
||||
msgBody = "" # there is no body.
|
||||
attachments = [historyResultSet.getString("import_path")]
|
||||
msgBody = general.appendAttachmentList(msgBody, attachments)
|
||||
|
||||
timeStamp = historyResultSet.getLong("timestamp") / 1000
|
||||
messageArtifact = historyDbHelper.addMessage(
|
||||
self._MESSAGE_TYPE,
|
||||
direction,
|
||||
fromAddress,
|
||||
toAddress,
|
||||
timeStamp,
|
||||
MessageReadStatus.UNKNOWN,
|
||||
None, # subject
|
||||
msgBody,
|
||||
None ) # thread id
|
||||
|
||||
# TBD: add the file as attachment ??
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query result for ShareIt history.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to create ShareIt message artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except NoCurrentCaseException as ex:
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
historyDb.close()
|
||||
|
||||
|
||||
|
505
InternalPythonModules/android/skype.py
Normal file
@ -0,0 +1,505 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2019 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Long
|
||||
from java.lang import String
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.apache.commons.codec.binary import Base64
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.datamodel import Account
|
||||
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
|
||||
from TskMessagesParser import TskMessagesParser
|
||||
from TskContactsParser import TskContactsParser
|
||||
from TskCallLogsParser import TskCallLogsParser
|
||||
|
||||
import traceback
|
||||
import general
|
||||
|
||||
class SkypeAnalyzer(general.AndroidComponentAnalyzer):
|
||||
"""
|
||||
Parses the Skype App databases for TSK contacts, message
|
||||
and calllog artifacts.
|
||||
|
||||
About version 8.15.0.428 (9/17/2019) Skype database:
|
||||
- There are 4 tables this parser uses:
|
||||
1) person - this table appears to hold all contacts known to the user.
|
||||
2) user - this table holds information pertaining to the user.
|
||||
3) particiapnt - Yes, that is not a typo. This table maps group chat
|
||||
ids to skype ids (1 to many).
|
||||
4) chatItem - This table contains all messages. It maps the group id or
|
||||
skype id (for 1 to 1 communication) to the message content
|
||||
and metadata. Either the group id or skype id is stored in
|
||||
a column named 'conversation_link'.
|
||||
|
||||
More info and implementation details:
|
||||
- The person table does not include groups. To get
|
||||
all 1 to 1 communications, we could simply join the person and chatItem tables.
|
||||
This would mean we'd need to do a second pass to get all the group information
|
||||
as they would be excluded in the join. Since the chatItem table stores both the
|
||||
group id or skype_id in one column, an implementation decision was made to union
|
||||
the person and particiapnt table together so that all rows are matched in one join
|
||||
with chatItem. This result is consistently labeled contact_list_with_groups in the
|
||||
following queries.
|
||||
- In order to keep the formatting of the name consistent throughout each query,
|
||||
a _format_user_name() function was created to encapsulate the CASE statement
|
||||
that was being shared across them. Refer to the method for more details.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._SKYPE_PACKAGE_NAME = "com.skype.raider"
|
||||
self._PARSER_NAME = "Skype Parser"
|
||||
self._VERSION = "8.15.0.428"
|
||||
|
||||
def get_user_account(self, skype_db):
|
||||
account_query_result = skype_db.runQuery(
|
||||
"""
|
||||
SELECT entry_id,
|
||||
"""+_format_user_name()+""" AS name
|
||||
FROM user
|
||||
"""
|
||||
)
|
||||
|
||||
if account_query_result is not None and account_query_result.next():
|
||||
return Account.Address(account_query_result.getString("entry_id"),
|
||||
account_query_result.getString("name"))
|
||||
return None
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
#Skype databases are of the form: live:XYZ.db, where
|
||||
#XYZ is the skype id of the user. The following search
|
||||
#does a generic substring match for 'live' in the skype
|
||||
#package.
|
||||
skype_dbs = AppSQLiteDB.findAppDatabases(dataSource,
|
||||
"live:", False, self._SKYPE_PACKAGE_NAME)
|
||||
try:
|
||||
for skype_db in skype_dbs:
|
||||
#Attempt to get the user account id from the database
|
||||
user_account_instance = None
|
||||
try:
|
||||
user_account_instance = self.get_user_account(skype_db)
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error querying for the user account in the Skype db.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
|
||||
if user_account_instance is None:
|
||||
helper = CommunicationArtifactsHelper(
|
||||
current_case.getSleuthkitCase(), self._PARSER_NAME,
|
||||
skype_db.getDBFile(), Account.Type.SKYPE
|
||||
)
|
||||
else:
|
||||
helper = CommunicationArtifactsHelper(
|
||||
current_case.getSleuthkitCase(), self._PARSER_NAME,
|
||||
skype_db.getDBFile(), Account.Type.SKYPE,
|
||||
Account.Type.SKYPE, user_account_instance
|
||||
)
|
||||
self.parse_contacts(skype_db, helper)
|
||||
self.parse_calllogs(skype_db, helper)
|
||||
self.parse_messages(skype_db, helper)
|
||||
except NoCurrentCaseException as ex:
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
for skype_db in skype_dbs:
|
||||
skype_db.close()
|
||||
|
||||
def parse_contacts(self, skype_db, helper):
|
||||
#Query for contacts and iterate row by row adding
|
||||
#each contact artifact
|
||||
try:
|
||||
contacts_parser = SkypeContactsParser(skype_db)
|
||||
while contacts_parser.next():
|
||||
helper.addContact(
|
||||
contacts_parser.get_account_name(),
|
||||
contacts_parser.get_contact_name(),
|
||||
contacts_parser.get_phone(),
|
||||
contacts_parser.get_home_phone(),
|
||||
contacts_parser.get_mobile_phone(),
|
||||
contacts_parser.get_email()
|
||||
)
|
||||
contacts_parser.close()
|
||||
except SQLException as ex:
|
||||
#Error parsing Skype db
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error parsing contact database for call logs artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
#Severe error trying to add to the case database; the case is not complete.
|
||||
#These exceptions are thrown by the CommunicationArtifactsHelper.
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Failed to add contact artifacts to the case database.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
#Failed to post notification to blackboard
|
||||
self._logger.log(Level.WARNING,
|
||||
"Failed to post contact artifact to the blackboard", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
def parse_calllogs(self, skype_db, helper):
|
||||
#Query for call logs and iterate row by row adding
|
||||
#each call log artifact
|
||||
try:
|
||||
calllog_parser = SkypeCallLogsParser(skype_db)
|
||||
while calllog_parser.next():
|
||||
helper.addCalllog(
|
||||
calllog_parser.get_call_direction(),
|
||||
calllog_parser.get_phone_number_from(),
|
||||
calllog_parser.get_phone_number_to(),
|
||||
calllog_parser.get_call_start_date_time(),
|
||||
calllog_parser.get_call_end_date_time(),
|
||||
calllog_parser.get_call_type()
|
||||
)
|
||||
calllog_parser.close()
|
||||
except SQLException as ex:
|
||||
#Error parsing Skype db
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error parsing Skype database for call logs artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
#Severe error trying to add to the case database; the case is not complete.
|
||||
#These exceptions are thrown by the CommunicationArtifactsHelper.
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Failed to add call log artifacts to the case database.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
#Failed to post notification to blackboard
|
||||
self._logger.log(Level.WARNING,
|
||||
"Failed to post call log artifact to the blackboard", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
def parse_messages(self, skype_db, helper):
|
||||
#Query for messages and iterate row by row adding
|
||||
#each message artifact
|
||||
try:
|
||||
messages_parser = SkypeMessagesParser(skype_db)
|
||||
while messages_parser.next():
|
||||
helper.addMessage(
|
||||
messages_parser.get_message_type(),
|
||||
messages_parser.get_message_direction(),
|
||||
messages_parser.get_phone_number_from(),
|
||||
messages_parser.get_phone_number_to(),
|
||||
messages_parser.get_message_date_time(),
|
||||
messages_parser.get_message_read_status(),
|
||||
messages_parser.get_message_subject(),
|
||||
messages_parser.get_message_text(),
|
||||
messages_parser.get_thread_id()
|
||||
)
|
||||
messages_parser.close()
|
||||
except SQLException as ex:
|
||||
#Error parsing Skype db
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error parsing Skype database for message artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
#Severe error trying to add to the case database; the case is not complete.
|
||||
#These exceptions are thrown by the CommunicationArtifactsHelper.
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Failed to add message artifacts to the case database.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
#Failed to post notification to blackboard
|
||||
self._logger.log(Level.WARNING,
|
||||
"Failed to post message artifact to the blackboard", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
class SkypeCallLogsParser(TskCallLogsParser):
|
||||
"""
|
||||
Extracts TSK_CALLLOG information from the Skype database.
|
||||
TSK_CALLLOG fields that are not in the Skype database are given
|
||||
a default value inherited from the super class.
|
||||
"""
|
||||
|
||||
def __init__(self, calllog_db):
|
||||
"""
|
||||
Big picture:
|
||||
The query below creates a contacts_list_with_groups table, which
|
||||
represents the recipient info. A chatItem record holds ids for
|
||||
both the recipient and sender. The first join onto chatItem fills
|
||||
in the blanks for the recipients. The second join back onto person
|
||||
handles the sender info. The result is a table with all of the
|
||||
communication details.
|
||||
|
||||
Implementation details:
|
||||
- message_type with value 3 appeared to be the call type, regardless
of whether the call was audio or video.
|
||||
|
||||
"""
|
||||
super(SkypeCallLogsParser, self).__init__(calllog_db.runQuery(
|
||||
"""
|
||||
SELECT contacts_list_with_groups.conversation_id,
|
||||
contacts_list_with_groups.participant_ids,
|
||||
contacts_list_with_groups.participants,
|
||||
time,
|
||||
duration,
|
||||
is_sender_me,
|
||||
person_id as sender_id,
|
||||
sender_name.name as sender_name
|
||||
FROM (SELECT conversation_id,
|
||||
Group_concat(person_id) AS participant_ids,
|
||||
Group_concat("""+_format_user_name()+""") AS participants
|
||||
FROM particiapnt AS PART
|
||||
JOIN person AS P
|
||||
ON PART.person_id = P.entry_id
|
||||
GROUP BY conversation_id
|
||||
UNION
|
||||
SELECT entry_id,
|
||||
NULL,
|
||||
"""+_format_user_name()+""" AS participant
|
||||
FROM person) AS contacts_list_with_groups
|
||||
JOIN chatitem AS C
|
||||
ON C.conversation_link = contacts_list_with_groups.conversation_id
|
||||
JOIN (SELECT entry_id as id,
|
||||
"""+_format_user_name()+""" AS name
|
||||
FROM person
|
||||
UNION
|
||||
SELECT entry_id as id,
|
||||
"""+_format_user_name()+""" AS name
|
||||
FROM user) AS sender_name
|
||||
ON sender_name.id = C.person_id
|
||||
WHERE message_type == 3
|
||||
"""
|
||||
)
|
||||
)
|
||||
self._INCOMING_CALL_TYPE = 0
|
||||
self._OUTGOING_CALL_TYPE = 1
|
||||
|
||||
|
||||
def get_phone_number_from(self):
|
||||
if self.get_call_direction() == self.INCOMING_CALL:
|
||||
return Account.Address(self.result_set.getString("sender_id"),
|
||||
self.result_set.getString("sender_name"))
|
||||
|
||||
def get_phone_number_to(self):
|
||||
if self.get_call_direction() == self.OUTGOING_CALL:
|
||||
group_ids = self.result_set.getString("participant_ids")
|
||||
name = self.result_set.getString("participants")
|
||||
|
||||
if group_ids is not None:
|
||||
group_ids = group_ids.split(",")
|
||||
name = name.split(",")
|
||||
recipients = []
|
||||
|
||||
for person_id, person_name in zip(group_ids, name):
|
||||
recipients.append(Account.Address(person_id, person_name))
|
||||
|
||||
return recipients
|
||||
|
||||
return Account.Address(self.result_set.getString("conversation_id"), name)
|
||||
|
||||
return super(SkypeCallLogsParser, self).get_phone_number_to()
|
||||
|
||||
def get_call_direction(self):
|
||||
direction = self.result_set.getInt("is_sender_me")
|
||||
if direction == self._INCOMING_CALL_TYPE:
|
||||
return self.INCOMING_CALL
|
||||
if direction == self._OUTGOING_CALL_TYPE:
|
||||
return self.OUTGOING_CALL
|
||||
return super(SkypeCallLogsParser, self).get_call_direction()
|
||||
|
||||
def get_call_start_date_time(self):
|
||||
return self.result_set.getLong("time") / 1000
|
||||
|
||||
def get_call_end_date_time(self):
|
||||
start = self.get_call_start_date_time()
|
||||
duration = self.result_set.getInt("duration") / 1000
|
||||
return start + duration
|
||||
|
||||
class SkypeContactsParser(TskContactsParser):
|
||||
"""
|
||||
Extracts TSK_CONTACT information from the Skype database.
|
||||
TSK_CONTACT fields that are not in the Skype database are given
|
||||
a default value inherited from the super class.
|
||||
"""
|
||||
|
||||
def __init__(self, contact_db):
|
||||
super(SkypeContactsParser, self).__init__(contact_db.runQuery(
|
||||
"""
|
||||
SELECT entry_id,
|
||||
"""+_format_user_name()+""" AS name
|
||||
FROM person
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
def get_account_name(self):
|
||||
return self.result_set.getString("entry_id")
|
||||
|
||||
def get_contact_name(self):
|
||||
return self.result_set.getString("name")
|
||||
|
||||
class SkypeMessagesParser(TskMessagesParser):
|
||||
"""
|
||||
Extract TSK_MESSAGE information from the Skype database.
|
||||
TSK_CONTACT fields that are not in the Skype database are given
|
||||
a default value inherited from the super class.
|
||||
"""
|
||||
|
||||
def __init__(self, message_db):
|
||||
"""
|
||||
This query is very similar to the call logs query; the only differences are
that it selects more columns and excludes rows whose message_type has
the call type value (3).
|
||||
"""
|
||||
super(SkypeMessagesParser, self).__init__(message_db.runQuery(
|
||||
"""
|
||||
SELECT contacts_list_with_groups.conversation_id,
|
||||
contacts_list_with_groups.participant_ids,
|
||||
contacts_list_with_groups.participants,
|
||||
time,
|
||||
content,
|
||||
device_gallery_path,
|
||||
is_sender_me,
|
||||
person_id as sender_id,
|
||||
sender_name.name AS sender_name
|
||||
FROM (SELECT conversation_id,
|
||||
Group_concat(person_id) AS participant_ids,
|
||||
Group_concat("""+_format_user_name()+""") AS participants
|
||||
FROM particiapnt AS PART
|
||||
JOIN person AS P
|
||||
ON PART.person_id = P.entry_id
|
||||
GROUP BY conversation_id
|
||||
UNION
|
||||
SELECT entry_id as conversation_id,
|
||||
NULL,
|
||||
"""+_format_user_name()+""" AS participant
|
||||
FROM person) AS contacts_list_with_groups
|
||||
JOIN chatitem AS C
|
||||
ON C.conversation_link = contacts_list_with_groups.conversation_id
|
||||
JOIN (SELECT entry_id as id,
|
||||
"""+_format_user_name()+""" AS name
|
||||
FROM person
|
||||
UNION
|
||||
SELECT entry_id as id,
|
||||
"""+_format_user_name()+""" AS name
|
||||
FROM user) AS sender_name
|
||||
ON sender_name.id = C.person_id
|
||||
WHERE message_type != 3
|
||||
"""
|
||||
)
|
||||
)
|
||||
self._SKYPE_MESSAGE_TYPE = "Skype Message"
|
||||
self._OUTGOING_MESSAGE_TYPE = 1
|
||||
self._INCOMING_MESSAGE_TYPE = 0
|
||||
|
||||
def get_message_type(self):
|
||||
return self._SKYPE_MESSAGE_TYPE
|
||||
|
||||
def get_phone_number_from(self):
|
||||
if self.get_message_direction() == self.INCOMING:
|
||||
return Account.Address(self.result_set.getString("sender_id"),
|
||||
self.result_set.getString("sender_name"))
|
||||
return super(SkypeMessagesParser, self).get_phone_number_from()
|
||||
|
||||
def get_message_direction(self):
|
||||
direction = self.result_set.getInt("is_sender_me")
|
||||
if direction == self._OUTGOING_MESSAGE_TYPE:
|
||||
return self.OUTGOING
|
||||
if direction == self._INCOMING_MESSAGE_TYPE:
|
||||
return self.INCOMING
|
||||
return super(SkypeMessagesParser, self).get_message_direction()
|
||||
|
||||
def get_phone_number_to(self):
|
||||
if self.get_message_direction() == self.OUTGOING:
|
||||
group_ids = self.result_set.getString("participant_ids")
|
||||
names = self.result_set.getString("participants")
|
||||
|
||||
if group_ids is not None:
|
||||
group_ids = group_ids.split(",")
|
||||
names = names.split(",")
|
||||
recipients = []
|
||||
|
||||
for participant_id, participant_name in zip(group_ids, names):
|
||||
recipients.append(Account.Address(participant_id, participant_name))
|
||||
|
||||
return recipients
|
||||
|
||||
return Account.Address(self.result_set.getString("conversation_id"), names)
|
||||
|
||||
return super(SkypeMessagesParser, self).get_phone_number_to()
|
||||
|
||||
def get_message_date_time(self):
|
||||
date = self.result_set.getLong("time")
|
||||
return date / 1000
|
||||
|
||||
def get_message_text(self):
|
||||
content = self.result_set.getString("content")
|
||||
|
||||
if content is not None:
|
||||
file_path = self.result_set.getString("device_gallery_path")
|
||||
|
||||
#if a file name and file path are associated with a message, append it
|
||||
if file_path is not None:
|
||||
return general.appendAttachmentList(content, [file_path])
|
||||
|
||||
return content
|
||||
|
||||
return super(SkypeMessagesParser, self).get_message_text()
|
||||
|
||||
def get_thread_id(self):
|
||||
group_ids = self.result_set.getString("participant_ids")
|
||||
if group_ids is not None:
|
||||
return self.result_set.getString("conversation_id")
|
||||
return super(SkypeMessagesParser, self).get_thread_id()
|
||||
|
||||
def _format_user_name():
|
||||
"""
|
||||
This CASE SQL statement is used in many queries to
|
||||
format the names of users. For a user, there is a first_name
|
||||
column and a last_name column. Some of these columns can be null
|
||||
and our goal is to produce the cleanest data possible. In the event
|
||||
that both the first and last name columns are null, we return the skype_id
|
||||
which is stored in the database as 'entry_id'. Commas are removed from the name
|
||||
so that we can concatenate names into a comma-separated list for group chats.
|
||||
"""
|
||||
|
||||
return """
|
||||
CASE
|
||||
WHEN Ifnull(first_name, "") == "" AND Ifnull(last_name, "") == "" THEN entry_id
|
||||
WHEN first_name is NULL THEN replace(last_name, ",", "")
|
||||
WHEN last_name is NULL THEN replace(first_name, ",", "")
|
||||
ELSE replace(first_name, ",", "") || " " || replace(last_name, ",", "")
|
||||
END
|
||||
"""
|
||||
|
||||
|
@ -118,7 +118,7 @@ class TangoMessageAnalyzer(general.AndroidComponentAnalyzer):
|
||||
try:
|
||||
# index the artifact for keyword search
|
||||
blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard()
|
||||
blackboard.postArtifact(artifact, MODULE_NAME)
|
||||
blackboard.postArtifact(artifact, general.MODULE_NAME)
|
||||
except Blackboard.BlackboardException as ex:
|
||||
self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactID()), ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
|
392
InternalPythonModules/android/textnow.py
Normal file
@ -0,0 +1,392 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2019 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Long
|
||||
from java.lang import String
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.apache.commons.codec.binary import Base64
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.datamodel import Account
|
||||
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
|
||||
|
||||
from TskMessagesParser import TskMessagesParser
|
||||
from TskContactsParser import TskContactsParser
|
||||
from TskCallLogsParser import TskCallLogsParser
|
||||
|
||||
import traceback
|
||||
import general
|
||||
|
||||
class TextNowAnalyzer(general.AndroidComponentAnalyzer):
|
||||
"""
|
||||
Parses the TextNow App databases for TSK contacts, message
|
||||
and calllog artifacts.
|
||||
|
||||
The TextNow database in v6.41.0.2 is structured as follows:
|
||||
- A messages table, which stores messages from/to a number
|
||||
- A contacts table, which stores phone numbers
|
||||
- A groups table, which stores each group the device owner is a part of
|
||||
- A group_members table, which stores who is in each group
|
||||
|
||||
The messages table contains both call logs and messages, with a type
|
||||
column differentiating the two.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._TEXTNOW_PACKAGE_NAME = "com.enflick.android.TextNow"
|
||||
self._PARSER_NAME = "TextNow Parser"
|
||||
self._VERSION = "6.41.0.2"
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
"""
|
||||
Extract, Transform and Load all messages, contacts and
|
||||
calllogs from the TextNow databases.
|
||||
"""
|
||||
|
||||
textnow_dbs = AppSQLiteDB.findAppDatabases(dataSource,
|
||||
"textnow_data.db", True, self._TEXTNOW_PACKAGE_NAME)
|
||||
|
||||
try:
|
||||
for textnow_db in textnow_dbs:
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
helper = CommunicationArtifactsHelper(
|
||||
current_case.getSleuthkitCase(), self._PARSER_NAME,
|
||||
textnow_db.getDBFile(), Account.Type.TEXTNOW
|
||||
)
|
||||
self.parse_contacts(textnow_db, helper)
|
||||
self.parse_calllogs(textnow_db, helper)
|
||||
self.parse_messages(textnow_db, helper)
|
||||
except NoCurrentCaseException as ex:
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
for textnow_db in textnow_dbs:
|
||||
textnow_db.close()
|
||||
|
||||
def parse_contacts(self, textnow_db, helper):
|
||||
#Query for contacts and iterate row by row adding
|
||||
#each contact artifact
|
||||
try:
|
||||
contacts_parser = TextNowContactsParser(textnow_db)
|
||||
while contacts_parser.next():
|
||||
helper.addContact(
|
||||
contacts_parser.get_account_name(),
|
||||
contacts_parser.get_contact_name(),
|
||||
contacts_parser.get_phone(),
|
||||
contacts_parser.get_home_phone(),
|
||||
contacts_parser.get_mobile_phone(),
|
||||
contacts_parser.get_email()
|
||||
)
|
||||
contacts_parser.close()
|
||||
except SQLException as ex:
|
||||
#Error parsing TextNow db
|
||||
self._logger.log(Level.WARNING, "Error parsing TextNow databases for contacts", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
#Error adding artifacts to the case database.. case database is not complete.
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Error adding TextNow contacts artifacts to the case database", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
#Error posting notification to blackboard...
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error posting TextNow contacts artifact to the blackboard", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
def parse_calllogs(self, textnow_db, helper):
|
||||
#Query for call logs and iterate row by row adding
|
||||
#each call log artifact
|
||||
try:
|
||||
calllog_parser = TextNowCallLogsParser(textnow_db)
|
||||
while calllog_parser.next():
|
||||
helper.addCalllog(
|
||||
calllog_parser.get_call_direction(),
|
||||
calllog_parser.get_phone_number_from(),
|
||||
calllog_parser.get_phone_number_to(),
|
||||
calllog_parser.get_call_start_date_time(),
|
||||
calllog_parser.get_call_end_date_time(),
|
||||
calllog_parser.get_call_type()
|
||||
)
|
||||
calllog_parser.close()
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error parsing TextNow databases for calllogs", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
#Error adding artifacts to the case database.. case database is not complete.
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Error adding TextNow call log artifacts to the case database", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
#Error posting notification to blackboard...
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error posting TextNow call log artifact to the blackboard", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
def parse_messages(self, textnow_db, helper):
|
||||
#Query for messages and iterate row by row adding
|
||||
#each message artifact
|
||||
try:
|
||||
messages_parser = TextNowMessagesParser(textnow_db)
|
||||
while messages_parser.next():
|
||||
helper.addMessage(
|
||||
messages_parser.get_message_type(),
|
||||
messages_parser.get_message_direction(),
|
||||
messages_parser.get_phone_number_from(),
|
||||
messages_parser.get_phone_number_to(),
|
||||
messages_parser.get_message_date_time(),
|
||||
messages_parser.get_message_read_status(),
|
||||
messages_parser.get_message_subject(),
|
||||
messages_parser.get_message_text(),
|
||||
messages_parser.get_thread_id()
|
||||
)
|
||||
messages_parser.close()
|
||||
except SQLException as ex:
|
||||
#Error parsing TextNow db
|
||||
self._logger.log(Level.WARNING, "Error parsing TextNow databases for messages.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
#Error adding artifacts to the case database.. case database is not complete.
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Error adding TextNow messages artifacts to the case database", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
#Error posting notification to blackboard...
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error posting TextNow messages artifact to the blackboard", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
class TextNowCallLogsParser(TskCallLogsParser):
|
||||
"""
|
||||
Extracts TSK_CALLLOG information from the TextNow database.
|
||||
TSK_CALLLOG fields that are not in the TextNow database are given
|
||||
a default value inherited from the super class.
|
||||
"""
|
||||
|
||||
def __init__(self, calllog_db):
|
||||
"""
|
||||
message_type of 100 or 102 are for calls (audio, video)
|
||||
"""
|
||||
super(TextNowCallLogsParser, self).__init__(calllog_db.runQuery(
|
||||
"""
|
||||
SELECT contact_value AS num,
|
||||
message_direction AS direction,
|
||||
message_text AS duration,
|
||||
date AS datetime
|
||||
FROM messages AS M
|
||||
WHERE message_type IN ( 100, 102 )
|
||||
"""
|
||||
)
|
||||
)
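# Note: for call rows (message_type 100 or 102) the message_text column appears
# to hold the call duration in seconds, hence the "duration" alias above and the
# start + duration arithmetic in get_call_end_date_time().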
|
||||
self._INCOMING_CALL_TYPE = 1
|
||||
self._OUTGOING_CALL_TYPE = 2
|
||||
|
||||
def get_phone_number_from(self):
|
||||
if self.get_call_direction() == self.OUTGOING_CALL:
|
||||
return super(TextNowCallLogsParser, self).get_phone_number_from()
|
||||
return Account.Address(self.result_set.getString("num"),
|
||||
self.result_set.getString("num"))
|
||||
|
||||
def get_phone_number_to(self):
|
||||
if self.get_call_direction() == self.INCOMING_CALL:
|
||||
return super(TextNowCallLogsParser, self).get_phone_number_to()
|
||||
return Account.Address(self.result_set.getString("num"),
|
||||
self.result_set.getString("num"))
|
||||
|
||||
def get_call_direction(self):
|
||||
if self.result_set.getInt("direction") == self._INCOMING_CALL_TYPE:
|
||||
return self.INCOMING_CALL
|
||||
return self.OUTGOING_CALL
|
||||
|
||||
def get_call_start_date_time(self):
|
||||
return self.result_set.getLong("datetime") / 1000
|
||||
|
||||
def get_call_end_date_time(self):
|
||||
start = self.get_call_start_date_time()
|
||||
duration = self.result_set.getString("duration")
|
||||
try:
|
||||
return start + long(duration)
|
||||
except ValueError as ve:
|
||||
return super(TextNowCallLogsParser, self).get_call_end_date_time()
|
||||
|
||||
class TextNowContactsParser(TskContactsParser):
|
||||
"""
|
||||
Extracts TSK_CONTACT information from the TextNow database.
|
||||
TSK_CONTACT fields that are not in the TextNow database are given
|
||||
a default value inherited from the super class.
|
||||
"""
|
||||
|
||||
def __init__(self, contact_db):
|
||||
super(TextNowContactsParser, self).__init__(contact_db.runQuery(
|
||||
"""
|
||||
SELECT C.contact_value AS number,
|
||||
CASE
|
||||
WHEN contact_name IS NULL THEN contact_value
|
||||
WHEN contact_name == "" THEN contact_value
|
||||
ELSE contact_name
|
||||
END name
|
||||
FROM contacts AS C
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
def get_account_name(self):
|
||||
return self.result_set.getString("number")
|
||||
|
||||
def get_contact_name(self):
|
||||
return self.result_set.getString("name")
|
||||
|
||||
def get_phone(self):
|
||||
return self.result_set.getString("number")
|
||||
|
||||
class TextNowMessagesParser(TskMessagesParser):
|
||||
"""
|
||||
Extract TSK_MESSAGE information from the TextNow database.
|
||||
TSK_MESSAGE fields that are not in the TextNow database are given
|
||||
a default value inherited from the super class.
|
||||
"""
|
||||
|
||||
def __init__(self, message_db):
|
||||
"""
|
||||
The query below does the following:
|
||||
- The group_info inner query creates a comma separated list of group recipients
|
||||
for each group. This result is then joined on the groups table to get the thread id.
|
||||
- The contacts table is unioned with this result so we have a complete map
|
||||
of "from" phone_numbers -> recipients (group or single). This is the
|
||||
'to_from_map' inner query.
|
||||
- Finally, the to_from_map results are joined with the messages table to get all
|
||||
of the communication details.
|
||||
"""
|
||||
super(TextNowMessagesParser, self).__init__(message_db.runQuery(
|
||||
"""
|
||||
|
||||
SELECT CASE
|
||||
WHEN message_direction == 2 THEN ""
|
||||
WHEN to_addresses IS NULL THEN M.contact_value
|
||||
ELSE contact_name
|
||||
end from_address,
|
||||
CASE
|
||||
WHEN message_direction == 1 THEN ""
|
||||
WHEN to_addresses IS NULL THEN M.contact_value
|
||||
ELSE to_addresses
|
||||
end to_address,
|
||||
message_direction,
|
||||
message_text,
|
||||
M.READ,
|
||||
M.date,
|
||||
M.attach,
|
||||
thread_id
|
||||
FROM (SELECT group_info.contact_value,
|
||||
group_info.to_addresses,
|
||||
G.contact_value AS thread_id
|
||||
FROM (SELECT GM.contact_value,
|
||||
Group_concat(GM.member_contact_value) AS to_addresses
|
||||
FROM group_members AS GM
|
||||
GROUP BY GM.contact_value) AS group_info
|
||||
JOIN groups AS G
|
||||
ON G.contact_value = group_info.contact_value
|
||||
UNION
|
||||
SELECT c.contact_value,
|
||||
NULL,
|
||||
"-1"
|
||||
FROM contacts AS c) AS to_from_map
|
||||
JOIN messages AS M
|
||||
ON M.contact_value = to_from_map.contact_value
|
||||
WHERE message_type NOT IN ( 102, 100 )
|
||||
"""
|
||||
)
|
||||
)
|
||||
self._TEXTNOW_MESSAGE_TYPE = "TextNow Message"
|
||||
self._INCOMING_MESSAGE_TYPE = 1
|
||||
self._OUTGOING_MESSAGE_TYPE = 2
|
||||
self._UNKNOWN_THREAD_ID = "-1"
|
||||
|
||||
def get_message_type(self):
|
||||
return self._TEXTNOW_MESSAGE_TYPE
|
||||
|
||||
def get_phone_number_from(self):
|
||||
if self.result_set.getString("from_address") == "":
|
||||
return super(TextNowMessagesParser, self).get_phone_number_from()
|
||||
return Account.Address(self.result_set.getString("from_address"),
|
||||
self.result_set.getString("from_address"))
|
||||
|
||||
def get_message_direction(self):
|
||||
direction = self.result_set.getInt("message_direction")
|
||||
if direction == self._INCOMING_MESSAGE_TYPE:
|
||||
return self.INCOMING
|
||||
return self.OUTGOING
|
||||
|
||||
def get_phone_number_to(self):
|
||||
if self.result_set.getString("to_address") == "":
|
||||
return super(TextNowMessagesParser, self).get_phone_number_to()
|
||||
recipients = self.result_set.getString("to_address").split(",")
|
||||
|
||||
recipient_accounts = []
|
||||
for recipient in recipients:
|
||||
recipient_accounts.append(Account.Address(recipient, recipient))
|
||||
|
||||
return recipient_accounts
|
||||
|
||||
def get_message_date_time(self):
|
||||
#convert ms to s
|
||||
return self.result_set.getLong("date") / 1000
|
||||
|
||||
def get_message_read_status(self):
|
||||
read = self.result_set.getBoolean("read")
|
||||
if self.get_message_direction() == self.INCOMING:
|
||||
if read == True:
|
||||
return self.READ
|
||||
return self.UNREAD
|
||||
|
||||
#read status for outgoing messages cannot be determined, give default
|
||||
return super(TextNowMessagesParser, self).get_message_read_status()
|
||||
|
||||
def get_message_text(self):
|
||||
text = self.result_set.getString("message_text")
|
||||
attachment = self.result_set.getString("attach")
|
||||
if attachment != "":
|
||||
text = general.appendAttachmentList(text, [attachment])
|
||||
return text
|
||||
|
||||
def get_thread_id(self):
|
||||
thread_id = self.result_set.getString("thread_id")
|
||||
if thread_id == self._UNKNOWN_THREAD_ID:
|
||||
return super(TextNowMessagesParser, self).get_thread_id()
|
||||
return thread_id
|
InternalPythonModules/android/viber.py (new file, 376 lines)
@ -0,0 +1,376 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2019 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Long
|
||||
from java.lang import String
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.apache.commons.codec.binary import Base64
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.datamodel import Account
|
||||
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
|
||||
from TskMessagesParser import TskMessagesParser
|
||||
from TskContactsParser import TskContactsParser
|
||||
from TskCallLogsParser import TskCallLogsParser
|
||||
|
||||
import traceback
|
||||
import general
|
||||
|
||||
class ViberAnalyzer(general.AndroidComponentAnalyzer):
|
||||
"""
|
||||
Parses the Viber App databases for TSK contacts, message
|
||||
and calllog artifacts.
|
||||
|
||||
The Viber v11.5.0 database structure is as follows:
|
||||
- People can take part in N conversation(s). A conversation can have M
|
||||
members and messages are exchanged in a conversation.
|
||||
- Viber has a conversation table, a participant table (the people/members in the above
|
||||
analogy) and a messages table.
|
||||
- Each row of the participants table maps a person to a conversation_id
|
||||
- Each row in the messages table has a from participant id and a conversation id.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._VIBER_PACKAGE_NAME = "com.viber.voip"
|
||||
self._PARSER_NAME = "Viber Parser"
|
||||
self._VERSION = "11.5.0"
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
"""
|
||||
Extract, Transform and Load all messages, contacts and
|
||||
calllogs from the Viber databases.
|
||||
"""
|
||||
|
||||
try:
|
||||
contact_and_calllog_dbs = AppSQLiteDB.findAppDatabases(dataSource,
|
||||
"viber_data", True, self._VIBER_PACKAGE_NAME)
|
||||
message_dbs = AppSQLiteDB.findAppDatabases(dataSource,
|
||||
"viber_messages", True, self._VIBER_PACKAGE_NAME)
|
||||
|
||||
#Extract TSK_CONTACT and TSK_CALLLOG information
|
||||
for contact_and_calllog_db in contact_and_calllog_dbs:
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
helper = CommunicationArtifactsHelper(
|
||||
current_case.getSleuthkitCase(), self._PARSER_NAME,
|
||||
contact_and_calllog_db.getDBFile(), Account.Type.VIBER)
|
||||
self.parse_contacts(contact_and_calllog_db, helper)
|
||||
self.parse_calllogs(contact_and_calllog_db, helper)
|
||||
|
||||
#Extract TSK_MESSAGE information
|
||||
for message_db in message_dbs:
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
helper = CommunicationArtifactsHelper(
|
||||
current_case.getSleuthkitCase(), self._PARSER_NAME,
|
||||
message_db.getDBFile(), Account.Type.VIBER)
|
||||
self.parse_messages(message_db, helper)
|
||||
|
||||
except NoCurrentCaseException as ex:
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
for message_db in message_dbs:
|
||||
message_db.close()
|
||||
|
||||
for contact_and_calllog_db in contact_and_calllog_dbs:
|
||||
contact_and_calllog_db.close()
|
||||
|
||||
def parse_contacts(self, contacts_db, helper):
|
||||
try:
|
||||
contacts_parser = ViberContactsParser(contacts_db)
|
||||
while contacts_parser.next():
|
||||
helper.addContact(
|
||||
contacts_parser.get_account_name(),
|
||||
contacts_parser.get_contact_name(),
|
||||
contacts_parser.get_phone(),
|
||||
contacts_parser.get_home_phone(),
|
||||
contacts_parser.get_mobile_phone(),
|
||||
contacts_parser.get_email()
|
||||
)
|
||||
contacts_parser.close()
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error querying the viber database for contacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Error adding viber contacts artifact to case database.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error posting viber contacts artifact to the blackboard.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
def parse_calllogs(self, calllogs_db, helper):
|
||||
try:
|
||||
calllog_parser = ViberCallLogsParser(calllogs_db)
|
||||
while calllog_parser.next():
|
||||
helper.addCalllog(
|
||||
calllog_parser.get_call_direction(),
|
||||
calllog_parser.get_phone_number_from(),
|
||||
calllog_parser.get_phone_number_to(),
|
||||
calllog_parser.get_call_start_date_time(),
|
||||
calllog_parser.get_call_end_date_time(),
|
||||
calllog_parser.get_call_type()
|
||||
)
|
||||
calllog_parser.close()
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error querying the viber database for calllogs.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Error adding viber calllogs artifact to case database.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error posting viber calllogs artifact to the blackboard.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
|
||||
def parse_messages(self, messages_db, helper):
|
||||
try:
|
||||
messages_parser = ViberMessagesParser(messages_db)
|
||||
while messages_parser.next():
|
||||
helper.addMessage(
|
||||
messages_parser.get_message_type(),
|
||||
messages_parser.get_message_direction(),
|
||||
messages_parser.get_phone_number_from(),
|
||||
messages_parser.get_phone_number_to(),
|
||||
messages_parser.get_message_date_time(),
|
||||
messages_parser.get_message_read_status(),
|
||||
messages_parser.get_message_subject(),
|
||||
messages_parser.get_message_text(),
|
||||
messages_parser.get_thread_id()
|
||||
)
|
||||
messages_parser.close()
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error querying the viber database for messages.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Error adding viber messages artifact to case database.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error posting viber messages artifact to the blackboard.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
class ViberCallLogsParser(TskCallLogsParser):
|
||||
"""
|
||||
Extracts TSK_CALLLOG information from the Viber database.
|
||||
TSK_CALLLOG fields that are not in the Viber database are given
|
||||
a default value inherited from the super class.
|
||||
"""
|
||||
|
||||
def __init__(self, calllog_db):
|
||||
super(ViberCallLogsParser, self).__init__(calllog_db.runQuery(
|
||||
"""
|
||||
SELECT C.canonized_number AS number,
|
||||
C.type AS direction,
|
||||
C.duration AS seconds,
|
||||
C.date AS start_time,
|
||||
C.viber_call_type AS call_type
|
||||
FROM calls AS C
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
self._OUTGOING_CALL_TYPE = 2
|
||||
self._INCOMING_CALL_TYPE = 1
|
||||
self._MISSED_CALL_TYPE = 3
|
||||
self._AUDIO_CALL_TYPE = 1
|
||||
self._VIDEO_CALL_TYPE = 4
|
||||
|
||||
def get_phone_number_from(self):
|
||||
if self.get_call_direction() == self.INCOMING_CALL:
|
||||
return Account.Address(self.result_set.getString("number"),
|
||||
self.result_set.getString("number"))
|
||||
#Give default value if the call is outgoing,
|
||||
#the device's # is not stored in the database.
|
||||
return super(ViberCallLogsParser, self).get_phone_number_from()
|
||||
|
||||
def get_phone_number_to(self):
|
||||
if self.get_call_direction() == self.OUTGOING_CALL:
|
||||
return Account.Address(self.result_set.getString("number"),
|
||||
self.result_set.getString("number"))
|
||||
#Give default value if the call is incoming,
|
||||
#the device's # is not stored in the database.
|
||||
return super(ViberCallLogsParser, self).get_phone_number_to()
|
||||
|
||||
def get_call_direction(self):
|
||||
direction = self.result_set.getInt("direction")
|
||||
if direction == self._INCOMING_CALL_TYPE or direction == self._MISSED_CALL_TYPE:
|
||||
return self.INCOMING_CALL
|
||||
return self.OUTGOING_CALL
|
||||
|
||||
def get_call_start_date_time(self):
|
||||
return self.result_set.getLong("start_time") / 1000
|
||||
|
||||
def get_call_end_date_time(self):
|
||||
start_time = self.get_call_start_date_time()
|
||||
duration = self.result_set.getLong("seconds")
|
||||
return start_time + duration
|
||||
|
||||
def get_call_type(self):
|
||||
call_type = self.result_set.getInt("call_type")
|
||||
if call_type == self._AUDIO_CALL_TYPE:
|
||||
return self.AUDIO_CALL
|
||||
if call_type == self._VIDEO_CALL_TYPE:
|
||||
return self.VIDEO_CALL
|
||||
return super(ViberCallLogsParser, self).get_call_type()
|
||||
|
||||
class ViberContactsParser(TskContactsParser):
|
||||
"""
|
||||
Extracts TSK_CONTACT information from the Viber database.
|
||||
TSK_CONTACT fields that are not in the Viber database are given
|
||||
a default value inherited from the super class.
|
||||
"""
|
||||
|
||||
def __init__(self, contact_db):
|
||||
super(ViberContactsParser, self).__init__(contact_db.runQuery(
|
||||
"""
|
||||
SELECT C.display_name AS name,
|
||||
D.data2 AS number
|
||||
FROM phonebookcontact AS C
|
||||
JOIN phonebookdata AS D
|
||||
ON C._id = D.contact_id
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
def get_account_name(self):
|
||||
return self.result_set.getString("number")
|
||||
|
||||
def get_contact_name(self):
|
||||
return self.result_set.getString("name")
|
||||
|
||||
def get_phone(self):
|
||||
return self.result_set.getString("number")
|
||||
|
||||
class ViberMessagesParser(TskMessagesParser):
|
||||
"""
|
||||
Extract TSK_MESSAGE information from the Viber database.
|
||||
TSK_MESSAGE fields that are not in the Viber database are given
|
||||
a default value inherited from the super class.
|
||||
"""
|
||||
|
||||
def __init__(self, message_db):
|
||||
"""
|
||||
The query below does the following:
|
||||
- The first two inner joins on participants and participants_info build
|
||||
the 1 to many (M) mappings between the sender and the recipients for each
|
||||
conversation_id. If a and b do private messaging, then 2 rows in the result
|
||||
will be a -> b and b -> a.
|
||||
If a, b, c, d are in a group, then there will be 4 rows: a -> b,c,d; b -> a,c,d; and so on.
|
||||
Participants_info is needed to get phone numbers.
|
||||
- The result of the above step is a look up table for each message. Joining this result
|
||||
onto the messages table lets us know which participant a message originated from and
|
||||
everyone else that received it.
|
||||
"""
|
||||
super(ViberMessagesParser, self).__init__(message_db.runQuery(
|
||||
"""
|
||||
SELECT convo_participants.from_number AS from_number,
|
||||
convo_participants.recipients AS recipients,
|
||||
M.conversation_id AS thread_id,
|
||||
M.body AS msg_content,
|
||||
M.send_type AS direction,
|
||||
M.msg_date AS msg_date,
|
||||
M.unread AS read_status
|
||||
FROM (SELECT *,
|
||||
group_concat(TO_RESULT.number) AS recipients
|
||||
FROM (SELECT P._id AS FROM_ID,
|
||||
P.conversation_id,
|
||||
PI.number AS FROM_NUMBER
|
||||
FROM participants AS P
|
||||
JOIN participants_info AS PI
|
||||
ON P.participant_info_id = PI._id) AS FROM_RESULT
|
||||
JOIN (SELECT P._id AS TO_ID,
|
||||
P.conversation_id,
|
||||
PI.number
|
||||
FROM participants AS P
|
||||
JOIN participants_info AS PI
|
||||
ON P.participant_info_id = PI._id) AS TO_RESULT
|
||||
ON FROM_RESULT.from_id != TO_RESULT.to_id
|
||||
AND FROM_RESULT.conversation_id = TO_RESULT.conversation_id
|
||||
GROUP BY FROM_RESULT.from_id) AS convo_participants
|
||||
JOIN messages AS M
|
||||
ON M.participant_id = convo_participants.from_id
|
||||
AND M.conversation_id = convo_participants.conversation_id
|
||||
"""
|
||||
)
|
||||
)
|
||||
self._VIBER_MESSAGE_TYPE = "Viber Message"
|
||||
self._INCOMING_MESSAGE_TYPE = 0
|
||||
self._OUTGOING_MESSAGE_TYPE = 1
|
||||
|
||||
def get_message_type(self):
|
||||
return self._VIBER_MESSAGE_TYPE
|
||||
|
||||
def get_phone_number_from(self):
|
||||
return Account.Address(self.result_set.getString("from_number"),
|
||||
self.result_set.getString("from_number"))
|
||||
|
||||
def get_message_direction(self):
|
||||
direction = self.result_set.getInt("direction")
|
||||
if direction == self._INCOMING_MESSAGE_TYPE:
|
||||
return self.INCOMING
|
||||
return self.OUTGOING
|
||||
|
||||
def get_phone_number_to(self):
|
||||
recipients = []
|
||||
for token in self.result_set.getString("recipients").split(","):
|
||||
recipients.append(Account.Address(token, token))
|
||||
return recipients
|
||||
|
||||
def get_message_date_time(self):
|
||||
#transform from ms to seconds
|
||||
return self.result_set.getLong("msg_date") / 1000
|
||||
|
||||
def get_message_read_status(self):
|
||||
if self.get_message_direction() == self.INCOMING:
|
||||
if self.result_set.getInt("read_status") == 0:
|
||||
return self.READ
|
||||
else:
|
||||
return self.UNREAD
|
||||
return super(ViberMessagesParser, self).get_message_read_status()
|
||||
|
||||
def get_message_text(self):
|
||||
return self.result_set.getString("msg_content")
|
||||
|
||||
def get_thread_id(self):
|
||||
return str(self.result_set.getInt("thread_id"))
|
InternalPythonModules/android/whatsapp.py (new file, 458 lines)
@ -0,0 +1,458 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2019 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Long
|
||||
from java.lang import String
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.apache.commons.codec.binary import Base64
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.datamodel import Account
|
||||
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
|
||||
from TskMessagesParser import TskMessagesParser
|
||||
from TskContactsParser import TskContactsParser
|
||||
from TskCallLogsParser import TskCallLogsParser
|
||||
|
||||
import traceback
|
||||
import general
|
||||
|
||||
class WhatsAppAnalyzer(general.AndroidComponentAnalyzer):
|
||||
"""
|
||||
Parses the WhatsApp databases for TSK contact, message
|
||||
and calllog artifacts.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._WHATSAPP_PACKAGE_NAME = "com.whatsapp"
|
||||
self._PARSER_NAME = "WhatsApp Parser"
|
||||
self._VERSION = "2.19.244"
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
"""
|
||||
Extract, Transform and Load all TSK contact, message
|
||||
and calllog artifacts from the WhatsApp databases.
|
||||
"""
|
||||
|
||||
try:
|
||||
contact_dbs = AppSQLiteDB.findAppDatabases(dataSource,
|
||||
"wa.db", True, self._WHATSAPP_PACKAGE_NAME)
|
||||
calllog_and_message_dbs = AppSQLiteDB.findAppDatabases(dataSource,
|
||||
"msgstore.db", True, self._WHATSAPP_PACKAGE_NAME)
|
||||
|
||||
#Extract TSK_CONTACT information
|
||||
for contact_db in contact_dbs:
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
helper = CommunicationArtifactsHelper(
|
||||
current_case.getSleuthkitCase(), self._PARSER_NAME,
|
||||
contact_db.getDBFile(), Account.Type.WHATSAPP)
|
||||
self.parse_contacts(contact_db, helper)
|
||||
|
||||
for calllog_and_message_db in calllog_and_message_dbs:
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
helper = CommunicationArtifactsHelper(
|
||||
current_case.getSleuthkitCase(), self._PARSER_NAME,
|
||||
calllog_and_message_db.getDBFile(), Account.Type.WHATSAPP)
|
||||
self.parse_calllogs(calllog_and_message_db, helper)
|
||||
self.parse_messages(dataSource, calllog_and_message_db, helper)
|
||||
|
||||
except NoCurrentCaseException as ex:
|
||||
#If there is no current case, bail out immediately.
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
#Clean up open file handles.
|
||||
for contact_db in contact_dbs:
|
||||
contact_db.close()
|
||||
|
||||
for calllog_and_message_db in calllog_and_message_dbs:
|
||||
calllog_and_message_db.close()
|
||||
|
||||
def parse_contacts(self, contacts_db, helper):
|
||||
try:
|
||||
contacts_parser = WhatsAppContactsParser(contacts_db)
|
||||
while contacts_parser.next():
|
||||
helper.addContact(
|
||||
contacts_parser.get_account_name(),
|
||||
contacts_parser.get_contact_name(),
|
||||
contacts_parser.get_phone(),
|
||||
contacts_parser.get_home_phone(),
|
||||
contacts_parser.get_mobile_phone(),
|
||||
contacts_parser.get_email()
|
||||
)
|
||||
contacts_parser.close()
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error querying the whatsapp database for contacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Error adding whatsapp contact artifacts to the case database.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error posting contact artifact to the blackboard.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
def parse_calllogs(self, calllogs_db, helper):
|
||||
try:
|
||||
single_calllogs_parser = WhatsAppSingleCallLogsParser(calllogs_db)
|
||||
while single_calllogs_parser.next():
|
||||
helper.addCalllog(
|
||||
single_calllogs_parser.get_call_direction(),
|
||||
single_calllogs_parser.get_phone_number_from(),
|
||||
single_calllogs_parser.get_phone_number_to(),
|
||||
single_calllogs_parser.get_call_start_date_time(),
|
||||
single_calllogs_parser.get_call_end_date_time(),
|
||||
single_calllogs_parser.get_call_type()
|
||||
)
|
||||
single_calllogs_parser.close()
|
||||
|
||||
group_calllogs_parser = WhatsAppGroupCallLogsParser(calllogs_db)
|
||||
while group_calllogs_parser.next():
|
||||
helper.addCalllog(
|
||||
group_calllogs_parser.get_call_direction(),
|
||||
group_calllogs_parser.get_phone_number_from(),
|
||||
group_calllogs_parser.get_phone_number_to(),
|
||||
group_calllogs_parser.get_call_start_date_time(),
|
||||
group_calllogs_parser.get_call_end_date_time(),
|
||||
group_calllogs_parser.get_call_type()
|
||||
)
|
||||
group_calllogs_parser.close()
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error querying the whatsapp database for calllogs.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Error adding whatsapp calllog artifacts to the case database.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error posting calllog artifact to the blackboard.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
def parse_messages(self, dataSource, messages_db, helper):
|
||||
try:
|
||||
messages_db.attachDatabase(dataSource, "wa.db",
|
||||
messages_db.getDBFile().getParentPath(), "wadb")
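# The attach above exposes wa.db under the schema name "wadb", which is what
# allows the message query below to join msgstore.db's messages table against
# wadb.wa_contacts when resolving recipients.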
|
||||
|
||||
messages_parser = WhatsAppMessagesParser(messages_db)
|
||||
while messages_parser.next():
|
||||
helper.addMessage(
|
||||
messages_parser.get_message_type(),
|
||||
messages_parser.get_message_direction(),
|
||||
messages_parser.get_phone_number_from(),
|
||||
messages_parser.get_phone_number_to(),
|
||||
messages_parser.get_message_date_time(),
|
||||
messages_parser.get_message_read_status(),
|
||||
messages_parser.get_message_subject(),
|
||||
messages_parser.get_message_text(),
|
||||
messages_parser.get_thread_id()
|
||||
)
|
||||
messages_parser.close()
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error querying the whatsapp database for contacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Error adding whatsapp contact artifacts to the case database.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error posting contact artifact to the blackboard.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
class WhatsAppGroupCallLogsParser(TskCallLogsParser):
|
||||
"""
|
||||
Extracts TSK_CALLLOG information from group call logs
|
||||
in the WhatsApp database.
|
||||
"""
|
||||
|
||||
def __init__(self, calllog_db):
|
||||
super(WhatsAppGroupCallLogsParser, self).__init__(calllog_db.runQuery(
|
||||
"""
|
||||
SELECT CL.video_call,
|
||||
CL.timestamp,
|
||||
CL.duration,
|
||||
CL.from_me,
|
||||
J1.raw_string AS from_id,
|
||||
group_concat(J.raw_string) AS group_members
|
||||
FROM call_log_participant_v2 AS CLP
|
||||
JOIN call_log AS CL
|
||||
ON CL._id = CLP.call_log_row_id
|
||||
JOIN jid AS J
|
||||
ON J._id = CLP.jid_row_id
|
||||
JOIN jid as J1
|
||||
ON J1._id = CL.jid_row_id
|
||||
GROUP BY CL._id
|
||||
"""
|
||||
)
|
||||
)
|
||||
self._INCOMING_CALL_TYPE = 0
|
||||
self._OUTGOING_CALL_TYPE = 1
|
||||
self._VIDEO_CALL_TYPE = 1
|
||||
|
||||
def get_call_direction(self):
|
||||
if self.result_set.getInt("from_me") == self._INCOMING_CALL_TYPE:
|
||||
return self.INCOMING_CALL
|
||||
return self.OUTGOING_CALL
|
||||
|
||||
def get_phone_number_from(self):
|
||||
if self.get_call_direction() == self.INCOMING_CALL:
|
||||
sender = self.result_set.getString("from_id")
|
||||
return Account.Address(sender, sender)
|
||||
return super(WhatsAppGroupCallLogsParser, self).get_phone_number_from()
|
||||
|
||||
def get_phone_number_to(self):
|
||||
if self.get_call_direction() == self.OUTGOING_CALL:
|
||||
group = self.result_set.getString("group_members")
|
||||
members = []
|
||||
for token in group.split(","):
|
||||
members.append(Account.Address(token, token))
|
||||
return members
|
||||
return super(WhatsAppGroupCallLogsParser, self).get_phone_number_to()
|
||||
|
||||
def get_call_start_date_time(self):
|
||||
return self.result_set.getLong("timestamp") / 1000
|
||||
|
||||
def get_call_end_date_time(self):
|
||||
start = self.get_call_start_date_time()
|
||||
duration = self.result_set.getInt("duration")
|
||||
return start + duration
|
||||
|
||||
def get_call_type(self):
|
||||
if self.result_set.getInt("video_call") == self._VIDEO_CALL_TYPE:
|
||||
return self.VIDEO_CALL
|
||||
return self.AUDIO_CALL
|
||||
|
||||
class WhatsAppSingleCallLogsParser(TskCallLogsParser):
|
||||
"""
|
||||
Extracts TSK_CALLLOG information from 1 to 1 call logs
|
||||
in the WhatsApp database.
|
||||
"""
|
||||
|
||||
def __init__(self, calllog_db):
|
||||
super(WhatsAppSingleCallLogsParser, self).__init__(calllog_db.runQuery(
|
||||
"""
|
||||
SELECT CL.timestamp,
|
||||
CL.video_call,
|
||||
CL.duration,
|
||||
J.raw_string AS num,
|
||||
CL.from_me
|
||||
FROM call_log AS CL
|
||||
JOIN jid AS J
|
||||
ON J._id = CL.jid_row_id
|
||||
WHERE CL._id NOT IN (SELECT DISTINCT call_log_row_id
|
||||
FROM call_log_participant_v2)
|
||||
"""
|
||||
)
|
||||
)
|
||||
self._INCOMING_CALL_TYPE = 0
|
||||
self._OUTGOING_CALL_TYPE = 1
|
||||
self._VIDEO_CALL_TYPE = 1
|
||||
|
||||
def get_call_direction(self):
|
||||
if self.result_set.getInt("from_me") == self._INCOMING_CALL_TYPE:
|
||||
return self.INCOMING_CALL
|
||||
return self.OUTGOING_CALL
|
||||
|
||||
def get_phone_number_from(self):
|
||||
if self.get_call_direction() == self.INCOMING_CALL:
|
||||
sender = self.result_set.getString("num")
|
||||
return Account.Address(sender, sender)
|
||||
return super(WhatsAppSingleCallLogsParser, self).get_phone_number_from()
|
||||
|
||||
def get_phone_number_to(self):
|
||||
if self.get_call_direction() == self.OUTGOING_CALL:
|
||||
to = self.result_set.getString("num")
|
||||
return Account.Address(to, to)
|
||||
return super(WhatsAppSingleCallLogsParser, self).get_phone_number_to()
|
||||
|
||||
def get_call_start_date_time(self):
|
||||
return self.result_set.getLong("timestamp") / 1000
|
||||
|
||||
def get_call_end_date_time(self):
|
||||
start = self.get_call_start_date_time()
|
||||
duration = self.result_set.getInt("duration")
|
||||
return start + duration
|
||||
|
||||
def get_call_type(self):
|
||||
if self.result_set.getInt("video_call") == self._VIDEO_CALL_TYPE:
|
||||
return self.VIDEO_CALL
|
||||
return self.AUDIO_CALL
|
||||
|
||||
|
||||
class WhatsAppContactsParser(TskContactsParser):
|
||||
"""
|
||||
Extracts TSK_CONTACT information from the WhatsApp database.
|
||||
TSK_CONTACT fields that are not in the WhatsApp database are given
|
||||
a default value inherited from the super class.
|
||||
"""
|
||||
|
||||
def __init__(self, contact_db):
|
||||
super(WhatsAppContactsParser, self).__init__(contact_db.runQuery(
|
||||
"""
|
||||
SELECT jid,
|
||||
CASE
|
||||
WHEN WC.number IS NULL THEN WC.jid
|
||||
WHEN WC.number == "" THEN WC.jid
|
||||
ELSE WC.number
|
||||
END number,
|
||||
CASE
|
||||
WHEN WC.given_name IS NULL
|
||||
AND WC.family_name IS NULL
|
||||
AND WC.display_name IS NULL THEN WC.jid
|
||||
WHEN WC.given_name IS NULL
|
||||
AND WC.family_name IS NULL THEN WC.display_name
|
||||
WHEN WC.given_name IS NULL THEN WC.family_name
|
||||
WHEN WC.family_name IS NULL THEN WC.given_name
|
||||
ELSE WC.given_name
|
||||
|| " "
|
||||
|| WC.family_name
|
||||
END name
|
||||
FROM wa_contacts AS WC
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
def get_account_name(self):
|
||||
return self.result_set.getString("jid")
|
||||
|
||||
def get_contact_name(self):
|
||||
return self.result_set.getString("name")
|
||||
|
||||
def get_phone(self):
|
||||
return self.result_set.getString("number")
|
||||
|
||||
class WhatsAppMessagesParser(TskMessagesParser):
|
||||
"""
|
||||
Extract TSK_MESSAGE information from the WhatsApp database.
|
||||
TSK_MESSAGE fields that are not in the WhatsApp database are given
|
||||
a default value inherited from the super class.
|
||||
"""
|
||||
|
||||
def __init__(self, message_db):
|
||||
super(WhatsAppMessagesParser, self).__init__(message_db.runQuery(
|
||||
"""
|
||||
SELECT M.key_remote_jid AS id,
|
||||
contact_info.recipients,
|
||||
key_from_me AS direction,
|
||||
CASE
|
||||
WHEN M.data IS NULL THEN ""
|
||||
ELSE M.data
|
||||
END AS content,
|
||||
M.timestamp AS send_timestamp,
|
||||
M.received_timestamp,
|
||||
M.remote_resource AS group_sender,
|
||||
M.media_url As attachment
|
||||
FROM (SELECT jid,
|
||||
recipients
|
||||
FROM wadb.wa_contacts AS WC
|
||||
LEFT JOIN (SELECT gjid,
|
||||
group_concat(CASE
|
||||
WHEN jid == "" THEN NULL
|
||||
ELSE jid
|
||||
END) AS recipients
|
||||
FROM group_participants
|
||||
GROUP BY gjid) AS group_map
|
||||
ON WC.jid = group_map.gjid
|
||||
GROUP BY jid) AS contact_info
|
||||
JOIN messages AS M
|
||||
ON M.key_remote_jid = contact_info.jid
|
||||
"""
|
||||
)
|
||||
)
|
||||
self._WHATSAPP_MESSAGE_TYPE = "WhatsApp Message"
|
||||
self._INCOMING_MESSAGE_TYPE = 0
|
||||
self._OUTGOING_MESSAGE_TYPE = 1
|
||||
self._message_db = message_db
|
||||
|
||||
def get_message_type(self):
|
||||
return self._WHATSAPP_MESSAGE_TYPE
|
||||
|
||||
def get_phone_number_to(self):
|
||||
if self.get_message_direction() == self.OUTGOING:
|
||||
group = self.result_set.getString("recipients")
|
||||
if group is not None:
|
||||
group = group.split(",")
|
||||
|
||||
recipients = []
|
||||
for token in group:
|
||||
recipients.append(Account.Address(token, token))
|
||||
|
||||
return recipients
|
||||
|
||||
return Account.Address(self.result_set.getString("id"),
|
||||
self.result_set.getString("id"))
|
||||
return super(WhatsAppMessagesParser, self).get_phone_number_to()
|
||||
|
||||
def get_phone_number_from(self):
|
||||
if self.get_message_direction() == self.INCOMING:
|
||||
group_sender = self.result_set.getString("group_sender")
|
||||
group = self.result_set.getString("recipients")
|
||||
if group_sender is not None and group is not None:
|
||||
return Account.Address(group_sender, group_sender)
|
||||
else:
|
||||
return Account.Address(self.result_set.getString("id"),
|
||||
self.result_set.getString("id"))
|
||||
return super(WhatsAppMessagesParser, self).get_phone_number_from()
|
||||
|
||||
def get_message_direction(self):
|
||||
direction = self.result_set.getInt("direction")
|
||||
if direction == self._INCOMING_MESSAGE_TYPE:
|
||||
return self.INCOMING
|
||||
return self.OUTGOING
|
||||
|
||||
def get_message_date_time(self):
|
||||
#transform from ms to seconds
|
||||
if self.get_message_direction() == self.OUTGOING:
|
||||
return self.result_set.getLong("send_timestamp") / 1000
|
||||
return self.result_set.getLong("received_timestamp") / 1000
|
||||
|
||||
def get_message_text(self):
|
||||
message = self.result_set.getString("content")
|
||||
attachment = self.result_set.getString("attachment")
|
||||
if attachment is not None:
|
||||
return general.appendAttachmentList(message, [attachment])
|
||||
return message
|
||||
|
||||
def get_thread_id(self):
|
||||
group = self.result_set.getString("recipients")
|
||||
if group is not None:
|
||||
return self.result_set.getString("id")
|
||||
return super(WhatsAppMessagesParser, self).get_thread_id()
|
InternalPythonModules/android/xender.py (new file, 136 lines)
@ -0,0 +1,136 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2019 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Long
|
||||
from java.lang import String
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.apache.commons.codec.binary import Base64
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.datamodel import Account
|
||||
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
|
||||
import traceback
|
||||
import general
|
||||
|
||||
"""
|
||||
Finds the SQLite DB for Xender, parses the DB for contacts & messages,
|
||||
and adds artifacts to the case.
|
||||
"""
|
||||
class XenderAnalyzer(general.AndroidComponentAnalyzer):
|
||||
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._PACKAGE_NAME = "cn.xender"
|
||||
self._MODULE_NAME = "Xender Analyzer"
|
||||
self._MESSAGE_TYPE = "Xender Message"
|
||||
self._VERSION = "4.6.5"
|
||||
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
selfAccountAddress = None
|
||||
transactionDbs = AppSQLiteDB.findAppDatabases(dataSource, "trans-history-db", True, self._PACKAGE_NAME)
|
||||
for transactionDb in transactionDbs:
|
||||
try:
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
# get the profile with connect_times 0; that's the self account.
|
||||
profilesResultSet = transactionDb.runQuery("SELECT device_id, nick_name FROM profile WHERE connect_times = 0")
|
||||
if profilesResultSet:
|
||||
while profilesResultSet.next():
|
||||
if not selfAccountAddress:
|
||||
selfAccountAddress = Account.Address(profilesResultSet.getString("device_id"), profilesResultSet.getString("nick_name"))
|
||||
# create artifacts helper
|
||||
if selfAccountAddress is not None:
|
||||
transactionDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, transactionDb.getDBFile(),
|
||||
Account.Type.XENDER, Account.Type.XENDER, selfAccountAddress )
|
||||
else:
|
||||
transactionDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, transactionDb.getDBFile(),
|
||||
Account.Type.XENDER)
|
||||
|
||||
queryString = "SELECT f_path, f_display_name, f_size_str, f_create_time, c_direction, c_session_id, s_name, s_device_id, r_name, r_device_id FROM new_history "
|
||||
messagesResultSet = transactionDb.runQuery(queryString)
|
||||
if messagesResultSet is not None:
|
||||
while messagesResultSet.next():
|
||||
direction = CommunicationDirection.UNKNOWN
|
||||
fromAddress = None
|
||||
toAddress = None
|
||||
|
||||
if (messagesResultSet.getInt("c_direction") == 1):
|
||||
direction = CommunicationDirection.OUTGOING
|
||||
toAddress = Account.Address(messagesResultSet.getString("r_device_id"), messagesResultSet.getString("r_name"))
|
||||
else:
|
||||
direction = CommunicationDirection.INCOMING
|
||||
fromAddress = Account.Address(messagesResultSet.getString("s_device_id"), messagesResultSet.getString("s_name"))
|
||||
|
||||
msgBody = "" # there is no body.
|
||||
attachments = [messagesResultSet.getString("f_path")]
|
||||
msgBody = general.appendAttachmentList(msgBody, attachments)
|
||||
|
||||
timeStamp = messagesResultSet.getLong("f_create_time") / 1000
|
||||
messageArtifact = transactionDbHelper.addMessage(
|
||||
self._MESSAGE_TYPE,
|
||||
direction,
|
||||
fromAddress,
|
||||
toAddress,
|
||||
timeStamp,
|
||||
MessageReadStatus.UNKNOWN,
|
||||
None, # subject
|
||||
msgBody,
|
||||
messagesResultSet.getString("c_session_id") )
|
||||
|
||||
# TBD: add the file as attachment ??
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query result for profiles", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to create Xender message artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except NoCurrentCaseException as ex:
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
transactionDb.close()
|
||||
|
||||
|
||||
|
InternalPythonModules/android/zapya.py (new file, 124 lines)
@ -0,0 +1,124 @@
"""
Autopsy Forensic Browser

Copyright 2019 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from java.io import File
from java.lang import Class
from java.lang import ClassNotFoundException
from java.lang import Long
from java.lang import String
from java.sql import ResultSet
from java.sql import SQLException
from java.sql import Statement
from java.util.logging import Level
from java.util import ArrayList
from org.apache.commons.codec.binary import Base64
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
from org.sleuthkit.autopsy.datamodel import ContentUtils
from org.sleuthkit.autopsy.ingest import IngestJobContext
from org.sleuthkit.datamodel import AbstractFile
from org.sleuthkit.datamodel import BlackboardArtifact
from org.sleuthkit.datamodel import BlackboardAttribute
from org.sleuthkit.datamodel import Content
from org.sleuthkit.datamodel import TskCoreException
from org.sleuthkit.datamodel.Blackboard import BlackboardException
from org.sleuthkit.datamodel import Account
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection

import traceback
import general

"""
Finds the SQLite DB for Zapya, parses the DB for contacts & messages,
and adds artifacts to the case.
"""
class ZapyaAnalyzer(general.AndroidComponentAnalyzer):

    def __init__(self):
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._PACKAGE_NAME = "com.dewmobile.kuaiya.play"
        self._MODULE_NAME = "Zapya Analyzer"
        self._MESSAGE_TYPE = "Zapya Message"
        self._VERSION = "5.8.3"

    def analyze(self, dataSource, fileManager, context):
        transferDbs = AppSQLiteDB.findAppDatabases(dataSource, "transfer20.db", True, self._PACKAGE_NAME)
        for transferDb in transferDbs:
            try:
                current_case = Case.getCurrentCaseThrows()
                # Create a helper for adding Zapya message artifacts to the case database.
                transferDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
                                                                self._MODULE_NAME, transferDb.getDBFile(),
                                                                Account.Type.ZAPYA)

                queryString = "SELECT device, name, direction, createtime, path, title FROM transfer"
                transfersResultSet = transferDb.runQuery(queryString)
                if transfersResultSet is not None:
                    while transfersResultSet.next():
                        direction = CommunicationDirection.UNKNOWN
                        fromAddress = None
                        toAddress = None

                        if (transfersResultSet.getInt("direction") == 1):
                            direction = CommunicationDirection.OUTGOING
                            toAddress = Account.Address(transfersResultSet.getString("device"), transfersResultSet.getString("name") )
                        else:
                            direction = CommunicationDirection.INCOMING
                            fromAddress = Account.Address(transfersResultSet.getString("device"), transfersResultSet.getString("name") )

                        msgBody = ""    # there is no body.
                        attachments = [transfersResultSet.getString("path")]
                        msgBody = general.appendAttachmentList(msgBody, attachments)

                        timeStamp = transfersResultSet.getLong("createtime") / 1000
                        messageArtifact = transferDbHelper.addMessage(
                                                            self._MESSAGE_TYPE,
                                                            direction,
                                                            fromAddress,
                                                            toAddress,
                                                            timeStamp,
                                                            MessageReadStatus.UNKNOWN,
                                                            None,   # subject
                                                            msgBody,
                                                            None )  # thread id

                        # TBD: add the file as attachment ??

            except SQLException as ex:
                self._logger.log(Level.WARNING, "Error processing query result for transfer", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            except TskCoreException as ex:
                self._logger.log(Level.SEVERE, "Failed to create Zapya message artifacts.", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
            except BlackboardException as ex:
                self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            except NoCurrentCaseException as ex:
                self._logger.log(Level.WARNING, "No case currently open.", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            finally:
                transferDb.close()
@ -192,33 +192,29 @@ The first question that you must answer is what type of data do you want the use
-# Data that is in a big text file or some other report that the user can review. To do this, you will use the Case.addReport() method to make the output available in the directory tree.
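For the report route, a minimal sketch might look like the following; reportPath (a file your module has already written) and MODULE_NAME (your module's display name) are placeholders, not values defined by the framework:

\code
try {
    // reportPath: path to a report file your module already wrote to disk (placeholder).
    // MODULE_NAME: your module's display name (placeholder).
    Case.getCurrentCase().addReport(reportPath, MODULE_NAME, "My Module Report");
} catch (TskCoreException ex) {
    //YOUR EXCEPTION BEHAVIOR HERE. The report file still exists on disk.
}
\endcode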


\subsection ingest_modules_making_results_bb Posting Results to the Blackboard
\subsection ingest_modules_making_results_bb Saving Results to the Blackboard
The blackboard is used to store results so that they are displayed in the results tree.
See \ref platform_blackboard for details on posting results to it. You use the blackboard when you have specific items to show the user. if you want to just shown them a big report from another library or tool, see \ref mod_report_page.
See \ref platform_blackboard for details on saving results to it. You use the blackboard when you have specific items to show the user. If you want to just show them a big report from another library or tool, see \ref mod_report_page.
The blackboard defines artifacts for specific data types (such as web bookmarks).
You can use one of the standard artifact types or create your own.

When modules add data to the blackboard, they should notify listeners of the new
data by invoking the org.sleuthkit.autopsy.ingest.IngestServices.fireModuleDataEvent() method.
Do so as soon as you have added an artifact to the blackboard.
This allows other modules (and the main UI) to know when to query the blackboard
for the latest data. However, if you are writing a large number of blackboard
artifacts in a loop, it is better to invoke org.sleuthkit.autopsy.ingest.IngestServices.fireModuleDataEvent()
only once after the bulk write, so as not to flood the system with events.
After you've added an artifact and all of its attributes to the blackboard, you should call <a href="http://sleuthkit.org/sleuthkit/docs/jni-docs/4.6/classorg_1_1sleuthkit_1_1datamodel_1_1_blackboard.html">sleuthkit.Blackboard.postArtifact()</a>, which will:
<ul>
<li>Analyze the artifact and add any timestamps to the Timeline tables
<li>Send an event over the Sleuth Kit event bus that the artifact(s) was added
<ul>
<li>Autopsy is a listener of this event bus and will rebroadcast the event to other Autopsy modules
<li>Keyword search also listens for this event and will index the artifact
</ul>
</ul>
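A minimal sketch of a single post is shown below; artifact is one you have already created and MODULE_NAME is your module's display name (both placeholders), and it assumes your Sleuth Kit version exposes SleuthkitCase.getBlackboard():

\code
org.sleuthkit.datamodel.Blackboard blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard();
try {
    // Indexes the artifact for keyword search and fires the event in one call.
    blackboard.postArtifact(artifact, MODULE_NAME);
} catch (org.sleuthkit.datamodel.Blackboard.BlackboardException ex) {
    //YOUR EXCEPTION BEHAVIOR HERE.
}
\endcode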

Further, when modules create artifacts, they should be indexed for keyword search,
using the method org.sleuthkit.autopsy.casemodule.services.Blackboard.indexArtifact(BlackboardArtifact artifact). This can be done
in the following way:

\code
Blackboard blackboard = Case.getCurrentCase().getServices().getBlackboard();
try {
    blackboard.indexArtifact(artifact); //Your artifact as the argument.
}
catch (BlackboardException ex) {
    //YOUR EXCEPTION BEHAVIOR HERE.
}
\endcode
This means you no longer have to make separate calls to:
- Index the artifact
- Fire the event to refresh the UI.

If you are creating a large number of artifacts, you may see better performance if you save all the artifacts you create and do one bulk post at the end using <a href="http://sleuthkit.org/sleuthkit/docs/jni-docs/4.6/classorg_1_1sleuthkit_1_1datamodel_1_1_blackboard.html">sleuthkit.Blackboard.postArtifacts()</a>. You can also post batches of artifacts instead of saving all of them until the end.
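A rough sketch of batching, using the same blackboard object as in the previous snippet (artifacts is a list your module fills while processing and MODULE_NAME is again a placeholder):

\code
List<BlackboardArtifact> artifacts = new ArrayList<>();
// ... add each artifact you create to the list instead of posting it right away ...
try {
    // One event and one indexing pass for the whole batch.
    blackboard.postArtifacts(artifacts, MODULE_NAME);
} catch (org.sleuthkit.datamodel.Blackboard.BlackboardException ex) {
    //YOUR EXCEPTION BEHAVIOR HERE.
}
\endcode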

You should not be using the Autopsy version of Blackboard. Those methods have all been deprecated; this is another example of us moving "services" into the TSK data model.


\subsection ingest_modules_making_results_report Making a Report
@ -52,7 +52,7 @@ The blackboard allows modules to communicate with each other and the UI. It has

The blackboard is not unique to Autopsy. It is part of The Sleuth Kit datamodel and The Sleuth Kit Framework. In the name of reducing the amount of documentation that we need to maintain, we provide links here to those documentation sources.

- <a href="http://sleuthkit.org/sleuthkit/docs/jni-docs/4.3/mod_bbpage.html">The Blackboard</a>
- <a href="http://sleuthkit.org/sleuthkit/docs/jni-docs/4.6/mod_bbpage.html">The Blackboard</a>


\subsection mod_dev_other_services Framework Services and Utilities
@ -224,8 +224,6 @@ final class VcardParser {
        if (!tskBlackboard.artifactExists(abstractFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT, attributes)) {
            artifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT);
            artifact.addAttributes(attributes);
            List<BlackboardArtifact> blackboardArtifacts = new ArrayList<>();
            blackboardArtifacts.add(artifact);

            extractPhotos(vcard, abstractFile, artifact);

@ -388,8 +386,12 @@ final class VcardParser {
     */
    private void addPhoneAttributes(Telephone telephone, AbstractFile abstractFile, Collection<BlackboardAttribute> attributes) {
        String telephoneText = telephone.getText();

        if (telephoneText == null || telephoneText.isEmpty()) {
            return;
            telephoneText = telephone.getUri().getNumber();
            if (telephoneText == null || telephoneText.isEmpty()) {
                return;
            }
        }

        // Add phone number to collection for later creation of TSK_CONTACT.
@ -408,20 +410,25 @@ final class VcardParser {
                type.getValue().toUpperCase().replaceAll("\\s+","").split(","));

        for (String splitType : splitTelephoneTypes) {
            String attributeTypeName = "TSK_PHONE_NUMBER_" + splitType;
            String attributeTypeName = "TSK_PHONE_NUMBER";
            if(splitType != null && !splitType.isEmpty()) {
                attributeTypeName = "TSK_PHONE_NUMBER_" + splitType;
            }

            try {
                BlackboardAttribute.Type attributeType = tskCase.getAttributeType(attributeTypeName);
                if (attributeType == null) {
                    // Add this attribute type to the case database.
                    attributeType = tskCase.addArtifactAttributeType(attributeTypeName,
                            BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                            String.format("Phone (%s)", StringUtils.capitalize(splitType.toLowerCase())));
                    attributeType = tskCase.addArtifactAttributeType(attributeTypeName,
                            BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                            String.format("Phone Number (%s)", StringUtils.capitalize(splitType.toLowerCase())));

                }
                ThunderbirdMboxFileIngestModule.addArtifactAttribute(telephone.getText(), attributeType, attributes);
                ThunderbirdMboxFileIngestModule.addArtifactAttribute(telephoneText, attributeType, attributes);
            } catch (TskCoreException ex) {
                logger.log(Level.SEVERE, String.format("Unable to retrieve attribute type '%s' for file '%s' (id=%d).", attributeTypeName, abstractFile.getName(), abstractFile.getId()), ex);
                logger.log(Level.WARNING, String.format("Unable to retrieve attribute type '%s' for file '%s' (id=%d).", attributeTypeName, abstractFile.getName(), abstractFile.getId()), ex);
            } catch (TskDataException ex) {
                logger.log(Level.SEVERE, String.format("Unable to add custom attribute type '%s' for file '%s' (id=%d).", attributeTypeName, abstractFile.getName(), abstractFile.getId()), ex);
                logger.log(Level.WARNING, String.format("Unable to add custom attribute type '%s' for file '%s' (id=%d).", attributeTypeName, abstractFile.getName(), abstractFile.getId()), ex);
            }
        }
    }
@ -490,7 +497,11 @@ final class VcardParser {
    private void addPhoneAccountInstances(Telephone telephone, AbstractFile abstractFile, Collection<AccountFileInstance> accountInstances) {
        String telephoneText = telephone.getText();
        if (telephoneText == null || telephoneText.isEmpty()) {
            return;
            telephoneText = telephone.getUri().getNumber();
            if (telephoneText == null || telephoneText.isEmpty()) {
                return;
            }

        }

        // Add phone number as a TSK_ACCOUNT.