Mirror of https://github.com/overcuriousity/autopsy-flatpak.git (synced 2025-07-19 11:07:43 +00:00)

Remove Python Android Module

This commit is contained in:
parent abaf0efac8
commit d362dc3c69
@@ -1,35 +0,0 @@
"""
Autopsy Forensic Browser

Copyright 2019 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

class ResultSetIterator(object):
    """
    Generic base class for iterating through database records.
    """

    def __init__(self, result_set):
        self.result_set = result_set

    def next(self):
        if self.result_set is None:
            return False
        return self.result_set.next()

    def close(self):
        if self.result_set is not None:
            self.result_set.close()
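For illustration only (not part of the removed files): a minimal usage sketch of ResultSetIterator, assuming a java.sql ResultSet named result_set obtained elsewhere.

# it = ResultSetIterator(result_set)   # result_set: a java.sql.ResultSet (assumed)
# try:
#     while it.next():
#         pass  # read columns from it.result_set here
# finally:
#     it.close()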
@@ -1,64 +0,0 @@
"""
Autopsy Forensic Browser

Copyright 2019 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from ResultSetIterator import ResultSetIterator
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CallMediaType
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
from org.sleuthkit.datamodel import Account

class TskCallLogsParser(ResultSetIterator):
    """
    Generic TSK_CALLLOG artifact template. Each of these methods
    will contain the extraction and transformation logic for
    converting raw database records to the expected TSK_CALLLOG
    format.

    A simple example of data transformation would be computing
    the end time of a call when the database only supplies the start
    time and duration.
    """

    def __init__(self, result_set):
        super(TskCallLogsParser, self).__init__(result_set)
        self._DEFAULT_STRING = ""
        self._DEFAULT_DIRECTION = CommunicationDirection.UNKNOWN
        self._DEFAULT_CALL_TYPE = CallMediaType.UNKNOWN
        self._DEFAULT_LONG = -1L

        self.INCOMING_CALL = CommunicationDirection.INCOMING
        self.OUTGOING_CALL = CommunicationDirection.OUTGOING
        self.AUDIO_CALL = CallMediaType.AUDIO
        self.VIDEO_CALL = CallMediaType.VIDEO

    def get_call_direction(self):
        return self._DEFAULT_DIRECTION

    def get_phone_number_from(self):
        return self._DEFAULT_STRING

    def get_phone_number_to(self):
        return self._DEFAULT_STRING

    def get_call_start_date_time(self):
        return self._DEFAULT_LONG

    def get_call_end_date_time(self):
        return self._DEFAULT_LONG

    def get_call_type(self):
        return self._DEFAULT_CALL_TYPE
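For illustration only (not part of the removed files): a minimal sketch of the transformation the docstring describes, where a hypothetical subclass derives the call end time from a start time plus a duration. The column names "start_time" and "duration" are assumptions, not from any real database schema.

class ExampleCallLogsParser(TskCallLogsParser):
    """Hypothetical subclass showing the start-time-plus-duration transformation."""

    def __init__(self, result_set):
        super(ExampleCallLogsParser, self).__init__(result_set)

    def get_call_start_date_time(self):
        # assumed column name: start_time, stored in seconds
        return self.result_set.getLong("start_time")

    def get_call_end_date_time(self):
        # end time is not stored directly; derive it from start time and duration (seconds)
        start = self.get_call_start_date_time()
        if start == self._DEFAULT_LONG:
            return self._DEFAULT_LONG
        return start + self.result_set.getLong("duration")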
@@ -1,49 +0,0 @@
"""
Autopsy Forensic Browser

Copyright 2019 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from ResultSetIterator import ResultSetIterator

class TskContactsParser(ResultSetIterator):
    """
    Generic TSK_CONTACT artifact template. Each of these methods
    will contain the extraction and transformation logic for
    converting raw database records to the expected TSK_CONTACT
    format.
    """

    def __init__(self, result_set):
        super(TskContactsParser, self).__init__(result_set)
        self._DEFAULT_VALUE = ""

    def get_contact_name(self):
        return self._DEFAULT_VALUE

    def get_phone(self):
        return self._DEFAULT_VALUE

    def get_home_phone(self):
        return self._DEFAULT_VALUE

    def get_mobile_phone(self):
        return self._DEFAULT_VALUE

    def get_email(self):
        return self._DEFAULT_VALUE

    def get_other_attributes(self):
        return None
@@ -1,72 +0,0 @@
"""
Autopsy Forensic Browser

Copyright 2019 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from ResultSetIterator import ResultSetIterator
from org.sleuthkit.datamodel import Account
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection

class TskMessagesParser(ResultSetIterator):
    """
    Generic TSK_MESSAGE artifact template. Each of these methods
    will contain the extraction and transformation logic for
    converting raw database records to the expected TSK_MESSAGE
    format.

    An easy example of such a transformation would be converting
    message date time from milliseconds to seconds.
    """

    def __init__(self, result_set):
        super(TskMessagesParser, self).__init__(result_set)
        self._DEFAULT_TEXT = ""
        self._DEFAULT_LONG = -1L
        self._DEFAULT_MSG_READ_STATUS = MessageReadStatus.UNKNOWN
        self._DEFAULT_COMMUNICATION_DIRECTION = CommunicationDirection.UNKNOWN

        self.INCOMING = CommunicationDirection.INCOMING
        self.OUTGOING = CommunicationDirection.OUTGOING
        self.READ = MessageReadStatus.READ
        self.UNREAD = MessageReadStatus.UNREAD

    def get_message_type(self):
        return self._DEFAULT_TEXT

    def get_message_direction(self):
        return self._DEFAULT_COMMUNICATION_DIRECTION

    def get_phone_number_from(self):
        return self._DEFAULT_TEXT

    def get_phone_number_to(self):
        return self._DEFAULT_TEXT

    def get_message_date_time(self):
        return self._DEFAULT_LONG

    def get_message_read_status(self):
        return self._DEFAULT_MSG_READ_STATUS

    def get_message_subject(self):
        return self._DEFAULT_TEXT

    def get_message_text(self):
        return self._DEFAULT_TEXT

    def get_thread_id(self):
        return self._DEFAULT_TEXT
@@ -1,130 +0,0 @@
"""
Autopsy Forensic Browser

Copyright 2016-2018 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from java.io import File
from java.lang import Class
from java.lang import ClassNotFoundException
from java.lang import Double
from java.lang import Long
from java.sql import Connection
from java.sql import DriverManager
from java.sql import ResultSet
from java.sql import SQLException
from java.sql import Statement
from java.util.logging import Level
from java.util import ArrayList
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.casemodule.services import FileManager
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
from org.sleuthkit.autopsy.datamodel import ContentUtils
from org.sleuthkit.autopsy.ingest import IngestJobContext
from org.sleuthkit.datamodel import AbstractFile
from org.sleuthkit.datamodel import Blackboard
from org.sleuthkit.datamodel import BlackboardArtifact
from org.sleuthkit.datamodel import BlackboardAttribute
from org.sleuthkit.datamodel import Content
from org.sleuthkit.datamodel import TskCoreException

import traceback
import general

"""
Analyzes the database created by the browser that stores GEO location info.
"""
class BrowserLocationAnalyzer(general.AndroidComponentAnalyzer):

    def __init__(self):
        self._logger = Logger.getLogger(self.__class__.__name__)

    def analyze(self, dataSource, fileManager, context):
        try:
            abstractFiles = fileManager.findFiles(dataSource, "CachedGeoposition%.db")
            for abstractFile in abstractFiles:
                if abstractFile.getSize() == 0:
                    continue
                try:
                    jFile = File(Case.getCurrentCase().getTempDirectory(), str(abstractFile.getId()) + abstractFile.getName())
                    ContentUtils.writeToFile(abstractFile, jFile, context.dataSourceIngestIsCancelled)
                    self.__findGeoLocationsInDB(jFile.toString(), abstractFile)
                except Exception as ex:
                    self._logger.log(Level.SEVERE, "Error parsing browser location files", ex)
                    self._logger.log(Level.SEVERE, traceback.format_exc())
        except TskCoreException as ex:
            # Error finding browser location files.
            pass

    def __findGeoLocationsInDB(self, databasePath, abstractFile):
        if not databasePath:
            return

        try:
            Class.forName("org.sqlite.JDBC")  # load JDBC driver
            connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
            statement = connection.createStatement()
        except (ClassNotFoundException) as ex:
            self._logger.log(Level.SEVERE, "Error loading JDBC driver", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
            return
        except (SQLException) as ex:
            # Error connecting to SQL database.
            return

        resultSet = None
        try:
            resultSet = statement.executeQuery("SELECT timestamp, latitude, longitude, accuracy FROM CachedPosition;")
            while resultSet.next():
                timestamp = Long.valueOf(resultSet.getString("timestamp")) / 1000
                latitude = Double.valueOf(resultSet.getString("latitude"))
                longitude = Double.valueOf(resultSet.getString("longitude"))

                attributes = ArrayList()
                artifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK)
                attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE, general.MODULE_NAME, latitude))
                attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE, general.MODULE_NAME, longitude))
                attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, general.MODULE_NAME, timestamp))
                attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, general.MODULE_NAME, "Browser Location History"))
                # artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), moduleName, accuracy))
                # NOTE: originally commented out

                artifact.addAttributes(attributes)
                try:
                    # index the artifact for keyword search
                    blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard()
                    blackboard.postArtifact(artifact, general.MODULE_NAME)
                except Blackboard.BlackboardException as ex:
                    self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactTypeName()), ex)
                    self._logger.log(Level.SEVERE, traceback.format_exc())
                    MessageNotifyUtil.Notify.error("Failed to index GPS trackpoint artifact for keyword search.", artifact.getDisplayName())

        except SQLException as ex:
            # Unable to execute browser location SQL query against database.
            pass
        except Exception as ex:
            self._logger.log(Level.SEVERE, "Error processing browser location history.", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
        finally:
            try:
                if resultSet is not None:
                    resultSet.close()
                statement.close()
                connection.close()
            except Exception as ex:
                # Error closing database.
                pass
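For illustration only (not part of the removed files): the module above runs under Jython and uses the org.sqlite.JDBC driver; a sketch of the same CachedPosition query using CPython's built-in sqlite3 module, under the assumption that the database copy is readable on disk.

import sqlite3

def read_cached_positions(database_path):
    # Returns (timestamp_seconds, latitude, longitude) tuples from a copy of a
    # CachedGeoposition database; table and column names are taken from the query above.
    rows = []
    connection = sqlite3.connect(database_path)
    try:
        cursor = connection.execute(
            "SELECT timestamp, latitude, longitude, accuracy FROM CachedPosition;")
        for timestamp, latitude, longitude, accuracy in cursor:
            rows.append((int(timestamp) // 1000, float(latitude), float(longitude)))
    finally:
        connection.close()
    return rows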
@@ -1,121 +0,0 @@
"""
Autopsy Forensic Browser

Copyright 2016-2018 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from java.io import File
from java.io import FileInputStream
from java.io import InputStream
from java.lang import Class
from java.lang import ClassNotFoundException
from java.math import BigInteger
from java.nio import ByteBuffer
from java.util.logging import Level
from java.util import ArrayList
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.casemodule.services import FileManager
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
from org.sleuthkit.autopsy.datamodel import ContentUtils
from org.sleuthkit.autopsy.ingest import IngestJobContext
from org.sleuthkit.datamodel import AbstractFile
from org.sleuthkit.datamodel import Blackboard
from org.sleuthkit.datamodel import BlackboardArtifact
from org.sleuthkit.datamodel import BlackboardAttribute
from org.sleuthkit.datamodel import Content
from org.sleuthkit.datamodel import TskCoreException

import traceback
import general
import struct
import os

"""
Parses cache files that Android maintains for Wifi and cell towers. Adds GPS points to blackboard.
"""
class CacheLocationAnalyzer(general.AndroidComponentAnalyzer):

    def __init__(self):
        self._logger = Logger.getLogger(self.__class__.__name__)

    """
    cache.cell stores mobile tower GPS locations and cache.wifi stores GPS
    and MAC info from Wifi points.
    """
    def analyze(self, dataSource, fileManager, context):
        try:
            abstractFiles = fileManager.findFiles(dataSource, "cache.cell")
            abstractFiles.addAll(fileManager.findFiles(dataSource, "cache.wifi"))
            for abstractFile in abstractFiles:
                if abstractFile.getSize() == 0:
                    continue
                try:
                    jFile = File(Case.getCurrentCase().getTempDirectory(), str(abstractFile.getId()) + abstractFile.getName())
                    ContentUtils.writeToFile(abstractFile, jFile, context.dataSourceIngestIsCancelled)
                    self.__findGeoLocationsInFile(jFile, abstractFile)
                except Exception as ex:
                    self._logger.log(Level.SEVERE, "Error parsing cached location files", ex)
                    self._logger.log(Level.SEVERE, traceback.format_exc())
        except TskCoreException as ex:
            # Error finding cached location files.
            pass

    def __findGeoLocationsInFile(self, file, abstractFile):

        try:
            # code to parse the cache.wifi and cache.cell taken from https://forensics.spreitzenbarth.de/2011/10/28/decoding-cache-cell-and-cache-wifi-files/
            cacheFile = open(str(file), 'rb')
            (version, entries) = struct.unpack('>hh', cacheFile.read(4))
            # Check the number of entries * 32 (entry record size); if it is bigger than the file, that is an indication the file is malformed or corrupted.
            if ((entries * 32) < abstractFile.getSize()):
                i = 0
                self._logger.log(Level.INFO, "Number of Entries is " + str(entries) + " File size is " + str(abstractFile.getSize()))
                while i < entries:
                    key = cacheFile.read(struct.unpack('>h', cacheFile.read(2))[0])
                    (accuracy, confidence, latitude, longitude, readtime) = struct.unpack('>iiddQ', cacheFile.read(32))
                    timestamp = readtime / 1000
                    i = i + 1

                    attributes = ArrayList()
                    artifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK)
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE, general.MODULE_NAME, latitude))
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE, general.MODULE_NAME, longitude))
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, general.MODULE_NAME, timestamp))
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, general.MODULE_NAME,
                                                       abstractFile.getName() + " Location History"))

                    artifact.addAttributes(attributes)
                    # Not storing these for now.
                    # artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), AndroidModuleFactorymodule.moduleName, accuracy))
                    # artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID(), AndroidModuleFactorymodule.moduleName, confidence))
                    try:
                        # index the artifact for keyword search
                        blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard()
                        blackboard.postArtifact(artifact, general.MODULE_NAME)
                    except Blackboard.BlackboardException as ex:
                        self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactID()), ex)
                        self._logger.log(Level.SEVERE, traceback.format_exc())
                        MessageNotifyUtil.Notify.error("Failed to index GPS trackpoint artifact for keyword search.", artifact.getDisplayName())
                cacheFile.close()
            else:
                self._logger.log(Level.WARNING, "Number of entries in file exceeds file size of file " + os.path.join(abstractFile.getParentPath(), abstractFile.getName()))
        except Exception as ex:
            self._logger.log(Level.SEVERE, "Error parsing Cached GPS locations to blackboard", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())

    def toDouble(byteArray):
        return ByteBuffer.wrap(byteArray).getDouble()
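For illustration only (not part of the removed files): a standalone sketch of the cache.cell / cache.wifi record layout the code above reads - a big-endian '>hh' header (version, entry count), then per entry a '>h' key length, the key bytes, and a 32-byte '>iiddQ' record. Pure Python, no Autopsy dependencies.

import struct

def parse_location_cache(path):
    # Yields (key, accuracy, confidence, latitude, longitude, timestamp_seconds)
    # tuples following the layout used in __findGeoLocationsInFile above.
    with open(path, 'rb') as cache_file:
        version, entries = struct.unpack('>hh', cache_file.read(4))
        for _ in range(entries):
            key_length = struct.unpack('>h', cache_file.read(2))[0]
            key = cache_file.read(key_length)
            accuracy, confidence, latitude, longitude, readtime = struct.unpack('>iiddQ', cache_file.read(32))
            yield key, accuracy, confidence, latitude, longitude, readtime // 1000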
@@ -1,148 +0,0 @@
"""
Autopsy Forensic Browser

Copyright 2016-2020 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from java.io import File
from java.lang import Class
from java.lang import ClassNotFoundException
from java.lang import Integer
from java.lang import Long
from java.sql import Connection
from java.sql import DriverManager
from java.sql import ResultSet
from java.sql import SQLException
from java.sql import Statement
from java.util.logging import Level
from java.util import ArrayList
from java.util import UUID
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
from org.sleuthkit.autopsy.casemodule.services import FileManager
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
from org.sleuthkit.autopsy.datamodel import ContentUtils
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
from org.sleuthkit.autopsy.ingest import IngestJobContext
from org.sleuthkit.autopsy.ingest import IngestServices
from org.sleuthkit.autopsy.ingest import ModuleDataEvent
from org.sleuthkit.datamodel import AbstractFile
from org.sleuthkit.datamodel import Blackboard
from org.sleuthkit.datamodel import BlackboardArtifact
from org.sleuthkit.datamodel import BlackboardAttribute
from org.sleuthkit.datamodel import Content
from org.sleuthkit.datamodel import TskCoreException
from org.sleuthkit.datamodel.Blackboard import BlackboardException
from org.sleuthkit.datamodel import Account
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CallMediaType

import traceback
import general

class CallLogAnalyzer(general.AndroidComponentAnalyzer):
    """
    Locates a variety of different call log databases, parses them, and populates the blackboard.
    """

    # the names of db files that potentially hold call logs
    _dbFileNames = ["logs.db", "contacts.db", "contacts2.db"]

    # the names of tables that potentially hold call logs in the dbs
    _tableNames = ["calls", "logs"]

    def __init__(self):
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._PACKAGE_NAME = "com.sec.android.provider.logsprovider"
        self._PARSER_NAME = "Android CallLog Parser"

    def analyze(self, dataSource, fileManager, context):
        for _dbFileName in CallLogAnalyzer._dbFileNames:
            selfAccountId = None
            callLogDbs = AppSQLiteDB.findAppDatabases(dataSource, _dbFileName, True, self._PACKAGE_NAME)
            for callLogDb in callLogDbs:
                try:
                    current_case = Case.getCurrentCaseThrows()
                    if selfAccountId is not None:
                        callLogDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
                                                self._PARSER_NAME,
                                                callLogDb.getDBFile(),
                                                Account.Type.PHONE, Account.Type.PHONE, selfAccountId)
                    else:
                        callLogDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
                                                self._PARSER_NAME,
                                                callLogDb.getDBFile(),
                                                Account.Type.PHONE)

                    for tableName in CallLogAnalyzer._tableNames:
                        try:
                            tableFound = callLogDb.tableExists(tableName)
                            if tableFound:
                                resultSet = callLogDb.runQuery("SELECT number, date, duration, type, name FROM " + tableName + " ORDER BY date DESC;")
                                self._logger.log(Level.INFO, "Reading call log from table {0} in db {1}", [tableName, callLogDb.getDBFile().getName()])
                                if resultSet is not None:
                                    while resultSet.next():
                                        direction = ""
                                        callerId = None
                                        calleeId = None

                                        timeStamp = resultSet.getLong("date") / 1000
                                        number = resultSet.getString("number")

                                        duration = resultSet.getLong("duration")  # duration of call is in seconds
                                        name = resultSet.getString("name")  # name of person dialed or called. None if unregistered

                                        calltype = resultSet.getInt("type")
                                        if calltype == 1 or calltype == 3:
                                            direction = CommunicationDirection.INCOMING
                                            callerId = number
                                        elif calltype == 2 or calltype == 5:
                                            direction = CommunicationDirection.OUTGOING
                                            calleeId = number
                                        else:
                                            direction = CommunicationDirection.UNKNOWN

                                        ## add a call log
                                        if callerId is not None or calleeId is not None:
                                            callLogArtifact = callLogDbHelper.addCalllog(direction,
                                                                callerId,
                                                                calleeId,
                                                                timeStamp,                    ## start time
                                                                timeStamp + duration * 1000,  ## end time
                                                                CallMediaType.AUDIO)

                        except SQLException as ex:
                            self._logger.log(Level.WARNING, "Error processing query result for Android messages.", ex)
                            self._logger.log(Level.WARNING, traceback.format_exc())
                        except TskCoreException as ex:
                            self._logger.log(Level.SEVERE, "Failed to add Android call log artifacts.", ex)
                            self._logger.log(Level.SEVERE, traceback.format_exc())
                        except BlackboardException as ex:
                            self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
                            self._logger.log(Level.WARNING, traceback.format_exc())

                except TskCoreException as ex:
                    self._logger.log(Level.SEVERE, "Failed to create CommunicationArtifactsHelper.", ex)
                    self._logger.log(Level.SEVERE, traceback.format_exc())
                except NoCurrentCaseException as ex:
                    self._logger.log(Level.WARNING, "No case currently open.", ex)
                    self._logger.log(Level.WARNING, traceback.format_exc())
                finally:
                    callLogDb.close()
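For illustration only (not part of the removed files): the call direction logic above isolated as a small helper - Android call log 'type' values 1 and 3 are treated as incoming and 2 and 5 as outgoing. The string constants are illustrative stand-ins; the module itself uses the CommunicationDirection enum from the TSK datamodel.

INCOMING, OUTGOING, UNKNOWN = "INCOMING", "OUTGOING", "UNKNOWN"

def direction_for_call_type(calltype):
    # Mirrors the if/elif/else on resultSet.getInt("type") above.
    if calltype in (1, 3):
        return INCOMING
    if calltype in (2, 5):
        return OUTGOING
    return UNKNOWN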
@@ -1,176 +0,0 @@
"""
Autopsy Forensic Browser

Copyright 2016-2020 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from java.io import File
from java.lang import Class
from java.lang import ClassNotFoundException
from java.lang import Integer
from java.lang import Long
from java.sql import Connection
from java.sql import DatabaseMetaData
from java.sql import DriverManager
from java.sql import ResultSet
from java.sql import SQLException
from java.sql import Statement
from java.util.logging import Level
from java.util import ArrayList
from java.util import UUID
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
from org.sleuthkit.autopsy.casemodule.services import FileManager
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
from org.sleuthkit.autopsy.datamodel import ContentUtils
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
from org.sleuthkit.autopsy.ingest import IngestJobContext
from org.sleuthkit.autopsy.ingest import IngestServices
from org.sleuthkit.autopsy.ingest import ModuleDataEvent
from org.sleuthkit.datamodel import AbstractFile
from org.sleuthkit.datamodel import Blackboard
from org.sleuthkit.datamodel import BlackboardArtifact
from org.sleuthkit.datamodel import BlackboardAttribute
from org.sleuthkit.datamodel import Content
from org.sleuthkit.datamodel import TskCoreException
from org.sleuthkit.datamodel.Blackboard import BlackboardException
from org.sleuthkit.datamodel import Account
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper

import traceback
import general

class ContactAnalyzer(general.AndroidComponentAnalyzer):

    """
    Finds and parses the Android contacts database, and populates the blackboard with Contacts.
    """

    def __init__(self):
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._PACKAGE_NAME = "com.android.providers.contacts"
        self._PARSER_NAME = "Android Contacts Parser"
        self._VERSION = "53.1.0.1"  # icu_version in 'properties' table.

    def analyze(self, dataSource, fileManager, context):
        try:

            contactsDbs = AppSQLiteDB.findAppDatabases(dataSource, "contacts.db", True, self._PACKAGE_NAME)
            contactsDbs.addAll(AppSQLiteDB.findAppDatabases(dataSource, "contacts2.db", True, self._PACKAGE_NAME))
            if contactsDbs.isEmpty():
                return
            for contactDb in contactsDbs:
                try:
                    self.__findContactsInDB(contactDb, dataSource)
                except Exception as ex:
                    self._logger.log(Level.SEVERE, "Error parsing Contacts", ex)
                    self._logger.log(Level.SEVERE, traceback.format_exc())
        except TskCoreException as ex:
            # Error finding Contacts.
            pass

    """
    Queries the given contact database and adds Contacts to the case.
    """
    def __findContactsInDB(self, contactDb, dataSource):
        if not contactDb:
            return

        try:
            current_case = Case.getCurrentCaseThrows()

            # Create a helper to parse the DB
            contactDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
                                    self._PARSER_NAME,
                                    contactDb.getDBFile(),
                                    Account.Type.PHONE)

            # get display_name, mimetype (email or phone number) and data1 (phone number or email address depending on mimetype),
            # sorted by name, so the phone number/email would be consecutive for a person if they exist.
            # check if contacts.name_raw_contact_id exists. Modify the query accordingly.
            columnFound = contactDb.columnExists("contacts", "name_raw_contact_id")
            if columnFound:
                resultSet = contactDb.runQuery(
                    "SELECT mimetype, data1, name_raw_contact.display_name AS display_name \n"
                    + "FROM raw_contacts JOIN contacts ON (raw_contacts.contact_id=contacts._id) \n"
                    + "JOIN raw_contacts AS name_raw_contact ON(name_raw_contact_id=name_raw_contact._id) "
                    + "LEFT OUTER JOIN data ON (data.raw_contact_id=raw_contacts._id) \n"
                    + "LEFT OUTER JOIN mimetypes ON (data.mimetype_id=mimetypes._id) \n"
                    + "WHERE mimetype = 'vnd.android.cursor.item/phone_v2' OR mimetype = 'vnd.android.cursor.item/email_v2'\n"
                    + "ORDER BY name_raw_contact.display_name ASC;")
            else:
                resultSet = contactDb.runQuery(
                    "SELECT mimetype, data1, raw_contacts.display_name AS display_name \n"
                    + "FROM raw_contacts JOIN contacts ON (raw_contacts.contact_id=contacts._id) \n"
                    + "LEFT OUTER JOIN data ON (data.raw_contact_id=raw_contacts._id) \n"
                    + "LEFT OUTER JOIN mimetypes ON (data.mimetype_id=mimetypes._id) \n"
                    + "WHERE mimetype = 'vnd.android.cursor.item/phone_v2' OR mimetype = 'vnd.android.cursor.item/email_v2'\n"
                    + "ORDER BY raw_contacts.display_name ASC;")

            contactArtifact = None
            oldName = None
            phoneNumber = None
            emailAddr = None
            name = None
            while resultSet.next():
                name = resultSet.getString("display_name")
                data1 = resultSet.getString("data1")  # the phone number or email
                mimetype = resultSet.getString("mimetype")  # either phone or email
                if oldName and (name != oldName):
                    if phoneNumber or emailAddr:
                        contactArtifact = contactDbHelper.addContact(oldName,
                                            phoneNumber,  # phoneNumber,
                                            None,         # homePhoneNumber,
                                            None,         # mobilePhoneNumber,
                                            emailAddr)    # emailAddr

                    oldName = name
                    phoneNumber = None
                    emailAddr = None
                    name = None

                if mimetype == "vnd.android.cursor.item/phone_v2":
                    phoneNumber = data1
                else:
                    emailAddr = data1

                if name:
                    oldName = name

            # create contact for last row
            if oldName and (phoneNumber or emailAddr):
                contactArtifact = contactDbHelper.addContact(oldName,
                                    phoneNumber,  # phoneNumber,
                                    None,         # homePhoneNumber,
                                    None,         # mobilePhoneNumber,
                                    emailAddr)    # emailAddr

        except SQLException as ex:
            self._logger.log(Level.WARNING, "Error processing query result for Android messages.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        except TskCoreException as ex:
            self._logger.log(Level.SEVERE, "Failed to add Android message artifacts.", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
        except BlackboardException as ex:
            self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        except NoCurrentCaseException as ex:
            self._logger.log(Level.WARNING, "No case currently open.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        finally:
            contactDb.close()
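For illustration only (not part of the removed files): a pure-Python sketch of the row-coalescing logic above - rows arrive ordered by display_name, one row per phone number or email, and a contact is emitted whenever the name changes and once more at the end. The tuple shape (display_name, mimetype, data1) is an assumption for the sketch.

def coalesce_contact_rows(rows):
    # rows: iterable of (display_name, mimetype, data1) ordered by display_name.
    # Returns a list of (name, phone, email) contacts.
    contacts = []
    old_name, phone, email = None, None, None
    for name, mimetype, data1 in rows:
        if old_name and name != old_name:
            if phone or email:
                contacts.append((old_name, phone, email))
            phone, email = None, None
        if mimetype == "vnd.android.cursor.item/phone_v2":
            phone = data1
        else:
            email = data1
        old_name = name
    if old_name and (phone or email):
        contacts.append((old_name, phone, email))
    return contacts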
@ -1,521 +0,0 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2019-2020 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import json
|
||||
import traceback
|
||||
import general
|
||||
import ast
|
||||
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Long
|
||||
from java.lang import String
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.apache.commons.codec.binary import Base64
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.datamodel import Account
|
||||
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
|
||||
from org.sleuthkit.datamodel.blackboardutils.attributes import MessageAttachments
|
||||
from org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments import FileAttachment
|
||||
from org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments import URLAttachment
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CallMediaType
|
||||
|
||||
|
||||
class FBMessengerAnalyzer(general.AndroidComponentAnalyzer):
|
||||
|
||||
"""
|
||||
Facebook Messenger is a messaging application for Facebook users.
|
||||
It can be used to have one-to-one as well as group message conversations -
|
||||
text, send photos videos and other media file. It can also be used to make
|
||||
phone calls - audio as well as video.
|
||||
|
||||
This module finds the SQLite DB for FB messenger, parses the DB for contacts,
|
||||
messages, and call logs and creates artifacts.
|
||||
|
||||
FB messenger requires Facebook accounts. Although Facebook and Facebook Messenger are
|
||||
two different applications with separate packages, their database structure is very similar
|
||||
and FB messenger seems to share the FB database if FB is inatalled.
|
||||
|
||||
FB assigns each user a unique FB id, fbid - a long numeric id.
|
||||
Each user also has a display name.
|
||||
|
||||
FB uses a notion of user key, which is of the form FACEBOOK:<fbid>
|
||||
|
||||
FB messenger version 239.0.0.41 has the following database structure:
|
||||
- contacts_db2
|
||||
-- A contacts table that stores the contacts/friends.
|
||||
|
||||
- threads_db2
|
||||
-- A messages table to store the messages
|
||||
--- A sender column - this is a JSON structure which has a the FB user key of sender.
|
||||
--- A attachments column - a JSON structure that has details of the attachments,
|
||||
--- A msg_type column: message type - indicates whether its a text/mms message or a audio/video call
|
||||
Following values have been observed:
|
||||
-1: UNKNOWN - need more research, have no meaningful text though.
|
||||
observed for 1-to-1, Group message hreads as well as Montage (wall messages)
|
||||
0: User messages in 1-to-1, Group and montage threads
|
||||
8: System generated messages in 1-to-1, Group and montage threads
|
||||
e.g. "You created a the group", "You can now talk to XYZ".....
|
||||
9: System generated event records for one to one calls ??
|
||||
* have no text,
|
||||
* admin_text_thread_rtc_event has the specific event
|
||||
"one-to-one-call-ended", "missed-call" (havent seen a "one-to-one-call-started" event??)
|
||||
203: System generated event records for group calls ??
|
||||
* have no text,
|
||||
* admin_text_thread_rtc_event has the specific event
|
||||
"group-call-started", "group-call_ended"
|
||||
--- A pending_send_media_attachment - a JSON structure that has details of attachments that may or may not have been sent.
|
||||
--- A admin_text_thread_rtc_event column - has specific text events such as- "one-on-one-call-ended"
|
||||
--- A thread_key column - identifies the message thread
|
||||
--- A timestamp_ms column - date/time message was sent
|
||||
--- A text column - message text, if applicable
|
||||
|
||||
-- A thread_participants table to identify participants in a particular thread
|
||||
--- A thread_key column - identifies a message thread
|
||||
--- A user_key column to identify a particpant in the thread
|
||||
|
||||
|
||||
-- A thread_users to identify the user details, primarliy name, of a user that has been a particiapnt in any thread
|
||||
--- A user_key column - identifies a unique user
|
||||
--- A name column - user display name
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
|
||||
self._FB_MESSENGER_PACKAGE_NAME = "com.facebook.orca"
|
||||
self._FACEBOOK_PACKAGE_NAME = "com.facebook.katana"
|
||||
self._MODULE_NAME = "FB Messenger Analyzer"
|
||||
self._MESSAGE_TYPE = "Facebook Messenger"
|
||||
self._VERSION = "239.0.0.41" ## FB version number. Did not find independent version number in FB Messenger
|
||||
|
||||
self.selfAccountId = None
|
||||
self.current_case = None
|
||||
|
||||
## Analyze contacts
|
||||
def analyzeContacts(self, dataSource, fileManager, context):
|
||||
|
||||
## FB messenger and FB have same database structure for contacts.
|
||||
## In our dataset, the FB Messenger database was empty.
|
||||
## But the FB database had the data.
|
||||
|
||||
contactsDbs = AppSQLiteDB.findAppDatabases(dataSource, "contacts_db2", True, self._FACEBOOK_PACKAGE_NAME)
|
||||
for contactsDb in contactsDbs:
|
||||
try:
|
||||
## The device owner's FB account details can be found in the contacts table in a row with added_time_ms of 0.
|
||||
selfAccountResultSet = contactsDb.runQuery("SELECT fbid, display_name FROM contacts WHERE added_time_ms = 0")
|
||||
if selfAccountResultSet:
|
||||
if not self.selfAccountId:
|
||||
self.selfAccountId = selfAccountResultSet.getString("fbid")
|
||||
|
||||
if self.selfAccountId is not None:
|
||||
contactsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, contactsDb.getDBFile(),
|
||||
Account.Type.FACEBOOK, Account.Type.FACEBOOK, self.selfAccountId )
|
||||
else:
|
||||
contactsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, contactsDb.getDBFile(),
|
||||
Account.Type.FACEBOOK)
|
||||
|
||||
## get the other contacts/friends
|
||||
contactsResultSet = contactsDb.runQuery("SELECT fbid, display_name, added_time_ms FROM contacts WHERE added_time_ms <> 0")
|
||||
if contactsResultSet is not None:
|
||||
while contactsResultSet.next():
|
||||
fbid = contactsResultSet.getString("fbid")
|
||||
contactName = contactsResultSet.getString("display_name")
|
||||
dateCreated = contactsResultSet.getLong("added_time_ms") / 1000
|
||||
|
||||
## create additional attributes for contact.
|
||||
additionalAttributes = ArrayList();
|
||||
additionalAttributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ID, self._MODULE_NAME, fbid))
|
||||
additionalAttributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, self._MODULE_NAME, dateCreated))
|
||||
|
||||
contactsDBHelper.addContact( contactName, ## contact name
|
||||
"", ## phone
|
||||
"", ## home phone
|
||||
"", ## mobile
|
||||
"", ## email
|
||||
additionalAttributes)
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query result for account", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add FB Messenger contact artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
contactsDb.close()
|
||||
|
||||
|
||||
## Extracts recipeint id from 'user_key' column and adds recipient to given list,
|
||||
## if the recipeint id is not the same as sender id
|
||||
def addRecipientToList(self, user_key, senderId, recipientList):
|
||||
if user_key is not None:
|
||||
recipientId = user_key.replace('FACEBOOK:', '')
|
||||
if recipientId != senderId:
|
||||
recipientList.append(recipientId)
|
||||
|
||||
|
||||
## Extracts sender id from the json in 'sender' column.
|
||||
def getSenderIdFromJson(self, senderJsonStr):
|
||||
senderId = None;
|
||||
if senderJsonStr is not None:
|
||||
sender_dict = json.loads(senderJsonStr)
|
||||
senderId = sender_dict['user_key']
|
||||
senderId = senderId.replace('FACEBOOK:', '')
|
||||
|
||||
return senderId
|
||||
|
||||
## determines communication direction by comparing senderId with selfAccountId
|
||||
def deduceDirectionFromSenderId(self, senderId):
|
||||
direction = CommunicationDirection.UNKNOWN
|
||||
if senderId is not None:
|
||||
if senderId == self.selfAccountId:
|
||||
direction = CommunicationDirection.OUTGOING
|
||||
else:
|
||||
direction = CommunicationDirection.INCOMING
|
||||
return direction
|
||||
|
||||
## Get the arrayList from the json passed in
|
||||
def getJPGListFromJson(self, jpgJson):
|
||||
jpgArray = ArrayList()
|
||||
# The urls attachment will come across as unicode unless we use ast.literal_eval to change it to a dictionary
|
||||
jpgDict = ast.literal_eval(jpgJson)
|
||||
for jpgPreview in jpgDict.iterkeys():
|
||||
# Need to use ast.literal_eval so that the string can be converted to a dictionary
|
||||
jpgUrlDict = ast.literal_eval(jpgDict[jpgPreview])
|
||||
jpgArray.add(URLAttachment(jpgUrlDict["src"]))
|
||||
return jpgArray
|
||||
|
||||
## Analyzes messages
|
||||
def analyzeMessages(self, threadsDb, threadsDBHelper):
|
||||
try:
|
||||
|
||||
## Messages are found in the messages table.
|
||||
## This query filters messages by msg_type to only get actual user created conversation messages (msg_type 0).
|
||||
## The participant ids can be found in the thread_participants table.
|
||||
## Participant names are found in thread_users table.
|
||||
## Joining these tables produces multiple rows per message, one row for each recipient.
|
||||
## The result set is processed to collect the multiple recipients for a given message.
|
||||
sqlString = """
|
||||
SELECT msg_id, text, sender, timestamp_ms, msg_type, messages.thread_key as thread_key,
|
||||
snippet, thread_participants.user_key as user_key, thread_users.name as name,
|
||||
attachments, pending_send_media_attachment
|
||||
FROM messages
|
||||
JOIN thread_participants ON messages.thread_key = thread_participants.thread_key
|
||||
JOIN thread_users ON thread_participants.user_key = thread_users.user_key
|
||||
WHERE msg_type = 0
|
||||
ORDER BY msg_id
|
||||
"""
|
||||
|
||||
messagesResultSet = threadsDb.runQuery(sqlString)
|
||||
if messagesResultSet is not None:
|
||||
oldMsgId = None
|
||||
|
||||
direction = CommunicationDirection.UNKNOWN
|
||||
fromId = None
|
||||
recipientIdsList = None
|
||||
timeStamp = -1
|
||||
msgText = ""
|
||||
threadId = ""
|
||||
messageAttachments = None
|
||||
currentCase = Case.getCurrentCaseThrows()
|
||||
|
||||
while messagesResultSet.next():
|
||||
msgId = messagesResultSet.getString("msg_id")
|
||||
|
||||
# new msg begins when msgId changes
|
||||
if msgId != oldMsgId:
|
||||
# Create message artifact with collected attributes
|
||||
if oldMsgId is not None:
|
||||
messageArtifact = threadsDBHelper.addMessage(
|
||||
self._MESSAGE_TYPE,
|
||||
direction,
|
||||
fromId,
|
||||
recipientIdsList,
|
||||
timeStamp,
|
||||
MessageReadStatus.UNKNOWN,
|
||||
"", # subject
|
||||
msgText,
|
||||
threadId)
|
||||
|
||||
if (messageAttachments is not None):
|
||||
threadsDBHelper.addAttachments(messageArtifact, messageAttachments)
|
||||
messageAttachments = None
|
||||
|
||||
oldMsgId = msgId
|
||||
|
||||
# New message - collect all attributes
|
||||
recipientIdsList = []
|
||||
|
||||
## get sender id by parsing JSON in sender column
|
||||
fromId = self.getSenderIdFromJson(messagesResultSet.getString("sender"))
|
||||
direction = self.deduceDirectionFromSenderId(fromId)
|
||||
|
||||
# Get recipient and add to list
|
||||
self.addRecipientToList(messagesResultSet.getString("user_key"), fromId,
|
||||
recipientIdsList)
|
||||
|
||||
timeStamp = messagesResultSet.getLong("timestamp_ms") / 1000
|
||||
|
||||
# Get msg text
|
||||
# Sometimes there may not be an explict msg text,
|
||||
# but an app generated snippet instead
|
||||
msgText = messagesResultSet.getString("text")
|
||||
if not msgText:
|
||||
msgText = messagesResultSet.getString("snippet")
|
||||
|
||||
# Get attachments and pending attachments if they exist
|
||||
attachment = messagesResultSet.getString("attachments")
|
||||
pendingAttachment = messagesResultSet.getString("pending_send_media_attachment")
|
||||
|
||||
urlAttachments = ArrayList()
|
||||
fileAttachments = ArrayList()
|
||||
|
||||
if ((attachment is not None) or (pendingAttachment is not None)):
|
||||
if (attachment is not None):
|
||||
attachmentDict = json.loads(attachment)[0]
|
||||
if (attachmentDict["mime_type"] == "image/jpeg"):
|
||||
urls = attachmentDict.get("urls", None)
|
||||
if (urls is not None):
|
||||
urlAttachments = self.getJPGListFromJson(urls)
|
||||
|
||||
elif (attachmentDict["mime_type"] == "video/mp4"):
|
||||
# filename does not have an associated path with it so it will be ignored
|
||||
|
||||
urls = attachmentDict.get("urls", None)
|
||||
if (urls is not None):
|
||||
urlAttachments = self.getJPGListFromJson(urls)
|
||||
|
||||
video_data_url = attachmentDict.get("video_data_url", None)
|
||||
if (video_data_url is not None):
|
||||
urlAttachments.add(URLAttachment(video_data_url))
|
||||
video_data_thumbnail_url = attachmentDict.get("video_data_thumbnail_url", None)
|
||||
|
||||
if (video_data_thumbnail_url is not None):
|
||||
urlAttachments.add(URLAttachment(video_data_thumbnail_url))
|
||||
elif (attachmentDict["mime_type"] == "audio/mpeg"):
|
||||
audioUri = attachmentDict.get("audio_uri", None)
|
||||
if (audioUri is None or audioUri == ""):
|
||||
continue
|
||||
else:
|
||||
fileAttachments.add(FileAttachment(currentCase.getSleuthkitCase(), threadsDb.getDBFile().getDataSource(), audioUri.replace("file://","")))
|
||||
|
||||
else:
|
||||
self._logger.log(Level.INFO, "Attachment type not handled: " + attachmentDict["mime_type"])
|
||||
|
||||
if (pendingAttachment is not None):
|
||||
pendingAttachmentDict = json.loads(pendingAttachment)[0]
|
||||
pendingAttachmentUri = pendingAttachmentDict.get("uri", None)
|
||||
if (pendingAttachmentUri is not None):
|
||||
fileAttachments.add(FileAttachment(currentCase.getSleuthkitCase(), threadsDb.getDBFile().getDataSource(), pendingAttachmentUri.replace("file://","")))
|
||||
|
||||
messageAttachments = MessageAttachments(fileAttachments, urlAttachments)
|
||||
|
||||
threadId = messagesResultSet.getString("thread_key")
|
||||
|
||||
else: # same msgId as last, just collect recipient from current row
|
||||
self.addRecipientToList(messagesResultSet.getString("user_key"), fromId,
|
||||
recipientIdsList)
|
||||
|
||||
|
||||
# at the end of the loop, add last message
|
||||
messageArtifact = threadsDBHelper.addMessage(
|
||||
self._MESSAGE_TYPE,
|
||||
direction,
|
||||
fromId,
|
||||
recipientIdsList,
|
||||
timeStamp,
|
||||
MessageReadStatus.UNKNOWN,
|
||||
"", # subject
|
||||
msgText,
|
||||
threadId)
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query result for FB Messenger messages.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add FB Messenger message artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
## Analyzes call logs
|
||||
def analyzeCallLogs(self, threadsDb, threadsDBHelper):
|
||||
try:
|
||||
|
||||
## Call logs are found in the messages table.
|
||||
## msg_type indicates type of call:
|
||||
## 9: one to one calls
|
||||
## 203: group call
|
||||
## 1-to-1 calls only have a call_ended record.
|
||||
## group calls have a call_started_record as well as call_ended recorded, with *different* message ids.
|
||||
## all the data we need can be found in the call_ended record.
|
||||
|
||||
sqlString = """
|
||||
SELECT msg_id, text, sender, timestamp_ms, msg_type, admin_text_thread_rtc_event,
|
||||
generic_admin_message_extensible_data,
|
||||
messages.thread_key as thread_key,
|
||||
thread_participants.user_key as user_key,
|
||||
thread_users.name as name
|
||||
FROM messages
|
||||
JOIN thread_participants ON messages.thread_key = thread_participants.thread_key
|
||||
JOIN thread_users ON thread_participants.user_key = thread_users.user_key
|
||||
WHERE msg_type = 9 OR (msg_type = 203 AND admin_text_thread_rtc_event = 'group_call_ended')
|
||||
ORDER BY msg_id
|
||||
"""
|
||||
|
||||
messagesResultSet = threadsDb.runQuery(sqlString)
|
||||
if messagesResultSet is not None:
|
||||
oldMsgId = None
|
||||
|
||||
direction = CommunicationDirection.UNKNOWN
|
||||
callerId = None
|
||||
calleeIdsList = None
|
||||
startTimeStamp = -1
|
||||
endTimeStamp = -1
|
||||
duration = 0
|
||||
mediaType = CallMediaType.AUDIO
|
||||
|
||||
while messagesResultSet.next():
|
||||
msgId = messagesResultSet.getString("msg_id")
|
||||
|
||||
# new call begins when msgId changes
|
||||
if msgId != oldMsgId:
|
||||
# Create call log artifact with collected attributes
|
||||
if oldMsgId is not None:
|
||||
messageArtifact = threadsDBHelper.addCalllog(
|
||||
direction,
|
||||
callerId,
|
||||
calleeIdsList,
|
||||
startTimeStamp,
|
||||
endTimeStamp,
|
||||
mediaType )
|
||||
|
||||
oldMsgId = msgId
|
||||
|
||||
# New message - collect all attributes
|
||||
calleeIdsList = []
|
||||
|
||||
## get caller id by parsing JSON in sender column
|
||||
callerId = self.getSenderIdFromJson(messagesResultSet.getString("sender"))
|
||||
direction = self.deduceDirectionFromSenderId(callerId)
|
||||
|
||||
# Get recipient and add to list
|
||||
self.addRecipientToList(messagesResultSet.getString("user_key"), callerId,
|
||||
calleeIdsList)
|
||||
|
||||
# the timestamp from call ended msg is used as end timestamp
|
||||
endTimeStamp = messagesResultSet.getLong("timestamp_ms") / 1000
|
||||
|
||||
# parse the generic_admin_message_extensible_data JSON to extract the duration and video fields
|
||||
adminDataJsonStr = messagesResultSet.getString("generic_admin_message_extensible_data")
|
||||
if adminDataJsonStr is not None:
|
||||
adminData_dict = json.loads(adminDataJsonStr)
|
||||
duration = adminData_dict['call_duration'] # call duration in seconds
|
||||
isVideo = adminData_dict['video']
|
||||
if isVideo:
|
||||
mediaType = CallMediaType.VIDEO
|
||||
|
||||
startTimeStamp = endTimeStamp - duration
|
||||
|
||||
else: # same msgId as last, just collect callee from current row
|
||||
self.addRecipientToList(messagesResultSet.getString("user_key"), callerId,
|
||||
calleeIdsList)
|
||||
|
||||
# at the end of the loop, add last message
|
||||
messageArtifact = threadsDBHelper.addCalllog(
|
||||
direction,
|
||||
callerId,
|
||||
calleeIdsList,
|
||||
startTimeStamp,
|
||||
endTimeStamp,
|
||||
mediaType )
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query result for FB Messenger call logs.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add FB Messenger call log artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post FB Messenger call log artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
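For reference, a minimal sketch of the transformation performed above: a single call_ended row supplies the end time and, via the generic_admin_message_extensible_data JSON, the duration and video flag, from which the start time and media type are derived (column and key names are taken from the query above; this is an illustration, not the module's own helper).

import json
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CallMediaType

def parse_call_ended_row(timestamp_ms, admin_data_json_str):
    # the call_ended message timestamp is the end of the call (ms -> seconds)
    end_time = timestamp_ms / 1000
    duration = 0                      # seconds
    media_type = CallMediaType.AUDIO
    if admin_data_json_str is not None:
        admin_data = json.loads(admin_data_json_str)
        duration = admin_data['call_duration']
        if admin_data['video']:
            media_type = CallMediaType.VIDEO
    # only the call_ended record exists, so the start time must be derived
    start_time = end_time - duration
    return start_time, end_time, media_type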
|
||||
|
||||
## Analyze messages and call log threads
|
||||
def analyzeMessagesAndCallLogs(self, dataSource, fileManager, context):
|
||||
threadsDbs = AppSQLiteDB.findAppDatabases(dataSource, "threads_db2", True, self._FB_MESSENGER_PACKAGE_NAME)
|
||||
for threadsDb in threadsDbs:
|
||||
try:
|
||||
if self.selfAccountId is not None:
|
||||
threadsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, threadsDb.getDBFile(),
|
||||
Account.Type.FACEBOOK, Account.Type.FACEBOOK, self.selfAccountId )
|
||||
else:
|
||||
threadsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, threadsDb.getDBFile(),
|
||||
Account.Type.FACEBOOK)
|
||||
|
||||
self.analyzeMessages(threadsDb, threadsDBHelper)
|
||||
self.analyzeCallLogs(threadsDb, threadsDBHelper)
|
||||
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to to create CommunicationArtifactsHelper for FB Messenger.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
finally:
|
||||
threadsDb.close()
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
try:
|
||||
self.current_case = Case.getCurrentCaseThrows()
|
||||
except NoCurrentCaseException as ex:
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
return
|
||||
|
||||
self.analyzeContacts(dataSource, fileManager, context)
|
||||
self.analyzeMessagesAndCallLogs(dataSource, fileManager, context)
|
||||
|
||||
|
@ -1,59 +0,0 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2016-2020 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
"""
|
||||
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel import CommunicationsUtils
|
||||
|
||||
MODULE_NAME = "Android Analyzer"
|
||||
|
||||
"""
|
||||
Parent class of the Android component analyzers
|
||||
"""
|
||||
class AndroidComponentAnalyzer:
|
||||
# The Analyzer should implement this method
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
raise NotImplementedError
|
||||
|
||||
"""
|
||||
A utility method to append a list of attachments to a message body
|
||||
"""
|
||||
def appendAttachmentList(msgBody, attachmentsList):
|
||||
body = msgBody
|
||||
if attachmentsList:
|
||||
body = body + "\n\n------------Attachments------------\n"
|
||||
body = body + "\n".join(list(filter(None, attachmentsList)))
|
||||
|
||||
return body
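A minimal usage sketch, with hypothetical attachment paths; None entries are filtered out by the join above:

body = appendAttachmentList("See the photos",
                            ["/sdcard/DCIM/img_001.jpg", None, "/sdcard/DCIM/img_002.jpg"])
# body now ends with an "------------Attachments------------" block listing both paths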
|
||||
"""
|
||||
Checks if the given string might be a phone number.
|
||||
"""
|
||||
def isValidPhoneNumber(data):
|
||||
return CommunicationsUtils.isValidPhoneNumber(data)
|
||||
|
||||
|
||||
|
||||
"""
|
||||
Checks if the given string is a valid email address.
|
||||
"""
|
||||
def isValidEmailAddress(data):
|
||||
return CommunicationsUtils.isValidEmailAddress(data)
|
||||
|
||||
|
@ -1,155 +0,0 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2016-2018 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Double
|
||||
from java.lang import Long
|
||||
from java.sql import Connection
|
||||
from java.sql import DriverManager
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.autopsy.casemodule.services import FileManager
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import Blackboard
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.datamodel.blackboardutils import GeoArtifactsHelper
|
||||
from org.sleuthkit.datamodel.blackboardutils.attributes import GeoWaypoints
|
||||
from org.sleuthkit.datamodel.blackboardutils.attributes.GeoWaypoints import Waypoint
|
||||
|
||||
import traceback
|
||||
import general
|
||||
|
||||
"""
|
||||
Finds and parses the Google Maps database.
|
||||
"""
|
||||
class GoogleMapLocationAnalyzer(general.AndroidComponentAnalyzer):
|
||||
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self.current_case = None
|
||||
self.PROGRAM_NAME = "Google Maps History"
|
||||
self.CAT_DESTINATION = "Destination"
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
try:
|
||||
self.current_case = Case.getCurrentCaseThrows()
|
||||
except NoCurrentCaseException as ex:
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
return
|
||||
|
||||
try:
|
||||
absFiles = fileManager.findFiles(dataSource, "da_destination_history")
|
||||
if absFiles.isEmpty():
|
||||
return
|
||||
for abstractFile in absFiles:
|
||||
try:
|
||||
jFile = File(self.current_case.getTempDirectory(), str(abstractFile.getId()) + abstractFile.getName())
|
||||
ContentUtils.writeToFile(abstractFile, jFile, context.dataSourceIngestIsCancelled)
|
||||
self.__findGeoLocationsInDB(jFile.toString(), abstractFile)
|
||||
except Exception as ex:
|
||||
self._logger.log(Level.SEVERE, "Error parsing Google map locations", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
# Error finding Google map locations.
|
||||
pass
|
||||
|
||||
def __findGeoLocationsInDB(self, databasePath, abstractFile):
|
||||
if not databasePath:
|
||||
return
|
||||
|
||||
try:
|
||||
artifactHelper = GeoArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
general.MODULE_NAME, self.PROGRAM_NAME, abstractFile)
|
||||
Class.forName("org.sqlite.JDBC") # load JDBC driver
|
||||
connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
|
||||
statement = connection.createStatement()
|
||||
except (ClassNotFoundException) as ex:
|
||||
self._logger.log(Level.SEVERE, "Error loading JDBC driver", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
return
|
||||
except (SQLException) as ex:
|
||||
# Error opening database.
|
||||
return
|
||||
|
||||
resultSet = None
|
||||
try:
|
||||
resultSet = statement.executeQuery(
|
||||
"SELECT time, dest_lat, dest_lng, dest_title, dest_address, source_lat, source_lng FROM destination_history;")
|
||||
|
||||
while resultSet.next():
|
||||
time = Long.valueOf(resultSet.getString("time")) / 1000
|
||||
dest_title = resultSet.getString("dest_title")
|
||||
dest_address = resultSet.getString("dest_address")
|
||||
|
||||
dest_lat = GoogleMapLocationAnalyzer.convertGeo(resultSet.getString("dest_lat"))
|
||||
dest_lng = GoogleMapLocationAnalyzer.convertGeo(resultSet.getString("dest_lng"))
|
||||
source_lat = GoogleMapLocationAnalyzer.convertGeo(resultSet.getString("source_lat"))
|
||||
source_lng = GoogleMapLocationAnalyzer.convertGeo(resultSet.getString("source_lng"))
|
||||
|
||||
waypointlist = GeoWaypoints()
|
||||
waypointlist.addPoint(Waypoint(source_lat, source_lng, None, None))
|
||||
waypointlist.addPoint(Waypoint(dest_lat, dest_lng, None, dest_address))
|
||||
|
||||
artifactHelper.addRoute(dest_title, time, waypointlist, None)
|
||||
|
||||
except SQLException as ex:
|
||||
# Unable to execute Google map locations SQL query against database.
|
||||
pass
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add route artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except Exception as ex:
|
||||
self._logger.log(Level.SEVERE, "Error processing google maps history.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
finally:
|
||||
try:
|
||||
if resultSet is not None:
|
||||
resultSet.close()
|
||||
statement.close()
|
||||
connection.close()
|
||||
except Exception as ex:
|
||||
# Error closing the database.
|
||||
pass
|
||||
|
||||
# insert a decimal point 6 places from the end of the string.
|
||||
@staticmethod
|
||||
def convertGeo(s):
|
||||
length = len(s)
|
||||
if length > 6:
|
||||
return Double.valueOf(s[0 : length-6] + "." + s[length-6 : length])
|
||||
else:
|
||||
return Double.valueOf(s)
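For example (hypothetical encoded values from destination_history), convertGeo re-inserts the decimal point that the database drops:

GoogleMapLocationAnalyzer.convertGeo("52203970")   # -> 52.20397
GoogleMapLocationAnalyzer.convertGeo("-1234567")   # -> -1.234567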
@ -1,220 +0,0 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2019-2020 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Long
|
||||
from java.lang import String
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.apache.commons.codec.binary import Base64
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.datamodel import Account
|
||||
from org.sleuthkit.datamodel.blackboardutils.attributes import MessageAttachments
|
||||
from org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments import FileAttachment
|
||||
from org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments import URLAttachment
|
||||
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
|
||||
|
||||
import json
|
||||
import traceback
|
||||
import general
|
||||
|
||||
|
||||
class IMOAnalyzer(general.AndroidComponentAnalyzer):
|
||||
"""
|
||||
Finds the SQLite DB for IMO, parses the DB for contacts & messages,
|
||||
and adds artifacts to the case.
|
||||
|
||||
IMO version 9.8.0 has the following database structure:
|
||||
- accountdb.db
|
||||
-- An 'account' table with the id/name of the IMO account of the owner - used as the self account
|
||||
- imofriends.db - a database with contacts and messages
|
||||
-- A friends table, with id and name of the friends
|
||||
--- buid - application specific unique id
|
||||
--- name of contact
|
||||
-- A messages table which stores the message details
|
||||
--- sender/receiver buid, timestamp, message_type (1: incoming, 0: outgoing), message_read...
|
||||
--- 'imdata' column stores a json structure with all the message details, including attachments
|
||||
---- attachment file path may be specified in local_path or original_path. Original path, if available, is the better candidate.
|
||||
---- For sent files, files seem to get uploaded to IMO Servers. There is no URL available in the imdata though.
|
||||
|
||||
"""
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._PACKAGE_NAME = "com.imo.android.imous"
|
||||
self._PARSER_NAME = "IMO Parser"
|
||||
self._MESSAGE_TYPE = "IMO Message"
|
||||
self._VERSION = "9.8.0"
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
selfAccountId = None
|
||||
accountDbs = AppSQLiteDB.findAppDatabases(dataSource, "accountdb.db", True, self._PACKAGE_NAME)
|
||||
for accountDb in accountDbs:
|
||||
try:
|
||||
accountResultSet = accountDb.runQuery("SELECT uid, name FROM account")
|
||||
if accountResultSet:
|
||||
# We can determine the IMO user ID of the device owner,
# so we create an app account for that ID and use it
# as the 'self' account instead of a Device account.
|
||||
if not selfAccountId:
|
||||
selfAccountId = accountResultSet.getString("uid")
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query result for account", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
accountDb.close()
|
||||
|
||||
friendsDbs = AppSQLiteDB.findAppDatabases(dataSource, "imofriends.db", True, self._PACKAGE_NAME)
|
||||
for friendsDb in friendsDbs:
|
||||
try:
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
if selfAccountId is not None:
|
||||
friendsDBHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
|
||||
self._PARSER_NAME,
|
||||
friendsDb.getDBFile(),
|
||||
Account.Type.IMO, Account.Type.IMO, selfAccountId )
|
||||
else:
|
||||
friendsDBHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
|
||||
self._PARSER_NAME,
|
||||
friendsDb.getDBFile(),
|
||||
Account.Type.IMO )
|
||||
contactsResultSet = friendsDb.runQuery("SELECT buid, name FROM friends")
|
||||
if contactsResultSet is not None:
|
||||
while contactsResultSet.next():
|
||||
contactId = contactsResultSet.getString("buid")
|
||||
|
||||
## add a TSK_ID attribute with contact's IMO Id
|
||||
additionalAttributes = ArrayList()
|
||||
additionalAttributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ID, self._PARSER_NAME, contactId))
|
||||
|
||||
friendsDBHelper.addContact( contactsResultSet.getString("name"), ## contact name
|
||||
"", ## phone
|
||||
"", ## home phone
|
||||
"", ## mobile
|
||||
"", ## email
|
||||
additionalAttributes)
|
||||
|
||||
queryString = """
|
||||
SELECT messages.buid AS buid, imdata, last_message, timestamp, message_type, message_read, name
|
||||
FROM messages
|
||||
INNER JOIN friends ON friends.buid = messages.buid
|
||||
"""
|
||||
messagesResultSet = friendsDb.runQuery(queryString)
|
||||
if messagesResultSet is not None:
|
||||
while messagesResultSet.next():
|
||||
direction = ""
|
||||
fromId = None
|
||||
toId = None
|
||||
name = messagesResultSet.getString("name")
|
||||
uniqueId = messagesResultSet.getString("buid")
|
||||
|
||||
if (messagesResultSet.getInt("message_type") == 1):
|
||||
direction = CommunicationDirection.INCOMING
|
||||
fromId = uniqueId
|
||||
else:
|
||||
direction = CommunicationDirection.OUTGOING
|
||||
toId = uniqueId
|
||||
|
||||
|
||||
message_read = messagesResultSet.getInt("message_read")
|
||||
if (message_read == 1):
|
||||
msgReadStatus = MessageReadStatus.READ
|
||||
elif (message_read == 0):
|
||||
msgReadStatus = MessageReadStatus.UNREAD
|
||||
else:
|
||||
msgReadStatus = MessageReadStatus.UNKNOWN
|
||||
|
||||
timeStamp = messagesResultSet.getLong("timestamp") / 1000000000
|
||||
msgBody = messagesResultSet.getString("last_message")
|
||||
|
||||
messageArtifact = friendsDBHelper.addMessage(
|
||||
self._MESSAGE_TYPE,
|
||||
direction,
|
||||
fromId,
|
||||
toId,
|
||||
timeStamp,
|
||||
msgReadStatus,
|
||||
"", # subject
|
||||
msgBody,
|
||||
"") # thread id
|
||||
|
||||
|
||||
# Parse the imdata JSON structure to check if there is an attachment.
|
||||
# If one exists, create an attachment and add to the message.
|
||||
fileAttachments = ArrayList()
|
||||
urlAttachments = ArrayList()
|
||||
|
||||
imdataJsonStr = messagesResultSet.getString("imdata")
|
||||
if imdataJsonStr is not None:
|
||||
imdata_dict = json.loads(imdataJsonStr)
|
||||
|
||||
# set to none if the key doesn't exist in the dict
|
||||
attachmentOriginalPath = imdata_dict.get('original_path', None)
|
||||
attachmentLocalPath = imdata_dict.get('local_path', None)
|
||||
if attachmentOriginalPath:
|
||||
attachmentPath = attachmentOriginalPath
|
||||
else:
|
||||
attachmentPath = attachmentLocalPath
|
||||
|
||||
if attachmentPath:
|
||||
# Create a file attachment with given path
|
||||
fileAttachment = FileAttachment(current_case.getSleuthkitCase(), friendsDb.getDBFile().getDataSource(), attachmentPath)
|
||||
fileAttachments.add(fileAttachment)
|
||||
|
||||
msgAttachments = MessageAttachments(fileAttachments, [])
|
||||
attachmentArtifact = friendsDBHelper.addAttachments(messageArtifact, msgAttachments)
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query result for IMO friends", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add IMO message artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except NoCurrentCaseException as ex:
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
friendsDb.close()
|
||||
|
||||
|
||||
|
@ -1,101 +0,0 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2019 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Long
|
||||
from java.lang import String
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.apache.commons.codec.binary import Base64
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.datamodel.blackboardutils import ArtifactsHelper
|
||||
|
||||
import traceback
|
||||
import general
|
||||
|
||||
|
||||
class InstalledApplicationsAnalyzer(general.AndroidComponentAnalyzer):
|
||||
|
||||
"""
|
||||
Android has a database to track the applications that are
|
||||
purchased and installed on the phone.
|
||||
|
||||
This module finds the SQLite DB for installed applications, and creates artifacts.

Android 5.1.1 has the following database structure for installed applications:
- library.db
-- An ownership table that stores applications purchased, with purchase date
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._PACKAGE_NAME = "com.android.vending"
|
||||
self._MODULE_NAME = "Android Installed Applications Analyzer"
|
||||
self._VERSION = "5.1.1" ## Android version
|
||||
self.current_case = None
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
libraryDbs = AppSQLiteDB.findAppDatabases(dataSource, "library.db", True, self._PACKAGE_NAME)
|
||||
for libraryDb in libraryDbs:
|
||||
try:
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
libraryDbHelper = ArtifactsHelper(current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, libraryDb.getDBFile())
|
||||
queryString = "SELECT doc_id, purchase_time FROM ownership"
|
||||
ownershipResultSet = libraryDb.runQuery(queryString)
|
||||
if ownershipResultSet is not None:
|
||||
while ownershipResultSet.next():
|
||||
purchase_time = ownershipResultSet.getLong("purchase_time") / 1000
|
||||
libraryDbHelper.addInstalledProgram(ownershipResultSet.getString("doc_id"),
|
||||
purchase_time)
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query result for installed applications. ", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to adding installed application artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except NoCurrentCaseException as ex:
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
libraryDb.close()
|
||||
|
@ -1,442 +0,0 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2019-2020 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Long
|
||||
from java.lang import String
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.apache.commons.codec.binary import Base64
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.datamodel import Account
|
||||
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
|
||||
from org.sleuthkit.datamodel.blackboardutils.attributes import MessageAttachments
|
||||
from org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments import FileAttachment
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
|
||||
from TskContactsParser import TskContactsParser
|
||||
from TskMessagesParser import TskMessagesParser
|
||||
from TskCallLogsParser import TskCallLogsParser
|
||||
|
||||
import traceback
|
||||
import general
|
||||
|
||||
class LineAnalyzer(general.AndroidComponentAnalyzer):
|
||||
"""
|
||||
Parses the Line App databases for contacts,
|
||||
message and call log artifacts.
|
||||
|
||||
About Line parser for v9.15.1:
|
||||
|
||||
- Line Database Design Details:
|
||||
Line has unique ids associated with their users and with their groups. These ids
|
||||
are referred to as mid in the database.
|
||||
|
||||
Databases:
|
||||
- naver_line: contains contact and msg artifacts
|
||||
- call_history: contains call artifacts
|
||||
|
||||
Tables:
|
||||
- naver_line/groups: This table contains group ids paired with metadata
|
||||
about the group (such as creator, group name, etc).
|
||||
|
||||
- naver_line/membership This table maps user mids to group ids. Each record
|
||||
contains 1 group id and 1 user mid.
|
||||
|
||||
- naver_line/chat_history This table contains all chat history for private
|
||||
(1 to 1) and group conversations. It maps a user mid
|
||||
or group id to the message details. The user mid and
|
||||
group id are stored into the same column "chat_id".
|
||||
If the message direction is incoming, the sender mid
|
||||
is stored in the from_mid column.
|
||||
|
||||
- naver_line/contacts This table contains all Line contacts known to the
|
||||
device.
|
||||
|
||||
- call_history/call_history This table contains all call history for private
|
||||
and group calls. It maps a user mid or a group id
|
||||
to the call details. The user mid and group id are
|
||||
stored in the "caller_mid" column.
|
||||
|
||||
- Implementation Details:
|
||||
1) Both group calls and single calls are extracted in one query. The general approach
|
||||
is to build one result table with both contact mids and group ids.
|
||||
This result is consistently labeled contact_list_with_groups queries below.
|
||||
This table is then joined once onto the messages table to produce all communication
|
||||
data.
|
||||
2) Both group chats and single chats are extracted in one query.
|
||||
"""
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._LINE_PACKAGE_NAME = "jp.naver.line.android"
|
||||
self._PARSER_NAME = "Line Parser"
|
||||
self._VERSION = "9.15.1"
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
try:
|
||||
contact_and_message_dbs = AppSQLiteDB.findAppDatabases(dataSource,
|
||||
"naver_line", True, self._LINE_PACKAGE_NAME)
|
||||
calllog_dbs = AppSQLiteDB.findAppDatabases(dataSource,
|
||||
"call_history", True, self._LINE_PACKAGE_NAME)
|
||||
|
||||
for contact_and_message_db in contact_and_message_dbs:
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
helper = CommunicationArtifactsHelper(
|
||||
current_case.getSleuthkitCase(), self._PARSER_NAME,
|
||||
contact_and_message_db.getDBFile(), Account.Type.LINE)
|
||||
self.parse_contacts(contact_and_message_db, helper)
|
||||
self.parse_messages(contact_and_message_db, helper, current_case)
|
||||
|
||||
for calllog_db in calllog_dbs:
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
helper = CommunicationArtifactsHelper(
|
||||
current_case.getSleuthkitCase(), self._PARSER_NAME,
|
||||
calllog_db.getDBFile(), Account.Type.LINE)
|
||||
self.parse_calllogs(dataSource, calllog_db, helper)
|
||||
|
||||
except NoCurrentCaseException as ex:
|
||||
# Error parsing Line databases.
|
||||
self._logger.log(Level.WARNING, "Error parsing the Line App Databases", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
for contact_and_message_db in contact_and_message_dbs:
|
||||
contact_and_message_db.close()
|
||||
|
||||
for calllog_db in calllog_dbs:
|
||||
calllog_db.close()
|
||||
|
||||
def parse_contacts(self, contacts_db, helper):
|
||||
try:
|
||||
contacts_parser = LineContactsParser(contacts_db, self._PARSER_NAME)
|
||||
while contacts_parser.next():
|
||||
helper.addContact(
|
||||
contacts_parser.get_contact_name(),
|
||||
contacts_parser.get_phone(),
|
||||
contacts_parser.get_home_phone(),
|
||||
contacts_parser.get_mobile_phone(),
|
||||
contacts_parser.get_email(),
|
||||
contacts_parser.get_other_attributes()
|
||||
)
|
||||
contacts_parser.close()
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error parsing the Line App Database for contacts", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
#Error adding artifact to case database... case is not complete.
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Error adding Line contact artifacts to the case database.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
#Error posting notification to blackboard
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error posting Line contact artifacts to blackboard.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
def parse_calllogs(self, dataSource, calllogs_db, helper):
|
||||
try:
|
||||
calllogs_db.attachDatabase(
|
||||
dataSource, "naver_line",
|
||||
calllogs_db.getDBFile().getParentPath(), "naver")
|
||||
|
||||
calllog_parser = LineCallLogsParser(calllogs_db)
|
||||
while calllog_parser.next():
|
||||
helper.addCalllog(
|
||||
calllog_parser.get_call_direction(),
|
||||
calllog_parser.get_phone_number_from(),
|
||||
calllog_parser.get_phone_number_to(),
|
||||
calllog_parser.get_call_start_date_time(),
|
||||
calllog_parser.get_call_end_date_time(),
|
||||
calllog_parser.get_call_type()
|
||||
)
|
||||
calllog_parser.close()
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error parsing the Line App Database for calllogs", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
#Error adding artifact to case database... case is not complete.
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Error adding Line calllog artifacts to the case database.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
#Error posting notification to blackboard
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error posting Line calllog artifacts to blackboard.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
def parse_messages(self, messages_db, helper, current_case):
|
||||
try:
|
||||
|
||||
messages_parser = LineMessagesParser(messages_db)
|
||||
while messages_parser.next():
|
||||
message_artifact = helper.addMessage(
|
||||
messages_parser.get_message_type(),
|
||||
messages_parser.get_message_direction(),
|
||||
messages_parser.get_phone_number_from(),
|
||||
messages_parser.get_phone_number_to(),
|
||||
messages_parser.get_message_date_time(),
|
||||
messages_parser.get_message_read_status(),
|
||||
messages_parser.get_message_subject(),
|
||||
messages_parser.get_message_text(),
|
||||
messages_parser.get_thread_id()
|
||||
)
|
||||
if (messages_parser.get_file_attachment() is not None):
|
||||
file_attachments = ArrayList()
|
||||
file_attachments.add(FileAttachment(current_case.getSleuthkitCase(), messages_db.getDBFile().getDataSource(), messages_parser.get_file_attachment()))
|
||||
message_attachments = MessageAttachments(file_attachments, [])
|
||||
helper.addAttachments(message_artifact, message_attachments)
|
||||
|
||||
messages_parser.close()
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error parsing the Line App Database for messages.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
#Error adding artifact to case database... case is not complete.
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Error adding Line message artifacts to the case database.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
#Error posting notification to blackboard
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error posting Line message artifacts to blackboard.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
class LineCallLogsParser(TskCallLogsParser):
|
||||
"""
|
||||
Parses out TSK_CALLLOG information from the Line database.
|
||||
TSK_CALLLOG fields that are not in the line database are given
|
||||
a default value inherited from the super class.
|
||||
"""
|
||||
|
||||
def __init__(self, calllog_db):
|
||||
super(LineCallLogsParser, self).__init__(calllog_db.runQuery(
|
||||
"""
|
||||
SELECT Substr(calls.call_type, -1) AS direction,
|
||||
calls.start_time AS start_time,
|
||||
calls.end_time AS end_time,
|
||||
contact_book_w_groups.members AS group_members,
|
||||
calls.caller_mid,
|
||||
calls.voip_type AS call_type,
|
||||
calls.voip_gc_media_type AS group_call_type
|
||||
FROM (SELECT id,
|
||||
Group_concat(M.m_id) AS members
|
||||
FROM membership AS M
|
||||
GROUP BY id
|
||||
UNION
|
||||
SELECT m_id,
|
||||
NULL
|
||||
FROM naver.contacts) AS contact_book_w_groups
|
||||
JOIN call_history AS calls
|
||||
ON calls.caller_mid = contact_book_w_groups.id
|
||||
"""
|
||||
)
|
||||
)
|
||||
self._OUTGOING_CALL_TYPE = "O"
|
||||
self._INCOMING_CALL_TYPE = "I"
|
||||
self._VIDEO_CALL_TYPE = "V"
|
||||
self._AUDIO_CALL_TYPE = "A"
|
||||
self._GROUP_CALL_TYPE = "G"
|
||||
self._GROUP_VIDEO_CALL_TYPE = "VIDEO"
|
||||
self._GROUP_AUDIO_CALL_TYPE = "AUDIO"
|
||||
|
||||
def get_call_direction(self):
|
||||
direction = self.result_set.getString("direction")
|
||||
if direction == self._OUTGOING_CALL_TYPE:
|
||||
return self.OUTGOING_CALL
|
||||
return self.INCOMING_CALL
|
||||
|
||||
def get_call_start_date_time(self):
|
||||
try:
|
||||
return long(self.result_set.getString("start_time")) / 1000
|
||||
except ValueError as ve:
|
||||
return super(LineCallLogsParser, self).get_call_start_date_time()
|
||||
|
||||
def get_call_end_date_time(self):
|
||||
try:
|
||||
return long(self.result_set.getString("end_time")) / 1000
|
||||
except ValueError as ve:
|
||||
return super(LineCallLogsParser, self).get_call_end_date_time()
|
||||
|
||||
def get_phone_number_to(self):
|
||||
if self.get_call_direction() == self.OUTGOING_CALL:
|
||||
group_members = self.result_set.getString("group_members")
|
||||
if group_members is not None:
|
||||
group_members = group_members.split(",")
|
||||
return group_members
|
||||
|
||||
return self.result_set.getString("caller_mid")
|
||||
return super(LineCallLogsParser, self).get_phone_number_to()
|
||||
|
||||
def get_phone_number_from(self):
|
||||
if self.get_call_direction() == self.INCOMING_CALL:
|
||||
return self.result_set.getString("caller_mid")
|
||||
return super(LineCallLogsParser, self).get_phone_number_from()
|
||||
|
||||
def get_call_type(self):
|
||||
call_type = self.result_set.getString("call_type")
|
||||
if call_type == self._VIDEO_CALL_TYPE:
|
||||
return self.VIDEO_CALL
|
||||
if call_type == self._AUDIO_CALL_TYPE:
|
||||
return self.AUDIO_CALL
|
||||
if call_type == self._GROUP_CALL_TYPE:
|
||||
g_type = self.result_set.getString("group_call_type")
|
||||
if g_type == self._GROUP_VIDEO_CALL_TYPE:
|
||||
return self.VIDEO_CALL
|
||||
if g_type == self._GROUP_AUDIO_CALL_TYPE:
|
||||
return self.AUDIO_CALL
|
||||
return super(LineCallLogsParser, self).get_call_type()
|
||||
|
||||
class LineContactsParser(TskContactsParser):
|
||||
"""
|
||||
Parses out TSK_CONTACT information from the Line database.
|
||||
TSK_CONTACT fields that are not in the line database are given
|
||||
a default value inherited from the super class.
|
||||
"""
|
||||
|
||||
def __init__(self, contact_db, analyzer):
|
||||
super(LineContactsParser, self).__init__(contact_db.runQuery(
|
||||
"""
|
||||
SELECT m_id,
|
||||
server_name
|
||||
FROM contacts
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
self._PARENT_ANALYZER = analyzer
|
||||
|
||||
def get_contact_name(self):
|
||||
return self.result_set.getString("server_name")
|
||||
|
||||
def get_other_attributes(self):
|
||||
return [BlackboardAttribute(
|
||||
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ID,
|
||||
self._PARENT_ANALYZER,
|
||||
self.result_set.getString("m_id"))]
|
||||
|
||||
|
||||
class LineMessagesParser(TskMessagesParser):
|
||||
"""
|
||||
Parse out TSK_MESSAGE information from the Line database.
|
||||
TSK_MESSAGE fields that are not in the line database are given
|
||||
a default value inherited from the super class.
|
||||
"""
|
||||
|
||||
def __init__(self, message_db):
|
||||
super(LineMessagesParser, self).__init__(message_db.runQuery(
|
||||
"""
|
||||
SELECT contact_book_w_groups.id,
|
||||
contact_book_w_groups.members,
|
||||
messages.from_mid,
|
||||
messages.content,
|
||||
messages.created_time,
|
||||
messages.attachement_type,
|
||||
messages.attachement_local_uri,
|
||||
messages.status
|
||||
FROM (SELECT id,
|
||||
Group_concat(M.m_id) AS members
|
||||
FROM membership AS M
|
||||
GROUP BY id
|
||||
UNION
|
||||
SELECT m_id,
|
||||
NULL
|
||||
FROM contacts) AS contact_book_w_groups
|
||||
JOIN chat_history AS messages
|
||||
ON messages.chat_id = contact_book_w_groups.id
|
||||
WHERE attachement_type != 6
|
||||
"""
|
||||
)
|
||||
)
|
||||
self._LINE_MESSAGE_TYPE = "Line Message"
|
||||
#From the limited test data, it appeared that incoming
#messages were only associated with status 1, while statuses 3 and 7
#were only associated with outgoing messages.
|
||||
self._INCOMING_MESSAGE_TYPE = 1
|
||||
|
||||
def get_message_type(self):
|
||||
return self._LINE_MESSAGE_TYPE
|
||||
|
||||
def get_message_date_time(self):
|
||||
created_time = self.result_set.getString("created_time")
|
||||
try:
|
||||
#Get time in seconds (created_time is stored in ms from epoch)
|
||||
return long(created_time) / 1000
|
||||
except ValueError as ve:
|
||||
return super(LineMessagesParser, self).get_message_date_time()
|
||||
|
||||
def get_message_text(self):
|
||||
content = self.result_set.getString("content")
|
||||
return content
|
||||
|
||||
def get_message_direction(self):
|
||||
if self.result_set.getInt("status") == self._INCOMING_MESSAGE_TYPE:
|
||||
return self.INCOMING
|
||||
return self.OUTGOING
|
||||
|
||||
def get_phone_number_from(self):
|
||||
if self.get_message_direction() == self.INCOMING:
|
||||
from_mid = self.result_set.getString("from_mid")
|
||||
if from_mid is not None:
|
||||
return from_mid
|
||||
return super(LineMessagesParser, self).get_phone_number_from()
|
||||
|
||||
def get_phone_number_to(self):
|
||||
if self.get_message_direction() == self.OUTGOING:
|
||||
group = self.result_set.getString("members")
|
||||
if group is not None:
|
||||
group = group.split(",")
|
||||
return group
|
||||
|
||||
return self.result_set.getString("id")
|
||||
|
||||
return super(LineMessagesParser, self).get_phone_number_to()
|
||||
|
||||
def get_thread_id(self):
|
||||
members = self.result_set.getString("members")
|
||||
if members is not None:
|
||||
return self.result_set.getString("id")
|
||||
return super(LineMessagesParser, self).get_thread_id()
|
||||
|
||||
def get_file_attachment(self):
|
||||
if (self.result_set.getString("attachement_local_uri") is None):
|
||||
return None
|
||||
# If "content:" in the beginning of the string we cannot determine at this point where a file resides. Ignoring for
|
||||
# now unless data can be obtained to determine where the file may reside.
|
||||
elif ("content:" in self.result_set.getString("attachement_local_uri")):
|
||||
return None
|
||||
else:
|
||||
return self.result_set.getString("attachement_local_uri")
|
||||
|
@ -1,151 +0,0 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2016 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import jarray
|
||||
import inspect
|
||||
import traceback
|
||||
|
||||
from java.util.logging import Level
|
||||
from org.sleuthkit.autopsy.coreutils import Version
|
||||
from org.sleuthkit.autopsy.ingest import IngestModuleFactory
|
||||
from org.sleuthkit.autopsy.ingest import DataSourceIngestModule
|
||||
from org.sleuthkit.autopsy.ingest import IngestModuleFactoryAdapter
|
||||
from org.sleuthkit.autopsy.ingest import IngestModuleIngestJobSettings
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.casemodule.services import FileManager
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.ingest import DataSourceIngestModuleProgress
|
||||
from org.sleuthkit.autopsy.ingest import IngestModule
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.autopsy.ingest import DataSourceIngestModule
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.autopsy.ingest import IngestMessage
|
||||
|
||||
import general
|
||||
import browserlocation
|
||||
import cachelocation
|
||||
import calllog
|
||||
import contact
|
||||
import googlemaplocation
|
||||
import tangomessage
|
||||
import textmessage
|
||||
import wwfmessage
|
||||
import imo
|
||||
import xender
|
||||
import zapya
|
||||
import shareit
|
||||
import viber
|
||||
import skype
|
||||
import line
|
||||
import whatsapp
|
||||
import textnow
|
||||
import sbrowser
|
||||
import operabrowser
|
||||
import oruxmaps
|
||||
import installedapps
|
||||
import fbmessenger
|
||||
|
||||
|
||||
class AndroidModuleFactory(IngestModuleFactoryAdapter):
|
||||
|
||||
moduleName = general.MODULE_NAME
|
||||
|
||||
def getModuleDisplayName(self):
|
||||
return self.moduleName
|
||||
|
||||
def getModuleDescription(self):
|
||||
return "Extracts Android system and third-party app data."
|
||||
|
||||
def getModuleVersionNumber(self):
|
||||
return Version.getVersion()
|
||||
|
||||
def isDataSourceIngestModuleFactory(self):
|
||||
return True
|
||||
|
||||
def createDataSourceIngestModule(self, ingestOptions):
|
||||
return AndroidIngestModule()
|
||||
|
||||
|
||||
class AndroidIngestModule(DataSourceIngestModule):
|
||||
|
||||
_logger = Logger.getLogger(AndroidModuleFactory.moduleName)
|
||||
|
||||
def log(self, level, msg):
|
||||
self._logger.logp(level, self.__class__.__name__, inspect.stack()[1][3], msg)
|
||||
|
||||
def __init__(self):
|
||||
self.context = None
|
||||
|
||||
def startUp(self, context):
|
||||
self.context = context
|
||||
|
||||
# Throw an IngestModule.IngestModuleException exception if there was a problem setting up
|
||||
|
||||
# Where the analysis is done.
|
||||
def process(self, dataSource, progressBar):
|
||||
|
||||
errors = []
|
||||
fileManager = Case.getCurrentCase().getServices().getFileManager()
|
||||
analyzers = [contact.ContactAnalyzer(), calllog.CallLogAnalyzer(), textmessage.TextMessageAnalyzer(),
|
||||
tangomessage.TangoMessageAnalyzer(), wwfmessage.WWFMessageAnalyzer(),
|
||||
googlemaplocation.GoogleMapLocationAnalyzer(), browserlocation.BrowserLocationAnalyzer(),
|
||||
cachelocation.CacheLocationAnalyzer(), imo.IMOAnalyzer(),
|
||||
xender.XenderAnalyzer(), zapya.ZapyaAnalyzer(), shareit.ShareItAnalyzer(),
|
||||
line.LineAnalyzer(), whatsapp.WhatsAppAnalyzer(),
|
||||
textnow.TextNowAnalyzer(), skype.SkypeAnalyzer(), viber.ViberAnalyzer(),
|
||||
fbmessenger.FBMessengerAnalyzer(),
|
||||
sbrowser.SBrowserAnalyzer(), operabrowser.OperaAnalyzer(),
|
||||
oruxmaps.OruxMapsAnalyzer(),
|
||||
installedapps.InstalledApplicationsAnalyzer()]
|
||||
self.log(Level.INFO, "running " + str(len(analyzers)) + " analyzers")
|
||||
progressBar.switchToDeterminate(len(analyzers))
|
||||
|
||||
n = 0
|
||||
for analyzer in analyzers:
|
||||
if self.context.dataSourceIngestIsCancelled():
|
||||
return IngestModule.ProcessResult.OK
|
||||
try:
|
||||
analyzer.analyze(dataSource, fileManager, self.context)
|
||||
n += 1
|
||||
progressBar.progress(n)
|
||||
except Exception as ex:
|
||||
errors.append("Error running " + analyzer.__class__.__name__)
|
||||
self.log(Level.SEVERE, traceback.format_exc())
|
||||
errorMessage = [] # NOTE: this isn't used?
|
||||
errorMsgSubject = "" # NOTE: this isn't used?
|
||||
msgLevel = IngestMessage.MessageType.INFO
|
||||
|
||||
if errors:
|
||||
msgLevel = IngestMessage.MessageType.ERROR
|
||||
errorMessage.append("Errors were encountered")
|
||||
|
||||
errorMessage.append("<ul>") # NOTE: this was missing in the original java code
|
||||
for msg in errors:
|
||||
errorMessage.extend(["<li>", msg, "</li>\n"])
|
||||
errorMessage.append("</ul>\n")
|
||||
|
||||
if len(errors) == 1:
|
||||
errorMsgSubject = "One error was found"
|
||||
else:
|
||||
errorMsgSubject = "errors found: " + str(len(errors))
|
||||
else:
|
||||
errorMessage.append("No errors")
|
||||
errorMsgSubject = "No errors"
|
||||
|
||||
return IngestModule.ProcessResult.OK
|
@ -1,262 +0,0 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2019 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Long
|
||||
from java.lang import String
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.apache.commons.codec.binary import Base64
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.datamodel.blackboardutils import WebBrowserArtifactsHelper
|
||||
|
||||
import traceback
|
||||
import general
|
||||
|
||||
"""
|
||||
Finds the SQLite DB for Opera browser, parses the DB for Bookmarks, Cookies, Web History
|
||||
and adds artifacts to the case.
|
||||
"""
|
||||
class OperaAnalyzer(general.AndroidComponentAnalyzer):
|
||||
|
||||
"""
|
||||
Opera is a web browser on Android phones.
|
||||
|
||||
This module finds the SQLite DB for Opera, parses the DB for bookmarks,
|
||||
downloads, web history, cookies, autofill and creates artifacts.
|
||||
|
||||
|
||||
Opera version 53.1.2569 has the following database structure:
|
||||
|
||||
- cookies
|
||||
-- A cookies table to store cookies
|
||||
- history
|
||||
-- A urls table to store the history of visited urls
-- A downloads table to store downloads
- Web Data
-- An autofill table to store discrete autofill name/value pairs
-- An autofill_profile_names table to store name fields (first name, middle name, last name)
-- An autofill_profiles table to store the physical mailing address (street address, city, state, country, zip)
-- An autofill_profile_phones table to store phone numbers
-- An autofill_profile_emails table to store email addresses
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._PACKAGE_NAME = "com.opera.browser"
|
||||
self._MODULE_NAME = "Opera Analyzer"
|
||||
self._PROGRAM_NAME = "Opera"
|
||||
self._VERSION = "53.1.2569"
|
||||
self.current_case = None
|
||||
|
||||
def analyzeCookies(self, dataSource, fileManager, context):
|
||||
cookiesDbs = AppSQLiteDB.findAppDatabases(dataSource, "Cookies", True, self._PACKAGE_NAME)
|
||||
for cookiesDb in cookiesDbs:
|
||||
try:
|
||||
cookiesDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, cookiesDb.getDBFile())
|
||||
cookiesResultSet = cookiesDb.runQuery("SELECT host_key, name, value, creation_utc FROM cookies")
|
||||
if cookiesResultSet is not None:
|
||||
while cookiesResultSet.next():
|
||||
createTime = cookiesResultSet.getLong("creation_utc") / 1000000 - 11644473600 # Webkit time
|
||||
cookiesDbHelper.addWebCookie( cookiesResultSet.getString("host_key"),
|
||||
createTime,
|
||||
cookiesResultSet.getString("name"),
|
||||
cookiesResultSet.getString("value"),
|
||||
self._PROGRAM_NAME)
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for Opera cookies.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add Opera cookie artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
cookiesDb.close()
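The creation_utc arithmetic above converts a Webkit/Chrome timestamp (microseconds since 1601-01-01 UTC) to Unix epoch seconds; 11644473600 is the number of seconds between the two epochs. The same conversion as a standalone sketch (not a helper the module defines):

def webkit_to_unix_seconds(webkit_micros):
    # Webkit timestamps count microseconds since 1601-01-01 00:00:00 UTC
    return webkit_micros / 1000000 - 11644473600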
|
||||
|
||||
|
||||
def analyzeHistory(self, dataSource, fileManager, context):
|
||||
historyDbs = AppSQLiteDB.findAppDatabases(dataSource, "History", True, self._PACKAGE_NAME)
|
||||
for historyDb in historyDbs:
|
||||
try:
|
||||
historyDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, historyDb.getDBFile())
|
||||
historyResultSet = historyDb.runQuery("SELECT url, title, last_visit_time FROM urls")
|
||||
if historyResultSet is not None:
|
||||
while historyResultSet.next():
|
||||
accessTime = historyResultSet.getLong("last_visit_time") / 1000000 - 11644473600
|
||||
historyDbHelper.addWebHistory( historyResultSet.getString("url"),
|
||||
accessTime,
|
||||
"", # referrer
|
||||
historyResultSet.getString("title"),
|
||||
self._PROGRAM_NAME)
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for Opera history.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add Opera history artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
historyDb.close()
|
||||
|
||||
|
||||
|
||||
def analyzeDownloads(self, dataSource, fileManager, context):
|
||||
downloadsDbs = AppSQLiteDB.findAppDatabases(dataSource, "History", True, self._PACKAGE_NAME)
|
||||
for downloadsDb in downloadsDbs:
|
||||
try:
|
||||
downloadsDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, downloadsDb.getDBFile())
|
||||
queryString = "SELECT target_path, start_time, url FROM downloads"\
|
||||
" INNER JOIN downloads_url_chains ON downloads.id = downloads_url_chains.id"
|
||||
downloadsResultSet = downloadsDb.runQuery(queryString)
|
||||
if downloadsResultSet is not None:
|
||||
while downloadsResultSet.next():
|
||||
startTime = downloadsResultSet.getLong("start_time") / 1000000 - 11644473600 #Webkit time format
|
||||
downloadsDbHelper.addWebDownload( downloadsResultSet.getString("url"),
|
||||
startTime,
|
||||
downloadsResultSet.getString("target_path"),
|
||||
self._PROGRAM_NAME)
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for Opera downloads.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add Opera download artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
downloadsDb.close()
|
||||
|
||||
def analyzeAutofill(self, dataSource, fileManager, context):
|
||||
autofillDbs = AppSQLiteDB.findAppDatabases(dataSource, "Web Data", True, self._PACKAGE_NAME)
|
||||
for autofillDb in autofillDbs:
|
||||
try:
|
||||
autofillDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, autofillDb.getDBFile())
|
||||
autofillsResultSet = autofillDb.runQuery("SELECT name, value, count, date_created FROM autofill")
|
||||
if autofillsResultSet is not None:
|
||||
while autofillsResultSet.next():
|
||||
creationTime = autofillsResultSet.getLong("date_created") / 1000000 - 11644473600 #Webkit time format
|
||||
autofillDbHelper.addWebFormAutofill( autofillsResultSet.getString("name"),
|
||||
autofillsResultSet.getString("value"),
|
||||
creationTime,
|
||||
0,
|
||||
autofillsResultSet.getInt("count"))
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for Opera autofill.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add Opera autofill artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
autofillDb.close()
|
||||
|
||||
def analyzeWebFormAddress(self, dataSource, fileManager, context):
|
||||
webFormAddressDbs = AppSQLiteDB.findAppDatabases(dataSource, "Web Data", True, self._PACKAGE_NAME)
|
||||
for webFormAddressDb in webFormAddressDbs:
|
||||
try:
|
||||
webFormAddressDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, webFormAddressDb.getDBFile())
|
||||
queryString = """
|
||||
SELECT street_address, city, state, zipcode, country_code,
|
||||
date_modified, first_name, last_name, number, email
|
||||
FROM autofill_profiles
|
||||
INNER JOIN autofill_profile_names ON autofill_profiles.guid = autofill_profile_names.guid
|
||||
INNER JOIN autofill_profile_phones ON autofill_profiles.guid = autofill_profile_phones.guid
|
||||
INNER JOIN autofill_profile_emails ON autofill_profiles.guid = autofill_profile_emails.guid
|
||||
"""
|
||||
webFormAddressResultSet = webFormAddressDb.runQuery(queryString)
|
||||
if webFormAddressResultSet is not None:
|
||||
while webFormAddressResultSet.next():
|
||||
personName = webFormAddressResultSet.getString("first_name") + " " + webFormAddressResultSet.getString("last_name")
|
||||
address = '\n'.join([ webFormAddressResultSet.getString("street_address"),
|
||||
webFormAddressResultSet.getString("city"),
|
||||
webFormAddressResultSet.getString("state") + " " + webFormAddressResultSet.getString("zipcode"),
|
||||
webFormAddressResultSet.getString("country_code") ])
|
||||
|
||||
creationTime = webFormAddressResultSet.getLong("date_modified") / 1000000 - 11644473600
|
||||
                        webFormAddressDbHelper.addWebFormAddress( personName,
|
||||
webFormAddressResultSet.getString("email"),
|
||||
webFormAddressResultSet.getString("number"),
|
||||
address,
|
||||
creationTime,
|
||||
0,
|
||||
0)
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for Opera web form addresses.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add Opera form address artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
webFormAddressDb.close()
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
|
||||
## open current case
|
||||
try:
|
||||
self.current_case = Case.getCurrentCaseThrows()
|
||||
except NoCurrentCaseException as ex:
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
return
|
||||
|
||||
self.analyzeCookies(dataSource, fileManager, context)
|
||||
self.analyzeHistory(dataSource, fileManager, context)
|
||||
self.analyzeDownloads(dataSource, fileManager, context)
|
||||
self.analyzeAutofill(dataSource, fileManager, context)
|
||||
self.analyzeWebFormAddress(dataSource, fileManager, context)
|
||||
|
@ -1,176 +0,0 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2016-2018 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Double
|
||||
from java.lang import Long
|
||||
from java.sql import Connection
|
||||
from java.sql import DriverManager
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.autopsy.casemodule.services import FileManager
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import Blackboard
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.datamodel.blackboardutils import GeoArtifactsHelper
|
||||
from org.sleuthkit.datamodel.blackboardutils.attributes import GeoTrackPoints
|
||||
from org.sleuthkit.datamodel.blackboardutils.attributes.GeoTrackPoints import TrackPoint
|
||||
|
||||
import traceback
|
||||
import general
|
||||
|
||||
"""
|
||||
Analyzes the database created by OruxMaps.
|
||||
"""
|
||||
class OruxMapsAnalyzer(general.AndroidComponentAnalyzer):
|
||||
|
||||
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._PACKAGE_NAME = "oruxmaps"
|
||||
self._MODULE_NAME = "OruxMaps Analyzer"
|
||||
self._PROGRAM_NAME = "OruxMaps"
|
||||
self._VERSION = "7.5.7"
|
||||
self.current_case = None
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
oruxMapsTrackpointsDbs = AppSQLiteDB.findAppDatabases(dataSource, "oruxmapstracks.db", True, self._PACKAGE_NAME)
|
||||
for oruxMapsTrackpointsDb in oruxMapsTrackpointsDbs:
|
||||
try:
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
|
||||
skCase = Case.getCurrentCase().getSleuthkitCase()
|
||||
geoArtifactHelper = GeoArtifactsHelper(skCase, self._MODULE_NAME, self._PROGRAM_NAME, oruxMapsTrackpointsDb.getDBFile())
|
||||
|
||||
poiQueryString = "SELECT poilat, poilon, poialt, poitime, poiname FROM pois"
|
||||
poisResultSet = oruxMapsTrackpointsDb.runQuery(poiQueryString)
|
||||
abstractFile = oruxMapsTrackpointsDb.getDBFile()
|
||||
if poisResultSet is not None:
|
||||
while poisResultSet.next():
|
||||
latitude = poisResultSet.getDouble("poilat")
|
||||
longitude = poisResultSet.getDouble("poilon")
|
||||
time = poisResultSet.getLong("poitime") / 1000 # milliseconds since unix epoch
|
||||
name = poisResultSet.getString("poiname")
|
||||
altitude = poisResultSet.getDouble("poialt")
|
||||
|
||||
attributes = ArrayList()
|
||||
artifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK)
|
||||
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, self._MODULE_NAME, time))
|
||||
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE, self._MODULE_NAME, latitude))
|
||||
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE, self._MODULE_NAME, longitude))
|
||||
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE, self._MODULE_NAME, altitude))
|
||||
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, self._MODULE_NAME, name))
|
||||
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, self._MODULE_NAME, self._PROGRAM_NAME))
|
||||
|
||||
artifact.addAttributes(attributes)
|
||||
try:
|
||||
# index the artifact for keyword search
|
||||
blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard()
|
||||
blackboard.postArtifact(artifact, self._MODULE_NAME)
|
||||
except Blackboard.BlackboardException as ex:
|
||||
self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactID()), ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
MessageNotifyUtil.Notify.error("Failed to index trackpoint artifact for keyword search.", artifact.getDisplayName())
|
||||
|
||||
|
||||
# tracks -> segments -> trackpoints
|
||||
#
|
||||
                # The tracks and segments are collected into lists up front because running the
                # segment query while still iterating over the track result set causes an error
                # (the trackname column can no longer be found in the track query). Gathering all
                # tracks/segments first and processing them afterwards avoids this.
|
||||
trackQueryString = "SELECT _id, trackname, trackciudad FROM tracks"
|
||||
trackResultSet = oruxMapsTrackpointsDb.runQuery(trackQueryString)
|
||||
if trackResultSet is not None:
|
||||
trackResults = ArrayList()
|
||||
while trackResultSet.next():
|
||||
tempTrack = ArrayList()
|
||||
trackName = trackResultSet.getString("trackname") + " - " + trackResultSet.getString("trackciudad")
|
||||
trackId = str(trackResultSet.getInt("_id"))
|
||||
tempTrack.append(trackId)
|
||||
tempTrack.append(trackName)
|
||||
trackResults.append(tempTrack)
|
||||
for trackResult in trackResults:
|
||||
trackId = trackResult[0]
|
||||
trackName = trackResult[1]
|
||||
segmentQueryString = "SELECT _id, segname FROM segments WHERE segtrack = " + trackId
|
||||
segmentResultSet = oruxMapsTrackpointsDb.runQuery(segmentQueryString)
|
||||
if segmentResultSet is not None:
|
||||
segmentResults = ArrayList()
|
||||
while segmentResultSet.next():
|
||||
segmentName = trackName + " - " + segmentResultSet.getString("segname")
|
||||
segmentId = str(segmentResultSet.getInt("_id"))
|
||||
tempSegment = ArrayList()
|
||||
tempSegment.append(segmentId)
|
||||
tempSegment.append(segmentName)
|
||||
segmentResults.append(tempSegment)
|
||||
for segmentResult in segmentResults:
|
||||
segmentId = segmentResult[0]
|
||||
segmentName = segmentResult[1]
|
||||
trackpointsQueryString = "SELECT trkptlat, trkptlon, trkptalt, trkpttime FROM trackpoints WHERE trkptseg = " + segmentId
|
||||
trackpointsResultSet = oruxMapsTrackpointsDb.runQuery(trackpointsQueryString)
|
||||
if trackpointsResultSet is not None:
|
||||
geoPointList = GeoTrackPoints()
|
||||
while trackpointsResultSet.next():
|
||||
latitude = trackpointsResultSet.getDouble("trkptlat")
|
||||
longitude = trackpointsResultSet.getDouble("trkptlon")
|
||||
altitude = trackpointsResultSet.getDouble("trkptalt")
|
||||
time = trackpointsResultSet.getLong("trkpttime") / 1000 # milliseconds since unix epoch
|
||||
|
||||
geoPointList.addPoint(TrackPoint(latitude, longitude, altitude, segmentName, 0, 0, 0, time))
|
||||
|
||||
try:
|
||||
geoartifact = geoArtifactHelper.addTrack(segmentName, geoPointList, None)
|
||||
except Blackboard.BlackboardException as ex:
|
||||
self._logger.log(Level.SEVERE, "Error using geo artifact helper with blackboard", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
MessageNotifyUtil.Notify.error("Failed to add track artifact.", "geoArtifactHelper")
|
||||
                                    except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Error using geo artifact helper with TskCoreException", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
MessageNotifyUtil.Notify.error("Failed to add track artifact with TskCoreException.", "geoArtifactHelper")
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query result for Orux Map trackpoints.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add Orux Map trackpoint artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except NoCurrentCaseException as ex:
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
oruxMapsTrackpointsDb.close()
|
@ -1,296 +0,0 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2019 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Long
|
||||
from java.lang import String
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.apache.commons.codec.binary import Base64
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.datamodel.blackboardutils import WebBrowserArtifactsHelper
|
||||
|
||||
import traceback
|
||||
import general
|
||||
|
||||
|
||||
class SBrowserAnalyzer(general.AndroidComponentAnalyzer):
|
||||
|
||||
"""
|
||||
    SBrowser (Samsung Internet) is the default/native browser on Samsung Android phones.
|
||||
|
||||
This module finds the SQLite DB for SBrowser, parses the DB for bookmarks,
|
||||
downloads, web history, cookies, autofill and creates artifacts.
|
||||
|
||||
|
||||
SBrowser has the following database structure:
|
||||
- sbrowser.db
|
||||
            -- A bookmarks table that stores bookmarks
|
||||
- cookies
|
||||
-- A cookies table to store cookies
|
||||
- history
|
||||
            -- A urls table to store the history of visited urls
|
||||
-- A downloads table to store downloads
|
||||
- Web Data
|
||||
            -- An autofill table to store discrete autofill name/value pairs
            -- An autofill_profile_names table to store name fields (first name, middle name, last name)
            -- An autofill_profiles table to store the physical mailing address (street address, city, state, country, zip)
            -- An autofill_profile_phones table to store phone numbers
            -- An autofill_profile_emails table to store email addresses
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._PACKAGE_NAME = "com.sec.android.app.sbrowser"
|
||||
self._MODULE_NAME = "SBrowser Analyzer"
|
||||
self._PROGRAM_NAME = "SBrowser"
|
||||
self._VERSION = "10.1.00.27"
|
||||
self.current_case = None
|
||||
|
||||
def analyzeBookmarks(self, dataSource, fileManager, context):
|
||||
sbrowserDbs = AppSQLiteDB.findAppDatabases(dataSource, "sbrowser.db", True, self._PACKAGE_NAME)
|
||||
for sbrowserDb in sbrowserDbs:
|
||||
try:
|
||||
sbrowserDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, sbrowserDb.getDBFile())
|
||||
bookmarkResultSet = sbrowserDb.runQuery("SELECT url, title, created FROM bookmarks WHERE url IS NOT NULL")
|
||||
if bookmarkResultSet is not None:
|
||||
while bookmarkResultSet.next():
|
||||
createTime = bookmarkResultSet.getLong("created") / 1000
|
||||
sbrowserDbHelper.addWebBookmark( bookmarkResultSet.getString("url"),
|
||||
bookmarkResultSet.getString("title"),
|
||||
createTime,
|
||||
self._PROGRAM_NAME)
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for SBrowser bookmarks.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add SBrowser bookmark artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
sbrowserDb.close()
|
||||
|
||||
|
||||
|
||||
def analyzeCookies(self, dataSource, fileManager, context):
|
||||
cookiesDbs = AppSQLiteDB.findAppDatabases(dataSource, "Cookies", True, self._PACKAGE_NAME)
|
||||
for cookiesDb in cookiesDbs:
|
||||
try:
|
||||
cookiesDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, cookiesDb.getDBFile())
|
||||
cookiesResultSet = cookiesDb.runQuery("SELECT host_key, name, value, creation_utc FROM cookies")
|
||||
if cookiesResultSet is not None:
|
||||
while cookiesResultSet.next():
|
||||
createTime = cookiesResultSet.getLong("creation_utc") / 1000000 - 11644473600 # Webkit time
|
||||
cookiesDbHelper.addWebCookie( cookiesResultSet.getString("host_key"),
|
||||
createTime,
|
||||
cookiesResultSet.getString("name"),
|
||||
cookiesResultSet.getString("value"),
|
||||
self._PROGRAM_NAME)
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for SBrowser cookies.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add SBrowser cookie artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
cookiesDb.close()
|
||||
|
||||
|
||||
|
||||
def analyzeHistory(self, dataSource, fileManager, context):
|
||||
historyDbs = AppSQLiteDB.findAppDatabases(dataSource, "History", True, self._PACKAGE_NAME)
|
||||
for historyDb in historyDbs:
|
||||
try:
|
||||
historyDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, historyDb.getDBFile())
|
||||
historyResultSet = historyDb.runQuery("SELECT url, title, last_visit_time FROM urls")
|
||||
if historyResultSet is not None:
|
||||
while historyResultSet.next():
|
||||
accessTime = historyResultSet.getLong("last_visit_time") / 1000000 - 11644473600 # Webkit time
|
||||
historyDbHelper.addWebHistory( historyResultSet.getString("url"),
|
||||
accessTime,
|
||||
"", # referrer
|
||||
historyResultSet.getString("title"),
|
||||
self._PROGRAM_NAME)
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for SBrowser history.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add SBrowser history artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
historyDb.close()
|
||||
|
||||
|
||||
|
||||
def analyzeDownloads(self, dataSource, fileManager, context):
|
||||
downloadsDbs = AppSQLiteDB.findAppDatabases(dataSource, "History", True, self._PACKAGE_NAME)
|
||||
for downloadsDb in downloadsDbs:
|
||||
try:
|
||||
downloadsDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, downloadsDb.getDBFile())
|
||||
queryString = "SELECT target_path, start_time, url FROM downloads"\
|
||||
" INNER JOIN downloads_url_chains ON downloads.id = downloads_url_chains.id"
|
||||
downloadsResultSet = downloadsDb.runQuery(queryString)
|
||||
if downloadsResultSet is not None:
|
||||
while downloadsResultSet.next():
|
||||
                        startTime = downloadsResultSet.getLong("start_time") / 1000000 - 11644473600 # Webkit time
|
||||
downloadsDbHelper.addWebDownload( downloadsResultSet.getString("url"),
|
||||
startTime,
|
||||
downloadsResultSet.getString("target_path"),
|
||||
self._PROGRAM_NAME)
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for SBrowser downloads.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add SBrowser download artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
downloadsDb.close()
|
||||
|
||||
def analyzeAutofill(self, dataSource, fileManager, context):
|
||||
autofillDbs = AppSQLiteDB.findAppDatabases(dataSource, "Web Data", True, self._PACKAGE_NAME)
|
||||
for autofillDb in autofillDbs:
|
||||
try:
|
||||
autofillDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, autofillDb.getDBFile())
|
||||
queryString = """
|
||||
SELECT name, value, count, date_created
|
||||
FROM autofill
|
||||
INNER JOIN autofill_dates ON autofill.pair_id = autofill_dates.pair_id
|
||||
"""
|
||||
autofillsResultSet = autofillDb.runQuery(queryString)
|
||||
if autofillsResultSet is not None:
|
||||
while autofillsResultSet.next():
|
||||
creationTime = autofillsResultSet.getLong("date_created") / 1000000 - 11644473600 # Webkit time
|
||||
autofillDbHelper.addWebFormAutofill( autofillsResultSet.getString("name"),
|
||||
autofillsResultSet.getString("value"),
|
||||
creationTime,
|
||||
0,
|
||||
autofillsResultSet.getInt("count"))
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for SBrowser autofill.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add SBrowser autofill artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
autofillDb.close()
|
||||
|
||||
def analyzeWebFormAddress(self, dataSource, fileManager, context):
|
||||
webFormAddressDbs = AppSQLiteDB.findAppDatabases(dataSource, "Web Data", True, self._PACKAGE_NAME)
|
||||
for webFormAddressDb in webFormAddressDbs:
|
||||
try:
|
||||
webFormAddressDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, webFormAddressDb.getDBFile())
|
||||
"""
|
||||
                Autofill form data is split across multiple tables. The query below joins the various tables.
|
||||
"""
|
||||
queryString = """
|
||||
SELECT street_address, city, state, zipcode, country_code, date_modified, first_name, last_name, number, email
|
||||
FROM autofill_profiles
|
||||
INNER JOIN autofill_profile_names ON autofill_profiles.guid = autofill_profile_names.guid
|
||||
INNER JOIN autofill_profile_phones ON autofill_profiles.guid = autofill_profile_phones.guid
|
||||
INNER JOIN autofill_profile_emails ON autofill_profiles.guid = autofill_profile_emails.guid
|
||||
"""
|
||||
webFormAddressResultSet = webFormAddressDb.runQuery(queryString)
|
||||
if webFormAddressResultSet is not None:
|
||||
while webFormAddressResultSet.next():
|
||||
personName = webFormAddressResultSet.getString("first_name") + " " + webFormAddressResultSet.getString("last_name")
|
||||
address = '\n'.join([ webFormAddressResultSet.getString("street_address"),
|
||||
webFormAddressResultSet.getString("city"),
|
||||
webFormAddressResultSet.getString("state") + " " + webFormAddressResultSet.getString("zipcode"),
|
||||
webFormAddressResultSet.getString("country_code") ])
|
||||
|
||||
creationTime = webFormAddressResultSet.getLong("date_modified") / 1000000 - 11644473600 # Webkit time
|
||||
                        webFormAddressDbHelper.addWebFormAddress( personName,
|
||||
webFormAddressResultSet.getString("email"),
|
||||
webFormAddressResultSet.getString("number"),
|
||||
address,
|
||||
creationTime,
|
||||
0,
|
||||
0)
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query results for SBrowser form addresses.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add SBrowser form address artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
webFormAddressDb.close()
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
## open current case
|
||||
try:
|
||||
self.current_case = Case.getCurrentCaseThrows()
|
||||
except NoCurrentCaseException as ex:
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
return
|
||||
|
||||
|
||||
self.analyzeBookmarks(dataSource, fileManager, context)
|
||||
self.analyzeCookies(dataSource, fileManager, context)
|
||||
self.analyzeHistory(dataSource, fileManager, context)
|
||||
self.analyzeDownloads(dataSource, fileManager, context)
|
||||
self.analyzeAutofill(dataSource, fileManager, context)
|
||||
self.analyzeWebFormAddress(dataSource, fileManager, context)
|
||||
|
@ -1,144 +0,0 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2019-2020 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Long
|
||||
from java.lang import String
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.apache.commons.codec.binary import Base64
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.datamodel import Account
|
||||
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
|
||||
from org.sleuthkit.datamodel.blackboardutils.attributes import MessageAttachments
|
||||
from org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments import FileAttachment
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
|
||||
|
||||
import traceback
|
||||
import general
|
||||
|
||||
"""
|
||||
Finds the SQLite DB for ShareIt, parses the DB for file transfer records,
|
||||
and adds artifacts to the case.
|
||||
"""
|
||||
class ShareItAnalyzer(general.AndroidComponentAnalyzer):
|
||||
|
||||
"""
|
||||
ShareIt is a file transfer utility app.
|
||||
|
||||
    This module finds the SQLite DB for ShareIt, parses the DB for file transfer records,
|
||||
and adds artifacts to the case.
|
||||
|
||||
ShareIt version 5.0.28 has the following database structure:
|
||||
- history.db
|
||||
-- A history table, with records of file transfers
|
||||
            -- An item table with details of the files transferred
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._PACKAGE_NAME = "com.lenovo.anyshare.gps"
|
||||
self._MODULE_NAME = "ShareIt Analyzer"
|
||||
self._MESSAGE_TYPE = "ShareIt Message"
|
||||
self._VERSION = "5.0.28_ww"
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
historyDbs = AppSQLiteDB.findAppDatabases(dataSource, "history.db", True, self._PACKAGE_NAME)
|
||||
for historyDb in historyDbs:
|
||||
try:
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
historyDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
|
||||
self._MODULE_NAME, historyDb.getDBFile(),
|
||||
Account.Type.SHAREIT)
|
||||
|
||||
queryString = """
|
||||
SELECT history_type, device_id, device_name, description, timestamp, file_path
|
||||
FROM history
|
||||
JOIN item where history.content_id = item.item_id
|
||||
"""
|
||||
historyResultSet = historyDb.runQuery(queryString)
|
||||
if historyResultSet is not None:
|
||||
while historyResultSet.next():
|
||||
direction = ""
|
||||
fromId = None
|
||||
toId = None
|
||||
fileAttachments = ArrayList()
|
||||
|
||||
if (historyResultSet.getInt("history_type") == 1):
|
||||
direction = CommunicationDirection.INCOMING
|
||||
fromId = historyResultSet.getString("device_id")
|
||||
else:
|
||||
direction = CommunicationDirection.OUTGOING
|
||||
toId = historyResultSet.getString("device_id")
|
||||
|
||||
timeStamp = historyResultSet.getLong("timestamp") / 1000
|
||||
messageArtifact = historyDbHelper.addMessage(
|
||||
self._MESSAGE_TYPE,
|
||||
direction,
|
||||
fromId,
|
||||
toId,
|
||||
timeStamp,
|
||||
MessageReadStatus.UNKNOWN,
|
||||
None, # subject
|
||||
None, # message text
|
||||
None ) # thread id
|
||||
|
||||
# add the file as attachment
|
||||
fileAttachments.add(FileAttachment(current_case.getSleuthkitCase(), historyDb.getDBFile().getDataSource(), historyResultSet.getString("file_path")))
|
||||
messageAttachments = MessageAttachments(fileAttachments, [])
|
||||
historyDbHelper.addAttachments(messageArtifact, messageAttachments)
|
||||
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query result for ShareIt history.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to create ShareIt message artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except NoCurrentCaseException as ex:
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
historyDb.close()
|
||||
|
||||
|
||||
|
@ -1,453 +0,0 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2019-2020 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Long
|
||||
from java.lang import String
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.apache.commons.codec.binary import Base64
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.datamodel import Account
|
||||
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
|
||||
from org.sleuthkit.datamodel.blackboardutils.attributes import MessageAttachments
|
||||
from org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments import FileAttachment
|
||||
from TskMessagesParser import TskMessagesParser
|
||||
from TskContactsParser import TskContactsParser
|
||||
from TskCallLogsParser import TskCallLogsParser
|
||||
|
||||
import traceback
|
||||
import general
|
||||
|
||||
class SkypeAnalyzer(general.AndroidComponentAnalyzer):
|
||||
"""
|
||||
Parses the Skype App databases for TSK contacts, message
|
||||
and calllog artifacts.
|
||||
|
||||
    About the Skype database as of version 8.15.0.428 (9/17/2019):
|
||||
- There are 4 tables this parser uses:
|
||||
1) person - this table appears to hold all contacts known to the user.
|
||||
2) user - this table holds information about the user.
|
||||
3) particiapnt - Yes, that is not a typo. This table maps group chat
|
||||
ids to skype ids (1 to many).
|
||||
4) chatItem - This table contains all messages. It maps the group id or
|
||||
skype id (for 1 to 1 communication) to the message content
|
||||
and metadata. Either the group id or skype id is stored in
|
||||
a column named 'conversation_link'.
|
||||
|
||||
More info and implementation details:
|
||||
- The person table does not include groups. To get
|
||||
all 1 to 1 communications, we could simply join the person and chatItem tables.
|
||||
This would mean we'd need to do a second pass to get all the group information
|
||||
as they would be excluded in the join. Since the chatItem table stores both the
|
||||
group id or skype_id in one column, an implementation decision was made to union
|
||||
the person and particiapnt table together so that all rows are matched in one join
|
||||
with chatItem. This result is consistently labeled contact_book_w_groups in the
|
||||
following queries.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._SKYPE_PACKAGE_NAME = "com.skype.raider"
|
||||
self._PARSER_NAME = "Skype Parser"
|
||||
self._VERSION = "8.15.0.428"
|
||||
|
||||
def get_user_account(self, skype_db):
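        # The CASE expression below derives a display name from first_name/last_name,
        # falling back to the raw entry_id (the Skype id) when both are empty; commas
        # are stripped from the name parts before they are joined with a space.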
|
||||
account_query_result = skype_db.runQuery(
|
||||
"""
|
||||
SELECT entry_id,
|
||||
CASE
|
||||
WHEN Ifnull(first_name, "") == "" AND Ifnull(last_name, "") == "" THEN entry_id
|
||||
WHEN first_name is NULL THEN replace(last_name, ",", "")
|
||||
WHEN last_name is NULL THEN replace(first_name, ",", "")
|
||||
ELSE replace(first_name, ",", "") || " " || replace(last_name, ",", "")
|
||||
END AS name
|
||||
FROM user
|
||||
"""
|
||||
)
|
||||
|
||||
if account_query_result is not None and account_query_result.next():
|
||||
return account_query_result.getString("entry_id")
|
||||
return None
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
#Skype databases are of the form: live:XYZ.db, where
|
||||
#XYZ is the skype id of the user. The following search
|
||||
#does a generic substring match for 'live' in the skype
|
||||
#package.
|
||||
skype_dbs = AppSQLiteDB.findAppDatabases(dataSource,
|
||||
"live:", False, self._SKYPE_PACKAGE_NAME)
|
||||
try:
|
||||
for skype_db in skype_dbs:
|
||||
#Attempt to get the user account id from the database
|
||||
user_account_instance = None
|
||||
try:
|
||||
user_account_instance = self.get_user_account(skype_db)
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error querying for the user account in the Skype db.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
|
||||
if user_account_instance is None:
|
||||
helper = CommunicationArtifactsHelper(
|
||||
current_case.getSleuthkitCase(), self._PARSER_NAME,
|
||||
skype_db.getDBFile(), Account.Type.SKYPE
|
||||
)
|
||||
else:
|
||||
helper = CommunicationArtifactsHelper(
|
||||
current_case.getSleuthkitCase(), self._PARSER_NAME,
|
||||
skype_db.getDBFile(), Account.Type.SKYPE,
|
||||
Account.Type.SKYPE, user_account_instance
|
||||
)
|
||||
self.parse_contacts(skype_db, helper)
|
||||
self.parse_calllogs(skype_db, helper)
|
||||
self.parse_messages(skype_db, helper, current_case)
|
||||
except NoCurrentCaseException as ex:
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
for skype_db in skype_dbs:
|
||||
skype_db.close()
|
||||
|
||||
def parse_contacts(self, skype_db, helper):
|
||||
#Query for contacts and iterate row by row adding
|
||||
#each contact artifact
|
||||
try:
|
||||
contacts_parser = SkypeContactsParser(skype_db, self._PARSER_NAME)
|
||||
while contacts_parser.next():
|
||||
helper.addContact(
|
||||
contacts_parser.get_contact_name(),
|
||||
contacts_parser.get_phone(),
|
||||
contacts_parser.get_home_phone(),
|
||||
contacts_parser.get_mobile_phone(),
|
||||
contacts_parser.get_email(),
|
||||
contacts_parser.get_other_attributes()
|
||||
)
|
||||
contacts_parser.close()
|
||||
except SQLException as ex:
|
||||
#Error parsing Skype db
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error parsing contact database for call logs artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
            #Severe error trying to add to the case database. The case is not complete.
|
||||
#These exceptions are thrown by the CommunicationArtifactsHelper.
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Failed to add contact artifacts to the case database.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
#Failed to post notification to blackboard
|
||||
self._logger.log(Level.WARNING,
|
||||
"Failed to post contact artifact to the blackboard", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
def parse_calllogs(self, skype_db, helper):
|
||||
#Query for call logs and iterate row by row adding
|
||||
#each call log artifact
|
||||
try:
|
||||
calllog_parser = SkypeCallLogsParser(skype_db)
|
||||
while calllog_parser.next():
|
||||
helper.addCalllog(
|
||||
calllog_parser.get_call_direction(),
|
||||
calllog_parser.get_phone_number_from(),
|
||||
calllog_parser.get_phone_number_to(),
|
||||
calllog_parser.get_call_start_date_time(),
|
||||
calllog_parser.get_call_end_date_time(),
|
||||
calllog_parser.get_call_type()
|
||||
)
|
||||
calllog_parser.close()
|
||||
except SQLException as ex:
|
||||
#Error parsing Skype db
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error parsing Skype database for call logs artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
            #Severe error trying to add to the case database. The case is not complete.
|
||||
#These exceptions are thrown by the CommunicationArtifactsHelper.
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Failed to add call log artifacts to the case database.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
#Failed to post notification to blackboard
|
||||
self._logger.log(Level.WARNING,
|
||||
"Failed to post call log artifact to the blackboard", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
def parse_messages(self, skype_db, helper, current_case):
|
||||
#Query for messages and iterate row by row adding
|
||||
#each message artifact
|
||||
try:
|
||||
messages_parser = SkypeMessagesParser(skype_db)
|
||||
while messages_parser.next():
|
||||
message_artifact = helper.addMessage(
|
||||
messages_parser.get_message_type(),
|
||||
messages_parser.get_message_direction(),
|
||||
messages_parser.get_phone_number_from(),
|
||||
messages_parser.get_phone_number_to(),
|
||||
messages_parser.get_message_date_time(),
|
||||
messages_parser.get_message_read_status(),
|
||||
messages_parser.get_message_subject(),
|
||||
messages_parser.get_message_text(),
|
||||
messages_parser.get_thread_id()
|
||||
)
|
||||
|
||||
if (messages_parser.get_file_attachment() is not None):
|
||||
file_attachments = ArrayList()
|
||||
file_attachments.add(FileAttachment(current_case.getSleuthkitCase(), skype_db.getDBFile().getDataSource(), messages_parser.get_file_attachment()))
|
||||
message_attachments = MessageAttachments(file_attachments, [])
|
||||
helper.addAttachments(message_artifact, message_attachments)
|
||||
|
||||
messages_parser.close()
|
||||
except SQLException as ex:
|
||||
#Error parsing Skype db
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error parsing Skype database for message artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
            #Severe error trying to add to the case database. The case is not complete.
|
||||
#These exceptions are thrown by the CommunicationArtifactsHelper.
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Failed to add message artifacts to the case database.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
#Failed to post notification to blackboard
|
||||
self._logger.log(Level.WARNING,
|
||||
"Failed to post message artifact to the blackboard", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
class SkypeCallLogsParser(TskCallLogsParser):
|
||||
"""
|
||||
Extracts TSK_CALLLOG information from the Skype database.
|
||||
TSK_CALLLOG fields that are not in the Skype database are given
|
||||
a default value inherited from the super class.
|
||||
"""
|
||||
|
||||
def __init__(self, calllog_db):
|
||||
"""
|
||||
Implementation details:
|
||||
        - message_type with value 3 appeared to be the call type, regardless of
          whether it was audio or video.
|
||||
|
||||
"""
|
||||
super(SkypeCallLogsParser, self).__init__(calllog_db.runQuery(
|
||||
"""
|
||||
SELECT contact_book_w_groups.conversation_id,
|
||||
contact_book_w_groups.participant_ids,
|
||||
messages.time,
|
||||
messages.duration,
|
||||
messages.is_sender_me,
|
||||
messages.person_id AS sender_id
|
||||
FROM (SELECT conversation_id,
|
||||
Group_concat(person_id) AS participant_ids
|
||||
FROM particiapnt
|
||||
GROUP BY conversation_id
|
||||
UNION
|
||||
SELECT entry_id AS conversation_id,
|
||||
NULL
|
||||
FROM person) AS contact_book_w_groups
|
||||
join chatitem AS messages
|
||||
ON messages.conversation_link = contact_book_w_groups.conversation_id
|
||||
WHERE message_type == 3
|
||||
"""
|
||||
)
|
||||
)
|
||||
self._INCOMING_CALL_TYPE = 0
|
||||
self._OUTGOING_CALL_TYPE = 1
|
||||
|
||||
|
||||
def get_phone_number_from(self):
|
||||
if self.get_call_direction() == self.INCOMING_CALL:
|
||||
return self.result_set.getString("sender_id")
|
||||
|
||||
def get_phone_number_to(self):
|
||||
if self.get_call_direction() == self.OUTGOING_CALL:
|
||||
group_ids = self.result_set.getString("participant_ids")
|
||||
|
||||
if group_ids is not None:
|
||||
group_ids = group_ids.split(",")
|
||||
return group_ids
|
||||
|
||||
return self.result_set.getString("conversation_id")
|
||||
|
||||
return super(SkypeCallLogsParser, self).get_phone_number_to()
|
||||
|
||||
def get_call_direction(self):
|
||||
direction = self.result_set.getInt("is_sender_me")
|
||||
if direction == self._INCOMING_CALL_TYPE:
|
||||
return self.INCOMING_CALL
|
||||
if direction == self._OUTGOING_CALL_TYPE:
|
||||
return self.OUTGOING_CALL
|
||||
return super(SkypeCallLogsParser, self).get_call_direction()
|
||||
|
||||
def get_call_start_date_time(self):
|
||||
return self.result_set.getLong("time") / 1000
|
||||
|
||||
def get_call_end_date_time(self):
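        # The database stores only the start time and the duration (in milliseconds),
        # so the end time is derived as start + duration / 1000.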
|
||||
start = self.get_call_start_date_time()
|
||||
duration = self.result_set.getInt("duration") / 1000
|
||||
return start + duration
|
||||
|
||||
class SkypeContactsParser(TskContactsParser):
|
||||
"""
|
||||
Extracts TSK_CONTACT information from the Skype database.
|
||||
TSK_CONTACT fields that are not in the Skype database are given
|
||||
a default value inherited from the super class.
|
||||
"""
|
||||
|
||||
def __init__(self, contact_db, analyzer):
|
||||
super(SkypeContactsParser, self).__init__(contact_db.runQuery(
|
||||
"""
|
||||
SELECT entry_id,
|
||||
CASE
|
||||
WHEN Ifnull(first_name, "") == "" AND Ifnull(last_name, "") == "" THEN entry_id
|
||||
WHEN first_name is NULL THEN replace(last_name, ",", "")
|
||||
WHEN last_name is NULL THEN replace(first_name, ",", "")
|
||||
ELSE replace(first_name, ",", "") || " " || replace(last_name, ",", "")
|
||||
END AS name
|
||||
FROM person
|
||||
"""
|
||||
)
|
||||
)
|
||||
self._PARENT_ANALYZER = analyzer
|
||||
|
||||
def get_contact_name(self):
|
||||
return self.result_set.getString("name")
|
||||
|
||||
def get_other_attributes(self):
|
||||
return [BlackboardAttribute(
|
||||
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ID,
|
||||
self._PARENT_ANALYZER,
|
||||
self.result_set.getString("entry_id"))]
|
||||
|
||||
|
||||
class SkypeMessagesParser(TskMessagesParser):
|
||||
"""
|
||||
    Extracts TSK_MESSAGE information from the Skype database.
    TSK_MESSAGE fields that are not in the Skype database are given
|
||||
a default value inherited from the super class.
|
||||
"""
|
||||
|
||||
def __init__(self, message_db):
|
||||
"""
|
||||
        This query is very similar to the call logs query; the only difference is
        that it grabs more columns in the SELECT and excludes message_types which
        have the call type value (3).
|
||||
"""
|
||||
super(SkypeMessagesParser, self).__init__(message_db.runQuery(
|
||||
"""
|
||||
SELECT contact_book_w_groups.conversation_id,
|
||||
contact_book_w_groups.participant_ids,
|
||||
messages.time,
|
||||
messages.content,
|
||||
messages.device_gallery_path,
|
||||
messages.is_sender_me,
|
||||
messages.person_id as sender_id
|
||||
FROM (SELECT conversation_id,
|
||||
Group_concat(person_id) AS participant_ids
|
||||
FROM particiapnt
|
||||
GROUP BY conversation_id
|
||||
UNION
|
||||
SELECT entry_id as conversation_id,
|
||||
NULL
|
||||
FROM person) AS contact_book_w_groups
|
||||
JOIN chatitem AS messages
|
||||
ON messages.conversation_link = contact_book_w_groups.conversation_id
|
||||
WHERE message_type != 3
|
||||
"""
|
||||
)
|
||||
)
|
||||
self._SKYPE_MESSAGE_TYPE = "Skype Message"
|
||||
self._OUTGOING_MESSAGE_TYPE = 1
|
||||
self._INCOMING_MESSAGE_TYPE = 0
|
||||
|
||||
def get_message_type(self):
|
||||
return self._SKYPE_MESSAGE_TYPE
|
||||
|
||||
def get_phone_number_from(self):
|
||||
if self.get_message_direction() == self.INCOMING:
|
||||
return self.result_set.getString("sender_id")
|
||||
return super(SkypeMessagesParser, self).get_phone_number_from()
|
||||
|
||||
def get_message_direction(self):
|
||||
direction = self.result_set.getInt("is_sender_me")
|
||||
if direction == self._OUTGOING_MESSAGE_TYPE:
|
||||
return self.OUTGOING
|
||||
if direction == self._INCOMING_MESSAGE_TYPE:
|
||||
return self.INCOMING
|
||||
return super(SkypeMessagesParser, self).get_message_direction()
|
||||
|
||||
def get_phone_number_to(self):
|
||||
if self.get_message_direction() == self.OUTGOING:
|
||||
group_ids = self.result_set.getString("participant_ids")
|
||||
|
||||
if group_ids is not None:
|
||||
group_ids = group_ids.split(",")
|
||||
return group_ids
|
||||
|
||||
return self.result_set.getString("conversation_id")
|
||||
|
||||
return super(SkypeMessagesParser, self).get_phone_number_to()
|
||||
|
||||
def get_message_date_time(self):
|
||||
date = self.result_set.getLong("time")
|
||||
return date / 1000
|
||||
|
||||
def get_message_text(self):
|
||||
content = self.result_set.getString("content")
|
||||
|
||||
if content is not None:
|
||||
return content
|
||||
|
||||
return super(SkypeMessagesParser, self).get_message_text()
|
||||
|
||||
def get_thread_id(self):
|
||||
group_ids = self.result_set.getString("participant_ids")
|
||||
if group_ids is not None:
|
||||
return self.result_set.getString("conversation_id")
|
||||
return super(SkypeMessagesParser, self).get_thread_id()
|
||||
|
||||
|
||||
def get_file_attachment(self):
|
||||
if (self.result_set.getString("device_gallery_path") is None):
|
||||
return None
|
||||
else:
|
||||
return self.result_set.getString("device_gallery_path")
|
||||
|
@ -1,150 +0,0 @@
|
||||
"""
|
||||
Autopsy Forensic Browser
|
||||
|
||||
Copyright 2016-2020 Basis Technology Corp.
|
||||
Contact: carrier <at> sleuthkit <dot> org
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
from java.io import File
|
||||
from java.lang import Class
|
||||
from java.lang import ClassNotFoundException
|
||||
from java.lang import Long
|
||||
from java.lang import String
|
||||
from java.sql import Connection
|
||||
from java.sql import DriverManager
|
||||
from java.sql import ResultSet
|
||||
from java.sql import SQLException
|
||||
from java.sql import Statement
|
||||
from java.util.logging import Level
|
||||
from java.util import ArrayList
|
||||
from org.apache.commons.codec.binary import Base64
|
||||
from org.sleuthkit.autopsy.casemodule import Case
|
||||
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
|
||||
from org.sleuthkit.autopsy.casemodule.services import FileManager
|
||||
from org.sleuthkit.autopsy.coreutils import Logger
|
||||
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
|
||||
from org.sleuthkit.autopsy.datamodel import ContentUtils
|
||||
from org.sleuthkit.autopsy.ingest import IngestJobContext
|
||||
from org.sleuthkit.datamodel import AbstractFile
|
||||
from org.sleuthkit.datamodel import Blackboard
|
||||
from org.sleuthkit.datamodel import BlackboardArtifact
|
||||
from org.sleuthkit.datamodel import BlackboardAttribute
|
||||
from org.sleuthkit.datamodel import Content
|
||||
from org.sleuthkit.datamodel import TskCoreException
|
||||
from org.sleuthkit.datamodel import Account
|
||||
from org.sleuthkit.datamodel.Blackboard import BlackboardException
|
||||
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
|
||||
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
|
||||
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
|
||||
|
||||
import traceback
|
||||
import general
|
||||
|
||||
"""
|
||||
Locates the database for the Tango app and adds info to the blackboard.
|
||||
"""
|
||||
class TangoMessageAnalyzer(general.AndroidComponentAnalyzer):
|
||||
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._PACKAGE_NAME = "com.sgiggle.production"
|
||||
self._PARSER_NAME = "Tango Parser"
|
||||
self._MESSAGE_TYPE = "Tango Message"
|
||||
self._VERSION = "7" # DB_VERSION in 'profiles' table
|
||||
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
try:
|
||||
|
||||
tangoDbFiles = AppSQLiteDB.findAppDatabases(dataSource, "tc.db", True, self._PACKAGE_NAME)
|
||||
for tangoDbFile in tangoDbFiles:
|
||||
try:
|
||||
self.__findTangoMessagesInDB(tangoDbFile, dataSource)
|
||||
except Exception as ex:
|
||||
self._logger.log(Level.SEVERE, "Error parsing Tango messages", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
# Error finding Tango messages.
|
||||
pass
|
||||
|
||||
def __findTangoMessagesInDB(self, tangoDb, dataSource):
|
||||
if not tangoDb:
|
||||
return
|
||||
|
||||
try:
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
|
||||
# Create a helper to parse the DB
|
||||
tangoDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
|
||||
self._PARSER_NAME,
|
||||
tangoDb.getDBFile(),
|
||||
Account.Type.TANGO )
|
||||
|
||||
resultSet = tangoDb.runQuery(
|
||||
"SELECT conv_id, create_time, direction, payload FROM messages ORDER BY create_time DESC;")
|
||||
|
||||
while resultSet.next():
|
||||
fromId = None
|
||||
toId = None
|
||||
conv_id = resultSet.getString("conv_id") # seems to wrap around the message found in payload after decoding from base-64
|
||||
create_time = Long.valueOf(resultSet.getString("create_time")) / 1000
|
||||
|
||||
if resultSet.getString("direction") == "1": # 1 incoming, 2 outgoing
|
||||
direction = CommunicationDirection.INCOMING
|
||||
else:
|
||||
direction = CommunicationDirection.OUTGOING
|
||||
|
||||
payload = resultSet.getString("payload")
|
||||
msgBody = TangoMessageAnalyzer.decodeMessage(conv_id, payload)
|
||||
|
||||
messageArtifact = tangoDbHelper.addMessage(
|
||||
self._MESSAGE_TYPE,
|
||||
direction,
|
||||
fromId,
|
||||
toId,
|
||||
create_time,
|
||||
MessageReadStatus.UNKNOWN,
|
||||
"", # subject
|
||||
msgBody,
|
||||
"")
|
||||
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error processing query result for Tango messages", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE, "Failed to add Tango message artifacts.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except NoCurrentCaseException as ex:
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
finally:
|
||||
tangoDb.close()
|
||||
|
||||
# take the message string which is wrapped by a certain string, and return the text enclosed.
|
||||
@staticmethod
|
||||
def decodeMessage(wrapper, message):
|
||||
result = ""
|
||||
decoded = Base64.decodeBase64(message)
|
||||
try:
|
||||
Z = String(decoded, "UTF-8")
|
||||
result = Z.split(wrapper)[1]
|
||||
except Exception as ex:
|
||||
# Error decoding a Tango message.
|
||||
pass
|
||||
return result
|
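The decodeMessage helper above base64-decodes the payload column and keeps the text that sits between two occurrences of the conversation id. A minimal standalone sketch of the same transformation, written for plain Python with the base64 module; the wrapper value and sample payload below are made up for illustration and are not taken from a real tc.db:

import base64

def decode_tango_payload(wrapper, payload):
    # Decode the base-64 payload and return the text enclosed by the wrapper string,
    # mirroring what TangoMessageAnalyzer.decodeMessage does with Java String.split.
    try:
        decoded = base64.b64decode(payload).decode("utf-8", errors="replace")
        return decoded.split(wrapper)[1]
    except (ValueError, IndexError):
        return ""

# Hypothetical example: a payload whose decoded form is "<conv123>hello there<conv123>".
sample = base64.b64encode(b"<conv123>hello there<conv123>").decode("ascii")
print(decode_tango_payload("<conv123>", sample))   # prints: hello there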
@ -1,147 +0,0 @@
"""
Autopsy Forensic Browser

Copyright 2016-2020 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from java.io import File
from java.lang import Class
from java.lang import ClassNotFoundException
from java.lang import Integer
from java.lang import Long
from java.sql import Connection
from java.sql import DriverManager
from java.sql import ResultSet
from java.sql import SQLException
from java.sql import Statement
from java.util.logging import Level
from java.util import ArrayList
from java.util import UUID
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
from org.sleuthkit.autopsy.casemodule.services import FileManager
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
from org.sleuthkit.autopsy.datamodel import ContentUtils
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
from org.sleuthkit.autopsy.ingest import IngestJobContext
from org.sleuthkit.autopsy.ingest import IngestServices
from org.sleuthkit.autopsy.ingest import ModuleDataEvent
from org.sleuthkit.datamodel import AbstractFile
from org.sleuthkit.datamodel import Blackboard
from org.sleuthkit.datamodel import BlackboardArtifact
from org.sleuthkit.datamodel import BlackboardAttribute
from org.sleuthkit.datamodel import Content
from org.sleuthkit.datamodel import TskCoreException
from org.sleuthkit.datamodel.Blackboard import BlackboardException
from org.sleuthkit.datamodel import Account
from org.sleuthkit.datamodel.blackboardutils.attributes import MessageAttachments
from org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments import FileAttachment
from org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments import URLAttachment
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection

import traceback
import general

class TextMessageAnalyzer(general.AndroidComponentAnalyzer):
    """
    Finds and parses the Android SMS/MMS database, and populates the blackboard with messages.
    """

    def __init__(self):
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._PACKAGE_NAME = "com.android.providers.telephony"
        self._PARSER_NAME = "Android Message Parser"
        self._MESSAGE_TYPE = "Android Message"

    def analyze(self, dataSource, fileManager, context):
        selfAccountId = None
        messageDbs = AppSQLiteDB.findAppDatabases(dataSource, "mmssms.db", True, self._PACKAGE_NAME)
        for messageDb in messageDbs:
            try:
                current_case = Case.getCurrentCaseThrows()
                if selfAccountId is not None:
                    messageDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
                                            self._PARSER_NAME,
                                            messageDb.getDBFile(),
                                            Account.Type.PHONE, Account.Type.IMO, selfAccountId)
                else:
                    messageDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
                                            self._PARSER_NAME,
                                            messageDb.getDBFile(),
                                            Account.Type.PHONE)

                uuid = UUID.randomUUID().toString()
                messagesResultSet = messageDb.runQuery("SELECT address, date, read, type, subject, body, thread_id FROM sms;")
                if messagesResultSet is not None:
                    while messagesResultSet.next():
                        direction = ""
                        address = None
                        fromId = None
                        toId = None

                        address = messagesResultSet.getString("address")  # may be phone number, or other addresses
                        timeStamp = Long.valueOf(messagesResultSet.getString("date")) / 1000
                        read = messagesResultSet.getInt("read")  # may be unread = 0, read = 1
                        subject = messagesResultSet.getString("subject")  # message subject
                        msgBody = messagesResultSet.getString("body")  # message body
                        thread_id = "{0}-{1}".format(uuid, messagesResultSet.getInt("thread_id"))
                        if messagesResultSet.getString("type") == "1":
                            direction = CommunicationDirection.INCOMING
                            fromId = address
                        else:
                            direction = CommunicationDirection.OUTGOING
                            toId = address

                        message_read = messagesResultSet.getInt("read")  # may be unread = 0, read = 1
                        if (message_read == 1):
                            msgReadStatus = MessageReadStatus.READ
                        elif (message_read == 0):
                            msgReadStatus = MessageReadStatus.UNREAD
                        else:
                            msgReadStatus = MessageReadStatus.UNKNOWN

                        ## add a message
                        if address is not None:
                            messageArtifact = messageDbHelper.addMessage(
                                                    self._MESSAGE_TYPE,
                                                    direction,
                                                    fromId,
                                                    toId,
                                                    timeStamp,
                                                    msgReadStatus,
                                                    subject,   # subject
                                                    msgBody,
                                                    thread_id)

            except SQLException as ex:
                self._logger.log(Level.WARNING, "Error processing query result for Android messages.", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            except TskCoreException as ex:
                self._logger.log(Level.SEVERE, "Failed to add Android message artifacts.", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
            except BlackboardException as ex:
                self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            except NoCurrentCaseException as ex:
                self._logger.log(Level.WARNING, "No case currently open.", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            finally:
                messageDb.close()
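Outside of Autopsy's AppSQLiteDB/Jython plumbing, the same sms-table extraction can be sketched with the standard sqlite3 module. The column names follow the query above; the database path is a placeholder, and the type/read mappings mirror the parser (type "1" means incoming, read 1 means read, timestamps are milliseconds since the epoch):

import sqlite3

def dump_sms(db_path="mmssms.db"):   # placeholder path to an exported database
    conn = sqlite3.connect(db_path)
    try:
        rows = conn.execute(
            "SELECT address, date, read, type, subject, body, thread_id FROM sms")
        for address, date_ms, read, msg_type, subject, body, thread_id in rows:
            direction = "incoming" if str(msg_type) == "1" else "outgoing"
            read_status = {1: "read", 0: "unread"}.get(read, "unknown")
            timestamp_s = int(date_ms) / 1000        # ms since epoch -> seconds
            print(direction, read_status, timestamp_s, address, subject, body, thread_id)
    finally:
        conn.close()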
@ -1,402 +0,0 @@
"""
Autopsy Forensic Browser

Copyright 2019-2020 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from java.io import File
from java.lang import Class
from java.lang import ClassNotFoundException
from java.lang import Long
from java.lang import String
from java.sql import ResultSet
from java.sql import SQLException
from java.sql import Statement
from java.util.logging import Level
from java.util import ArrayList
from org.apache.commons.codec.binary import Base64
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB

from org.sleuthkit.autopsy.datamodel import ContentUtils
from org.sleuthkit.autopsy.ingest import IngestJobContext
from org.sleuthkit.datamodel import AbstractFile
from org.sleuthkit.datamodel import BlackboardArtifact
from org.sleuthkit.datamodel import BlackboardAttribute
from org.sleuthkit.datamodel import Content
from org.sleuthkit.datamodel import TskCoreException
from org.sleuthkit.datamodel.Blackboard import BlackboardException
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
from org.sleuthkit.datamodel import Account
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
from org.sleuthkit.datamodel.blackboardutils.attributes import MessageAttachments
from org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments import FileAttachment
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection

from TskMessagesParser import TskMessagesParser
from TskContactsParser import TskContactsParser
from TskCallLogsParser import TskCallLogsParser

import traceback
import general

class TextNowAnalyzer(general.AndroidComponentAnalyzer):
    """
    Parses the TextNow App databases for TSK contacts, message
    and calllog artifacts.

    The TextNow database in v6.41.0.2 is structured as follows:
        - A messages table, which stores messages from/to a number
        - A contacts table, which stores phone numbers
        - A groups table, which stores each group the device owner is a part of
        - A group_members table, which stores who is in each group

    The messages table contains both call logs and messages, with a type
    column differentiating the two.
    """

    def __init__(self):
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._TEXTNOW_PACKAGE_NAME = "com.enflick.android.TextNow"
        self._PARSER_NAME = "TextNow Parser"
        self._VERSION = "6.41.0.2"

    def analyze(self, dataSource, fileManager, context):
        """
        Extract, Transform and Load all messages, contacts and
        calllogs from the TextNow databases.
        """

        textnow_dbs = AppSQLiteDB.findAppDatabases(dataSource,
                "textnow_data.db", True, self._TEXTNOW_PACKAGE_NAME)

        try:
            for textnow_db in textnow_dbs:
                current_case = Case.getCurrentCaseThrows()
                helper = CommunicationArtifactsHelper(
                            current_case.getSleuthkitCase(), self._PARSER_NAME,
                            textnow_db.getDBFile(), Account.Type.TEXTNOW
                         )
                self.parse_contacts(textnow_db, helper)
                self.parse_calllogs(textnow_db, helper)
                self.parse_messages(textnow_db, helper, current_case)
        except NoCurrentCaseException as ex:
            self._logger.log(Level.WARNING, "No case currently open.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())

        for textnow_db in textnow_dbs:
            textnow_db.close()

    def parse_contacts(self, textnow_db, helper):
        # Query for contacts and iterate row by row adding
        # each contact artifact
        try:
            contacts_parser = TextNowContactsParser(textnow_db)
            while contacts_parser.next():
                name = contacts_parser.get_contact_name()
                phone = contacts_parser.get_phone()
                home_phone = contacts_parser.get_home_phone()
                mobile_phone = contacts_parser.get_mobile_phone()
                email = contacts_parser.get_email()

                # add contact if we have at least one valid phone/email
                if phone or home_phone or mobile_phone or email:
                    helper.addContact(
                        name,
                        phone,
                        home_phone,
                        mobile_phone,
                        email
                    )
            contacts_parser.close()
        except SQLException as ex:
            # Error parsing TextNow db
            self._logger.log(Level.WARNING, "Error parsing TextNow databases for contacts", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        except TskCoreException as ex:
            # Error adding artifacts to the case database; case database is not complete.
            self._logger.log(Level.SEVERE,
                    "Error adding TextNow contacts artifacts to the case database", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
        except BlackboardException as ex:
            # Error posting notification to blackboard
            self._logger.log(Level.WARNING,
                    "Error posting TextNow contacts artifact to the blackboard", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())

    def parse_calllogs(self, textnow_db, helper):
        # Query for call logs and iterate row by row adding
        # each call log artifact
        try:
            calllog_parser = TextNowCallLogsParser(textnow_db)
            while calllog_parser.next():
                helper.addCalllog(
                    calllog_parser.get_call_direction(),
                    calllog_parser.get_phone_number_from(),
                    calllog_parser.get_phone_number_to(),
                    calllog_parser.get_call_start_date_time(),
                    calllog_parser.get_call_end_date_time(),
                    calllog_parser.get_call_type()
                )
            calllog_parser.close()
        except SQLException as ex:
            self._logger.log(Level.WARNING, "Error parsing TextNow databases for calllogs", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        except TskCoreException as ex:
            # Error adding artifacts to the case database; case database is not complete.
            self._logger.log(Level.SEVERE,
                    "Error adding TextNow call log artifacts to the case database", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
        except BlackboardException as ex:
            # Error posting notification to blackboard
            self._logger.log(Level.WARNING,
                    "Error posting TextNow call log artifact to the blackboard", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())

    def parse_messages(self, textnow_db, helper, current_case):
        # Query for messages and iterate row by row adding
        # each message artifact
        try:
            messages_parser = TextNowMessagesParser(textnow_db)
            while messages_parser.next():
                message_artifact = helper.addMessage(
                                        messages_parser.get_message_type(),
                                        messages_parser.get_message_direction(),
                                        messages_parser.get_phone_number_from(),
                                        messages_parser.get_phone_number_to(),
                                        messages_parser.get_message_date_time(),
                                        messages_parser.get_message_read_status(),
                                        messages_parser.get_message_subject(),
                                        messages_parser.get_message_text(),
                                        messages_parser.get_thread_id()
                                    )
                if (len(messages_parser.get_file_attachment()) > 0):
                    file_attachments = ArrayList()
                    self._logger.log(Level.INFO, "Show attachment ==> " + str(len(messages_parser.get_file_attachment())) + " <> " + str(messages_parser.get_file_attachment()))
                    file_attachments.add(FileAttachment(current_case.getSleuthkitCase(), textnow_db.getDBFile().getDataSource(), messages_parser.get_file_attachment()))
                    message_attachments = MessageAttachments(file_attachments, [])
                    helper.addAttachments(message_artifact, message_attachments)

            messages_parser.close()
        except SQLException as ex:
            # Error parsing TextNow db
            self._logger.log(Level.WARNING, "Error parsing TextNow databases for messages.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        except TskCoreException as ex:
            # Error adding artifacts to the case database; case database is not complete.
            self._logger.log(Level.SEVERE,
                    "Error adding TextNow messages artifacts to the case database", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
        except BlackboardException as ex:
            # Error posting notification to blackboard
            self._logger.log(Level.WARNING,
                    "Error posting TextNow messages artifact to the blackboard", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())


class TextNowCallLogsParser(TskCallLogsParser):
    """
    Extracts TSK_CALLLOG information from the TextNow database.
    TSK_CALLLOG fields that are not in the TextNow database are given
    a default value inherited from the super class.
    """

    def __init__(self, calllog_db):
        """
        message_type of 100 or 102 are for calls (audio, video)
        """
        super(TextNowCallLogsParser, self).__init__(calllog_db.runQuery(
                """
                    SELECT contact_value AS num,
                           message_direction AS direction,
                           message_text AS duration,
                           date AS datetime
                    FROM   messages AS M
                    WHERE  message_type IN ( 100, 102 )
                """
            )
        )
        self._INCOMING_CALL_TYPE = 1
        self._OUTGOING_CALL_TYPE = 2

    def get_phone_number_from(self):
        if self.get_call_direction() == self.OUTGOING_CALL:
            return super(TextNowCallLogsParser, self).get_phone_number_from()
        return self.result_set.getString("num")

    def get_phone_number_to(self):
        if self.get_call_direction() == self.INCOMING_CALL:
            return super(TextNowCallLogsParser, self).get_phone_number_to()
        return self.result_set.getString("num")

    def get_call_direction(self):
        if self.result_set.getInt("direction") == self._INCOMING_CALL_TYPE:
            return self.INCOMING_CALL
        return self.OUTGOING_CALL

    def get_call_start_date_time(self):
        return self.result_set.getLong("datetime") / 1000

    def get_call_end_date_time(self):
        start = self.get_call_start_date_time()
        duration = self.result_set.getString("duration")
        try:
            return start + long(duration)
        except ValueError as ve:
            return super(TextNowCallLogsParser, self).get_call_end_date_time()


class TextNowContactsParser(TskContactsParser):
    """
    Extracts TSK_CONTACT information from the TextNow database.
    TSK_CONTACT fields that are not in the TextNow database are given
    a default value inherited from the super class.
    """

    def __init__(self, contact_db):
        super(TextNowContactsParser, self).__init__(contact_db.runQuery(
                """
                    SELECT C.contact_value AS number,
                           CASE
                             WHEN contact_name IS NULL THEN contact_value
                             WHEN contact_name == "" THEN contact_value
                             ELSE contact_name
                           END name
                    FROM   contacts AS C
                """
            )
        )

    def get_contact_name(self):
        return self.result_set.getString("name")

    def get_phone(self):
        number = self.result_set.getString("number")
        return (number if general.isValidPhoneNumber(number) else None)

    def get_email(self):
        # occasionally the 'number' column may have an email address instead
        value = self.result_set.getString("number")
        return (value if general.isValidEmailAddress(value) else None)


class TextNowMessagesParser(TskMessagesParser):
    """
    Extract TSK_MESSAGE information from the TextNow database.
    TSK_CONTACT fields that are not in the TextNow database are given
    a default value inherited from the super class.
    """

    def __init__(self, message_db):
        """
        The query below does the following:
            - The group_info inner query creates a comma separated list of group recipients
              for each group. This result is then joined on the groups table to get the thread id.
            - The contacts table is unioned with this result so we have a complete map
              of "from" phone_numbers -> recipients (group or single). This is the
              'to_from_map' inner query.
            - Finally, the to_from_map results are joined with the messages table to get all
              of the communication details.
        """
        super(TextNowMessagesParser, self).__init__(message_db.runQuery(
                """
                    SELECT CASE
                             WHEN messages.message_direction == 2 THEN NULL
                             WHEN contact_book_w_groups.to_addresses IS NULL THEN
                             messages.contact_value
                           END from_address,
                           CASE
                             WHEN messages.message_direction == 1 THEN NULL
                             WHEN contact_book_w_groups.to_addresses IS NULL THEN
                             messages.contact_value
                             ELSE contact_book_w_groups.to_addresses
                           END to_address,
                           messages.message_direction,
                           messages.message_text,
                           messages.READ,
                           messages.DATE,
                           messages.attach,
                           thread_id
                    FROM   (SELECT GM.contact_value,
                                   Group_concat(GM.member_contact_value) AS to_addresses,
                                   G.contact_value AS thread_id
                            FROM   group_members AS GM
                                   join GROUPS AS G
                                     ON G.contact_value = GM.contact_value
                            GROUP  BY GM.contact_value
                            UNION
                            SELECT contact_value,
                                   NULL,
                                   NULL
                            FROM   contacts) AS contact_book_w_groups
                           join messages
                             ON messages.contact_value = contact_book_w_groups.contact_value
                    WHERE  message_type NOT IN ( 102, 100 )
                """
            )
        )
        self._TEXTNOW_MESSAGE_TYPE = "TextNow Message"
        self._INCOMING_MESSAGE_TYPE = 1
        self._OUTGOING_MESSAGE_TYPE = 2

    def get_message_type(self):
        return self._TEXTNOW_MESSAGE_TYPE

    def get_phone_number_from(self):
        if self.result_set.getString("from_address") is None:
            return super(TextNowMessagesParser, self).get_phone_number_from()
        return self.result_set.getString("from_address")

    def get_message_direction(self):
        direction = self.result_set.getInt("message_direction")
        if direction == self._INCOMING_MESSAGE_TYPE:
            return self.INCOMING
        return self.OUTGOING

    def get_phone_number_to(self):
        if self.result_set.getString("to_address") is None:
            return super(TextNowMessagesParser, self).get_phone_number_to()
        return self.result_set.getString("to_address").split(",")

    def get_message_date_time(self):
        # convert ms to s
        return self.result_set.getLong("date") / 1000

    def get_message_read_status(self):
        read = self.result_set.getBoolean("read")
        if self.get_message_direction() == self.INCOMING:
            if read:
                return self.READ
            return self.UNREAD

        # read status for outgoing messages cannot be determined, give default
        return super(TextNowMessagesParser, self).get_message_read_status()

    def get_message_text(self):
        text = self.result_set.getString("message_text")
        return text

    def get_thread_id(self):
        thread_id = self.result_set.getString("thread_id")
        if thread_id is None:
            return super(TextNowMessagesParser, self).get_thread_id()
        return thread_id

    def get_file_attachment(self):
        attachment = self.result_set.getString("attach")
        if attachment is None:
            return None
        return self.result_set.getString("attach")
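The recipient mapping described in the TextNowMessagesParser docstring can be illustrated with a much smaller query: each group id expands to a comma separated list of member numbers, and plain contacts fall through with no recipient list. A minimal sketch using the standard sqlite3 module, with a placeholder database path:

import sqlite3

def textnow_recipient_map(db_path="textnow_data.db"):   # placeholder path
    # Returns {group contact_value: "member1,member2,..."} for every group,
    # mirroring the group_concat half of the contact_book_w_groups subquery above.
    conn = sqlite3.connect(db_path)
    try:
        return dict(conn.execute(
            """
            SELECT GM.contact_value, group_concat(GM.member_contact_value)
            FROM group_members AS GM
            GROUP BY GM.contact_value
            """))
    finally:
        conn.close()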
@ -1,394 +0,0 @@
"""
Autopsy Forensic Browser

Copyright 2019-2020 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from java.io import File
from java.lang import Class
from java.lang import ClassNotFoundException
from java.lang import Long
from java.lang import String
from java.sql import ResultSet
from java.sql import SQLException
from java.sql import Statement
from java.util.logging import Level
from java.util import ArrayList
from org.apache.commons.codec.binary import Base64
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB

from org.sleuthkit.autopsy.datamodel import ContentUtils
from org.sleuthkit.autopsy.ingest import IngestJobContext
from org.sleuthkit.datamodel import AbstractFile
from org.sleuthkit.datamodel import BlackboardArtifact
from org.sleuthkit.datamodel import BlackboardAttribute
from org.sleuthkit.datamodel import Content
from org.sleuthkit.datamodel import TskCoreException
from org.sleuthkit.datamodel.Blackboard import BlackboardException
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
from org.sleuthkit.datamodel import Account
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
from org.sleuthkit.datamodel.blackboardutils.attributes import MessageAttachments
from org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments import FileAttachment
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
from TskMessagesParser import TskMessagesParser
from TskContactsParser import TskContactsParser
from TskCallLogsParser import TskCallLogsParser

import traceback
import general

class ViberAnalyzer(general.AndroidComponentAnalyzer):
    """
    Parses the Viber App databases for TSK contacts, message
    and calllog artifacts.

    The Viber v11.5.0 database structure is as follows:
        - People can take part in N conversation(s). A conversation can have M
          members and messages are exchanged in a conversation.
        - Viber has a conversation table, a participant table (the people/members in the above
          analogy) and a messages table.
        - Each row of the participants table maps a person to a conversation_id
        - Each row in the messages table has a from participant id and a conversation id.
    """

    def __init__(self):
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._VIBER_PACKAGE_NAME = "com.viber.voip"
        self._PARSER_NAME = "Viber Parser"
        self._VERSION = "11.5.0"

    def analyze(self, dataSource, fileManager, context):
        """
        Extract, Transform and Load all messages, contacts and
        calllogs from the Viber databases.
        """

        try:
            contact_and_calllog_dbs = AppSQLiteDB.findAppDatabases(dataSource,
                    "viber_data", True, self._VIBER_PACKAGE_NAME)
            message_dbs = AppSQLiteDB.findAppDatabases(dataSource,
                    "viber_messages", True, self._VIBER_PACKAGE_NAME)

            # Extract TSK_CONTACT and TSK_CALLLOG information
            for contact_and_calllog_db in contact_and_calllog_dbs:
                current_case = Case.getCurrentCaseThrows()
                helper = CommunicationArtifactsHelper(
                            current_case.getSleuthkitCase(), self._PARSER_NAME,
                            contact_and_calllog_db.getDBFile(), Account.Type.VIBER)
                self.parse_contacts(contact_and_calllog_db, helper)
                self.parse_calllogs(contact_and_calllog_db, helper)

            # Extract TSK_MESSAGE information
            for message_db in message_dbs:
                current_case = Case.getCurrentCaseThrows()
                helper = CommunicationArtifactsHelper(
                            current_case.getSleuthkitCase(), self._PARSER_NAME,
                            message_db.getDBFile(), Account.Type.VIBER)
                self.parse_messages(message_db, helper, current_case)

        except NoCurrentCaseException as ex:
            self._logger.log(Level.WARNING, "No case currently open.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())

        for message_db in message_dbs:
            message_db.close()

        for contact_and_calllog_db in contact_and_calllog_dbs:
            contact_and_calllog_db.close()

    def parse_contacts(self, contacts_db, helper):
        try:
            contacts_parser = ViberContactsParser(contacts_db)
            while contacts_parser.next():
                if (not(not contacts_parser.get_phone() or contacts_parser.get_phone().isspace())):
                    helper.addContact(
                        contacts_parser.get_contact_name(),
                        contacts_parser.get_phone(),
                        contacts_parser.get_home_phone(),
                        contacts_parser.get_mobile_phone(),
                        contacts_parser.get_email()
                    )
                # Check if contact_name is blank and if it is not create a TSK_CONTACT otherwise ignore as not Contact Info
                elif (not(not contacts_parser.get_contact_name() or contacts_parser.get_contact_name().isspace())):
                    current_case = Case.getCurrentCase().getSleuthkitCase()
                    attributes = ArrayList()
                    artifact = contacts_db.getDBFile().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT)
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), self._PARSER_NAME, contacts_parser.get_contact_name()))
                    artifact.addAttributes(attributes)

                    # Post the artifact to blackboard
                    current_case.getBlackboard().postArtifact(artifact, self._PARSER_NAME)

            contacts_parser.close()
        except SQLException as ex:
            self._logger.log(Level.WARNING, "Error querying the viber database for contacts.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        except TskCoreException as ex:
            self._logger.log(Level.SEVERE,
                    "Error adding viber contacts artifact to case database.", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
        except BlackboardException as ex:
            self._logger.log(Level.WARNING,
                    "Error posting viber contacts artifact to the blackboard.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())

    def parse_calllogs(self, calllogs_db, helper):
        try:
            calllog_parser = ViberCallLogsParser(calllogs_db)
            while calllog_parser.next():
                helper.addCalllog(
                    calllog_parser.get_call_direction(),
                    calllog_parser.get_phone_number_from(),
                    calllog_parser.get_phone_number_to(),
                    calllog_parser.get_call_start_date_time(),
                    calllog_parser.get_call_end_date_time(),
                    calllog_parser.get_call_type()
                )
            calllog_parser.close()
        except SQLException as ex:
            self._logger.log(Level.WARNING, "Error querying the viber database for calllogs.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        except TskCoreException as ex:
            self._logger.log(Level.SEVERE,
                    "Error adding viber calllogs artifact to case database.", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
        except BlackboardException as ex:
            self._logger.log(Level.WARNING,
                    "Error posting viber calllogs artifact to the blackboard.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())

    def parse_messages(self, messages_db, helper, current_case):
        try:
            messages_parser = ViberMessagesParser(messages_db)
            while messages_parser.next():
                message_artifact = helper.addMessage(
                                        messages_parser.get_message_type(),
                                        messages_parser.get_message_direction(),
                                        messages_parser.get_phone_number_from(),
                                        messages_parser.get_phone_number_to(),
                                        messages_parser.get_message_date_time(),
                                        messages_parser.get_message_read_status(),
                                        messages_parser.get_message_subject(),
                                        messages_parser.get_message_text(),
                                        messages_parser.get_thread_id()
                                    )
                if (messages_parser.get_file_attachment() is not None):
                    file_attachments = ArrayList()
                    file_attachments.add(FileAttachment(current_case.getSleuthkitCase(), messages_db.getDBFile().getDataSource(), messages_parser.get_file_attachment()))
                    message_attachments = MessageAttachments(file_attachments, [])
                    helper.addAttachments(message_artifact, message_attachments)
            messages_parser.close()
        except SQLException as ex:
            self._logger.log(Level.WARNING, "Error querying the viber database for messages.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        except TskCoreException as ex:
            self._logger.log(Level.SEVERE,
                    "Error adding viber messages artifact to case database.", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
        except BlackboardException as ex:
            self._logger.log(Level.WARNING,
                    "Error posting viber messages artifact to the blackboard.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())


class ViberCallLogsParser(TskCallLogsParser):
    """
    Extracts TSK_CALLLOG information from the Viber database.
    TSK_CALLLOG fields that are not in the Viber database are given
    a default value inherited from the super class.
    """

    def __init__(self, calllog_db):
        super(ViberCallLogsParser, self).__init__(calllog_db.runQuery(
                """
                    SELECT C.canonized_number AS number,
                           C.type AS direction,
                           C.duration AS seconds,
                           C.date AS start_time,
                           C.viber_call_type AS call_type
                    FROM   calls AS C
                """
            )
        )

        self._OUTGOING_CALL_TYPE = 2
        self._INCOMING_CALL_TYPE = 1
        self._MISSED_CALL_TYPE = 3
        self._AUDIO_CALL_TYPE = 1
        self._VIDEO_CALL_TYPE = 4

    def get_phone_number_from(self):
        if self.get_call_direction() == self.INCOMING_CALL:
            return self.result_set.getString("number")
        # Give default value if the call is outgoing,
        # the device's # is not stored in the database.
        return super(ViberCallLogsParser, self).get_phone_number_from()

    def get_phone_number_to(self):
        if self.get_call_direction() == self.OUTGOING_CALL:
            return self.result_set.getString("number")
        # Give default value if the call is incoming,
        # the device's # is not stored in the database.
        return super(ViberCallLogsParser, self).get_phone_number_to()

    def get_call_direction(self):
        direction = self.result_set.getInt("direction")
        if direction == self._INCOMING_CALL_TYPE or direction == self._MISSED_CALL_TYPE:
            return self.INCOMING_CALL
        return self.OUTGOING_CALL

    def get_call_start_date_time(self):
        return self.result_set.getLong("start_time") / 1000

    def get_call_end_date_time(self):
        start_time = self.get_call_start_date_time()
        duration = self.result_set.getLong("seconds")
        return start_time + duration

    def get_call_type(self):
        call_type = self.result_set.getInt("call_type")
        if call_type == self._AUDIO_CALL_TYPE:
            return self.AUDIO_CALL
        if call_type == self._VIDEO_CALL_TYPE:
            return self.VIDEO_CALL
        return super(ViberCallLogsParser, self).get_call_type()


class ViberContactsParser(TskContactsParser):
    """
    Extracts TSK_CONTACT information from the Viber database.
    TSK_CONTACT fields that are not in the Viber database are given
    a default value inherited from the super class.
    """

    def __init__(self, contact_db):
        super(ViberContactsParser, self).__init__(contact_db.runQuery(
                """
                    SELECT C.display_name AS name,
                           coalesce(D.data2, D.data1, D.data3) AS number
                    FROM   phonebookcontact AS C
                           JOIN phonebookdata AS D
                             ON C._id = D.contact_id
                """
            )
        )

    def get_contact_name(self):
        return self.result_set.getString("name")

    def get_phone(self):
        return self.result_set.getString("number")


class ViberMessagesParser(TskMessagesParser):
    """
    Extract TSK_MESSAGE information from the Viber database.
    TSK_CONTACT fields that are not in the Viber database are given
    a default value inherited from the super class.
    """

    def __init__(self, message_db):
        """
        The query below does the following:
            - The first two inner joins on participants and participants_info build
              the 1 to many (M) mappings between the sender and the recipients for each
              conversation_id. If a and b do private messaging, then 2 rows in the result
              will be a -> b and b -> a.
              If a, b, c, d are in a group, then 4 rows containing a -> b,c,d. b -> a,c,d. etc.
              Participants_info is needed to get phone numbers.
            - The result of the above step is a look up table for each message. Joining this result
              onto the messages table lets us know which participant a message originated from and
              everyone else that received it.
        """
        super(ViberMessagesParser, self).__init__(message_db.runQuery(
                """
                    SELECT convo_participants.from_number AS from_number,
                           convo_participants.recipients AS recipients,
                           M.conversation_id AS thread_id,
                           M.body AS msg_content,
                           M.send_type AS direction,
                           M.msg_date AS msg_date,
                           M.unread AS read_status,
                           M.extra_uri AS file_attachment
                    FROM   (SELECT *,
                                   group_concat(TO_RESULT.number) AS recipients
                            FROM   (SELECT P._id AS FROM_ID,
                                           P.conversation_id,
                                           PI.number AS FROM_NUMBER
                                    FROM   participants AS P
                                           JOIN participants_info AS PI
                                             ON P.participant_info_id = PI._id) AS FROM_RESULT
                                   JOIN (SELECT P._id AS TO_ID,
                                                P.conversation_id,
                                                PI.number
                                         FROM   participants AS P
                                                JOIN participants_info AS PI
                                                  ON P.participant_info_id = PI._id) AS TO_RESULT
                                     ON FROM_RESULT.from_id != TO_RESULT.to_id
                                        AND FROM_RESULT.conversation_id = TO_RESULT.conversation_id
                            GROUP  BY FROM_RESULT.from_id) AS convo_participants
                           JOIN messages AS M
                             ON M.participant_id = convo_participants.from_id
                                AND M.conversation_id = convo_participants.conversation_id
                """
            )
        )
        self._VIBER_MESSAGE_TYPE = "Viber Message"
        self._INCOMING_MESSAGE_TYPE = 0
        self._OUTGOING_MESSAGE_TYPE = 1

    def get_message_type(self):
        return self._VIBER_MESSAGE_TYPE

    def get_phone_number_from(self):
        return self.result_set.getString("from_number")

    def get_message_direction(self):
        direction = self.result_set.getInt("direction")
        if direction == self._INCOMING_MESSAGE_TYPE:
            return self.INCOMING
        return self.OUTGOING

    def get_phone_number_to(self):
        return self.result_set.getString("recipients").split(",")

    def get_message_date_time(self):
        # transform from ms to seconds
        return self.result_set.getLong("msg_date") / 1000

    def get_message_read_status(self):
        if self.get_message_direction() == self.INCOMING:
            if self.result_set.getInt("read_status") == 0:
                return self.READ
            else:
                return self.UNREAD
        return super(ViberMessagesParser, self).get_message_read_status()

    def get_message_text(self):
        return self.result_set.getString("msg_content")

    def get_thread_id(self):
        return str(self.result_set.getInt("thread_id"))

    def get_file_attachment(self):
        if (self.result_set.getString("file_attachment") is None):
            return None
        elif ("content:" in self.result_set.getString("file_attachment")):
            return self.result_set.getString("msg_content").replace("file://", "")
        else:
            return self.result_set.getString("file_attachment").replace("file://", "")
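The sender-to-recipients mapping described in the ViberMessagesParser docstring comes from self-joining the participants of a conversation. A toy, self-contained sketch of that shape using an in-memory sqlite3 database; the schema and numbers below are minimal stand-ins, not the real viber_messages layout:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
    CREATE TABLE participants (_id INTEGER, conversation_id INTEGER, participant_info_id INTEGER);
    CREATE TABLE participants_info (_id INTEGER, number TEXT);
    INSERT INTO participants VALUES (1, 10, 1), (2, 10, 2), (3, 10, 3);
    INSERT INTO participants_info VALUES (1, '+111'), (2, '+222'), (3, '+333');
""")
# Every sender row carries a comma separated list of everyone else in the conversation.
rows = conn.execute("""
    SELECT FROM_RESULT.number, group_concat(TO_RESULT.number)
    FROM (SELECT P._id AS from_id, P.conversation_id, PI.number
          FROM participants AS P JOIN participants_info AS PI
            ON P.participant_info_id = PI._id) AS FROM_RESULT
    JOIN (SELECT P._id AS to_id, P.conversation_id, PI.number
          FROM participants AS P JOIN participants_info AS PI
            ON P.participant_info_id = PI._id) AS TO_RESULT
      ON FROM_RESULT.from_id != TO_RESULT.to_id
     AND FROM_RESULT.conversation_id = TO_RESULT.conversation_id
    GROUP BY FROM_RESULT.from_id
""").fetchall()
print(rows)   # e.g. [('+111', '+222,+333'), ('+222', '+111,+333'), ('+333', '+111,+222')]
conn.close()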
@ -1,551 +0,0 @@
"""
Autopsy Forensic Browser

Copyright 2019-2020 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from java.io import File
from java.lang import Class
from java.lang import ClassNotFoundException
from java.lang import Long
from java.lang import String
from java.sql import ResultSet
from java.sql import SQLException
from java.sql import Statement
from java.util.logging import Level
from java.util import ArrayList
from org.apache.commons.codec.binary import Base64
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB

from org.sleuthkit.autopsy.datamodel import ContentUtils
from org.sleuthkit.autopsy.ingest import IngestJobContext
from org.sleuthkit.datamodel import AbstractFile
from org.sleuthkit.datamodel import BlackboardArtifact
from org.sleuthkit.datamodel import BlackboardAttribute
from org.sleuthkit.datamodel import Content
from org.sleuthkit.datamodel import TskCoreException
from org.sleuthkit.datamodel.Blackboard import BlackboardException
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
from org.sleuthkit.datamodel import Account
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
from org.sleuthkit.datamodel.blackboardutils.attributes import MessageAttachments
from org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments import FileAttachment
from org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments import URLAttachment
from TskMessagesParser import TskMessagesParser
from TskContactsParser import TskContactsParser
from TskCallLogsParser import TskCallLogsParser

import traceback
import general

class WhatsAppAnalyzer(general.AndroidComponentAnalyzer):
    """
    Parses the WhatsApp databases for TSK contact, message
    and calllog artifacts.

    About the WhatsApp parser for v2.19.244:
        - Database Design Details:
            There are 2 databases and 6 tables this parser uses.

            1) Prerequisites:
                Each user is assigned a WhatsApp id, referred to as jid in the
                database. A jid is of the form:

                    ####...####@whatsapp.net

                where # is a placeholder for an arbitrary length of digits 1-9.

            2) Databases:
                - databases/msgstore.db: contains msg and call log info
                - databases/wa.db: contains contact info

            3) Tables:
                - wa/wa_contacts: Each record maps a jid to a user's personal
                                  details, such as name and phone number.

                - msgstore/call_log: Each call made on the device is a single row
                                     in the call_log table. Each record holds
                                     information such as duration, direction, and
                                     type (Video or Audio).

                - msgstore/call_log_participant_v2: Each row of this table maps a jid to
                                                    a call_log record. Multiple rows that
                                                    share a call_log id indicate a group call.

                - msgstore/messages: Each message is represented as a single row.
                                     A row maps a jid or a gjid (group jid) to some
                                     message details. Both the jid and gjid are
                                     stored in 1 column, called key_remote_jid.
                                     gjid's are of the form:

                                         #####...###-#####...####@g.us

                                     where # is a placeholder for a digit 1-9. The
                                     '-' is a fixed character surrounded by digits
                                     of arbitrary length n and m.

                                     If the message is not from a group, the jid the
                                     message is to/from is stored in the key_remote_jid
                                     column. If it is a group, the key_remote_jid
                                     column contains the gjid and the 'from' jid is
                                     stored in a secondary column called
                                     remote_resource.

                - msgstore/group_participants: Each row of this table maps a jid to a gjid.

                - msgstore/jid: This table stores the raw jid string. Some tables
                                only store the jid_row. A join must be
                                performed to get the jid value out.
        - Implementation details:
            1) Group calls and single calls are extracted in two different queries.
            2) Group messages and single messages are extracted in 1 query.
                - The general approach was to build one complete contacts table containing
                  both jid and gjid. A join can be performed once on all of the messages.
                  All jids that are part of a gjid were concatenated into a comma separated
                  list of jids.
    """
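    # Illustrative sketch, not part of the original module: the jid formats described
    # in the docstring above can be told apart with a simple check, since group jids
    # end in "@g.us" and contain the fixed '-' separator, while user jids end in
    # "@whatsapp.net". The helper name below is hypothetical.
    @staticmethod
    def _looks_like_group_jid(jid):
        # e.g. _looks_like_group_jid("12345-67890@g.us") -> True
        #      _looks_like_group_jid("12345@whatsapp.net") -> False
        return jid is not None and jid.endswith("@g.us") and "-" in jid.split("@")[0]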
|
||||
def __init__(self):
|
||||
self._logger = Logger.getLogger(self.__class__.__name__)
|
||||
self._WHATSAPP_PACKAGE_NAME = "com.whatsapp"
|
||||
self._PARSER_NAME = "WhatsApp Parser"
|
||||
self._VERSION = "2.19.244"
|
||||
|
||||
def analyze(self, dataSource, fileManager, context):
|
||||
"""
|
||||
Extract, Transform and Load all TSK contact, message
|
||||
and calllog artifacts from the WhatsApp databases.
|
||||
"""
|
||||
|
||||
try:
|
||||
contact_dbs = AppSQLiteDB.findAppDatabases(dataSource,
|
||||
"wa.db", True, self._WHATSAPP_PACKAGE_NAME)
|
||||
calllog_and_message_dbs = AppSQLiteDB.findAppDatabases(dataSource,
|
||||
"msgstore.db", True, self._WHATSAPP_PACKAGE_NAME)
|
||||
|
||||
#Extract TSK_CONTACT information
|
||||
for contact_db in contact_dbs:
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
helper = CommunicationArtifactsHelper(
|
||||
current_case.getSleuthkitCase(), self._PARSER_NAME,
|
||||
contact_db.getDBFile(), Account.Type.WHATSAPP)
|
||||
self.parse_contacts(contact_db, helper)
|
||||
|
||||
for calllog_and_message_db in calllog_and_message_dbs:
|
||||
current_case = Case.getCurrentCaseThrows()
|
||||
helper = CommunicationArtifactsHelper(
|
||||
current_case.getSleuthkitCase(), self._PARSER_NAME,
|
||||
calllog_and_message_db.getDBFile(), Account.Type.WHATSAPP)
|
||||
self.parse_calllogs(calllog_and_message_db, helper)
|
||||
self.parse_messages(dataSource, calllog_and_message_db, helper, current_case)
|
||||
|
||||
except NoCurrentCaseException as ex:
|
||||
#If there is no current case, bail out immediately.
|
||||
self._logger.log(Level.WARNING, "No case currently open.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exec())
|
||||
|
||||
#Clean up open file handles.
|
||||
for contact_db in contact_dbs:
|
||||
contact_db.close()
|
||||
|
||||
for calllog_and_message_db in calllog_and_message_dbs:
|
||||
calllog_and_message_db.close()
|
||||
|
||||
def parse_contacts(self, contacts_db, helper):
|
||||
try:
|
||||
contacts_parser = WhatsAppContactsParser(contacts_db, self._PARSER_NAME)
|
||||
while contacts_parser.next():
|
||||
name = contacts_parser.get_contact_name()
|
||||
phone = contacts_parser.get_phone()
|
||||
home_phone = contacts_parser.get_home_phone()
|
||||
mobile_phone = contacts_parser.get_mobile_phone()
|
||||
email = contacts_parser.get_email()
|
||||
other_attributes = contacts_parser.get_other_attributes()
|
||||
# add contact if we have at least one valid phone/email
|
||||
if phone or home_phone or mobile_phone or email or other_attributes:
|
||||
helper.addContact(
|
||||
name,
|
||||
phone,
|
||||
home_phone,
|
||||
mobile_phone,
|
||||
email,
|
||||
other_attributes
|
||||
)
|
||||
contacts_parser.close()
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error querying the whatsapp database for contacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Error adding whatsapp contact artifacts to the case database.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error posting contact artifact to the blackboard.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
def parse_calllogs(self, calllogs_db, helper):
|
||||
try:
|
||||
single_calllogs_parser = WhatsAppSingleCallLogsParser(calllogs_db)
|
||||
while single_calllogs_parser.next():
|
||||
helper.addCalllog(
|
||||
single_calllogs_parser.get_call_direction(),
|
||||
single_calllogs_parser.get_phone_number_from(),
|
||||
single_calllogs_parser.get_phone_number_to(),
|
||||
single_calllogs_parser.get_call_start_date_time(),
|
||||
single_calllogs_parser.get_call_end_date_time(),
|
||||
single_calllogs_parser.get_call_type()
|
||||
)
|
||||
single_calllogs_parser.close()
|
||||
|
||||
group_calllogs_parser = WhatsAppGroupCallLogsParser(calllogs_db)
|
||||
while group_calllogs_parser.next():
|
||||
helper.addCalllog(
|
||||
group_calllogs_parser.get_call_direction(),
|
||||
group_calllogs_parser.get_phone_number_from(),
|
||||
group_calllogs_parser.get_phone_number_to(),
|
||||
group_calllogs_parser.get_call_start_date_time(),
|
||||
group_calllogs_parser.get_call_end_date_time(),
|
||||
group_calllogs_parser.get_call_type()
|
||||
)
|
||||
group_calllogs_parser.close()
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error querying the whatsapp database for calllogs.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Error adding whatsapp calllog artifacts to the case database.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error posting calllog artifact to the blackboard.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
def parse_messages(self, dataSource, messages_db, helper, current_case):
|
||||
try:
|
||||
messages_db.attachDatabase(dataSource, "wa.db",
|
||||
messages_db.getDBFile().getParentPath(), "wadb")
|
||||
|
||||
messages_parser = WhatsAppMessagesParser(messages_db)
|
||||
while messages_parser.next():
|
||||
message_artifact = helper.addMessage(
|
||||
messages_parser.get_message_type(),
|
||||
messages_parser.get_message_direction(),
|
||||
messages_parser.get_phone_number_from(),
|
||||
messages_parser.get_phone_number_to(),
|
||||
messages_parser.get_message_date_time(),
|
||||
messages_parser.get_message_read_status(),
|
||||
messages_parser.get_message_subject(),
|
||||
messages_parser.get_message_text(),
|
||||
messages_parser.get_thread_id()
|
||||
)
|
||||
|
||||
# add attachments, if any
|
||||
if (messages_parser.get_url_attachment() is not None):
|
||||
url_attachments = ArrayList()
|
||||
url_attachments.add(URLAttachment(messages_parser.get_url_attachment()))
|
||||
message_attachments = MessageAttachments([], url_attachments)
|
||||
helper.addAttachments(message_artifact, message_attachments)
|
||||
|
||||
messages_parser.close()
|
||||
except SQLException as ex:
|
||||
self._logger.log(Level.WARNING, "Error querying the whatsapp database for contacts.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
except TskCoreException as ex:
|
||||
self._logger.log(Level.SEVERE,
|
||||
"Error adding whatsapp contact artifacts to the case database.", ex)
|
||||
self._logger.log(Level.SEVERE, traceback.format_exc())
|
||||
except BlackboardException as ex:
|
||||
self._logger.log(Level.WARNING,
|
||||
"Error posting contact artifact to the blackboard.", ex)
|
||||
self._logger.log(Level.WARNING, traceback.format_exc())
|
||||
|
||||
class WhatsAppGroupCallLogsParser(TskCallLogsParser):
|
||||
"""
|
||||
Extracts TSK_CALLLOG information from group call logs
|
||||
in the WhatsApp database.
|
||||
"""
|
||||
|
||||
def __init__(self, calllog_db):
|
||||
super(WhatsAppGroupCallLogsParser, self).__init__(calllog_db.runQuery(
|
||||
"""
|
||||
SELECT CL.video_call,
|
||||
CL.timestamp,
|
||||
CL.duration,
|
||||
CL.from_me,
|
||||
J1.raw_string AS from_id,
|
||||
group_concat(J.raw_string) AS group_members
|
||||
FROM call_log_participant_v2 AS CLP
|
||||
JOIN call_log AS CL
|
||||
ON CL._id = CLP.call_log_row_id
|
||||
JOIN jid AS J
|
||||
ON J._id = CLP.jid_row_id
|
||||
JOIN jid as J1
|
||||
ON J1._id = CL.jid_row_id
|
||||
GROUP BY CL._id
|
||||
"""
|
||||
)
|
||||
)
|
||||
self._INCOMING_CALL_TYPE = 0
|
||||
self._OUTGOING_CALL_TYPE = 1
|
||||
self._VIDEO_CALL_TYPE = 1
|
||||
|
||||
    def get_call_direction(self):
        if self.result_set.getInt("from_me") == self._INCOMING_CALL_TYPE:
            return self.INCOMING_CALL
        return self.OUTGOING_CALL

    def get_phone_number_from(self):
        if self.get_call_direction() == self.INCOMING_CALL:
            sender = self.result_set.getString("from_id")
            return sender
        return super(WhatsAppGroupCallLogsParser, self).get_phone_number_from()

    def get_phone_number_to(self):
        if self.get_call_direction() == self.OUTGOING_CALL:
            # the group_members column stores a comma separated list of group members or a single contact
            group = self.result_set.getString("group_members")
            return group.split(",")
        return super(WhatsAppGroupCallLogsParser, self).get_phone_number_to()

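    # WhatsApp stores call timestamps in milliseconds while TSK expects epoch seconds,
    # so the start time is divided by 1000; duration is stored in seconds and is added
    # to the start time to compute the end time.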
    def get_call_start_date_time(self):
        return self.result_set.getLong("timestamp") / 1000

    def get_call_end_date_time(self):
        start = self.get_call_start_date_time()
        duration = self.result_set.getInt("duration")
        return start + duration

    def get_call_type(self):
        if self.result_set.getInt("video_call") == self._VIDEO_CALL_TYPE:
            return self.VIDEO_CALL
        return self.AUDIO_CALL

class WhatsAppSingleCallLogsParser(TskCallLogsParser):
    """
    Extracts TSK_CALLLOG information from 1 to 1 call logs
    in the WhatsApp database.
    """

    def __init__(self, calllog_db):
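        # Only 1 to 1 calls are selected here: any call_log row that also appears in
        # call_log_participant_v2 is a group call and is handled by
        # WhatsAppGroupCallLogsParser instead.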
        super(WhatsAppSingleCallLogsParser, self).__init__(calllog_db.runQuery(
            """
                SELECT CL.timestamp,
                       CL.video_call,
                       CL.duration,
                       J.raw_string AS num,
                       CL.from_me
                FROM   call_log AS CL
                       JOIN jid AS J
                         ON J._id = CL.jid_row_id
                WHERE  CL._id NOT IN (SELECT DISTINCT call_log_row_id
                                      FROM call_log_participant_v2)
            """
            )
        )
        self._INCOMING_CALL_TYPE = 0
        self._OUTGOING_CALL_TYPE = 1
        self._VIDEO_CALL_TYPE = 1

    def get_call_direction(self):
        if self.result_set.getInt("from_me") == self._INCOMING_CALL_TYPE:
            return self.INCOMING_CALL
        return self.OUTGOING_CALL

    def get_phone_number_from(self):
        if self.get_call_direction() == self.INCOMING_CALL:
            sender = self.result_set.getString("num")
            return sender
        return super(WhatsAppSingleCallLogsParser, self).get_phone_number_from()

    def get_phone_number_to(self):
        if self.get_call_direction() == self.OUTGOING_CALL:
            to = self.result_set.getString("num")
            return to
        return super(WhatsAppSingleCallLogsParser, self).get_phone_number_to()

    def get_call_start_date_time(self):
        return self.result_set.getLong("timestamp") / 1000

    def get_call_end_date_time(self):
        start = self.get_call_start_date_time()
        duration = self.result_set.getInt("duration")
        return start + duration

    def get_call_type(self):
        if self.result_set.getInt("video_call") == self._VIDEO_CALL_TYPE:
            return self.VIDEO_CALL
        return self.AUDIO_CALL

class WhatsAppContactsParser(TskContactsParser):
    """
    Extracts TSK_CONTACT information from the WhatsApp database.
    TSK_CONTACT fields that are not in the WhatsApp database are given
    a default value inherited from the super class.
    """

    def __init__(self, contact_db, analyzer):
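        # The first CASE falls back to the jid when the number column is NULL or empty;
        # the second CASE builds a display name from given_name, family_name and
        # display_name, again falling back to the jid when none of them are present.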
        super(WhatsAppContactsParser, self).__init__(contact_db.runQuery(
            """
                SELECT jid,
                       CASE
                         WHEN WC.number IS NULL THEN WC.jid
                         WHEN WC.number == "" THEN WC.jid
                         ELSE WC.number
                       END number,
                       CASE
                         WHEN WC.given_name IS NULL
                              AND WC.family_name IS NULL
                              AND WC.display_name IS NULL THEN WC.jid
                         WHEN WC.given_name IS NULL
                              AND WC.family_name IS NULL THEN WC.display_name
                         WHEN WC.given_name IS NULL THEN WC.family_name
                         WHEN WC.family_name IS NULL THEN WC.given_name
                         ELSE WC.given_name
                                || " "
                                || WC.family_name
                       END name
                FROM   wa_contacts AS WC
            """
            )
        )

        self._PARENT_ANALYZER = analyzer

    def get_contact_name(self):
        return self.result_set.getString("name")

    def get_phone(self):
        number = self.result_set.getString("number")
        return (number if general.isValidPhoneNumber(number) else None)

    def get_email(self):
        # occasionally the 'number' column may have an email address instead
        value = self.result_set.getString("number")
        return (value if general.isValidEmailAddress(value) else None)

    def get_other_attributes(self):
        value = self.result_set.getString("jid")
        if value:
            return [BlackboardAttribute(
                        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ID,
                        self._PARENT_ANALYZER,
                        value)]
        else:
            return []

class WhatsAppMessagesParser(TskMessagesParser):
    """
    Extract TSK_MESSAGE information from the WhatsApp database.
    TSK_MESSAGE fields that are not in the WhatsApp database are given
    a default value inherited from the super class.
    """

    def __init__(self, message_db):
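        # The inner query builds a contact book: every wa_contacts row from the attached
        # wa.db ("wadb"), left joined with group_participants so that group jids carry a
        # comma separated recipients list. Joining it to messages on key_remote_jid lets
        # each message row expose its group recipients directly.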
        super(WhatsAppMessagesParser, self).__init__(message_db.runQuery(
            """
                SELECT messages.key_remote_jid AS id,
                       contact_book_w_groups.recipients,
                       key_from_me AS direction,
                       messages.data AS content,
                       messages.timestamp AS send_timestamp,
                       messages.received_timestamp,
                       messages.remote_resource AS group_sender,
                       messages.media_url AS attachment
                FROM   (SELECT jid,
                               recipients
                        FROM   wadb.wa_contacts AS contacts
                               left join (SELECT gjid,
                                                 Group_concat(CASE
                                                                WHEN jid == "" THEN NULL
                                                                ELSE jid
                                                              END) AS recipients
                                          FROM   group_participants
                                          GROUP  BY gjid) AS groups
                                      ON contacts.jid = groups.gjid
                        GROUP  BY jid) AS contact_book_w_groups
                       join messages
                         ON messages.key_remote_jid = contact_book_w_groups.jid
            """
            )
        )
        self._WHATSAPP_MESSAGE_TYPE = "WhatsApp Message"
        self._INCOMING_MESSAGE_TYPE = 0
        self._OUTGOING_MESSAGE_TYPE = 1
        self._message_db = message_db

    def get_message_type(self):
        return self._WHATSAPP_MESSAGE_TYPE

    def get_phone_number_to(self):
        if self.get_message_direction() == self.OUTGOING:
            group = self.result_set.getString("recipients")
            if group is not None:
                group = group.split(",")
                return group

            return self.result_set.getString("id")
        return super(WhatsAppMessagesParser, self).get_phone_number_to()

    def get_phone_number_from(self):
        if self.get_message_direction() == self.INCOMING:
            group_sender = self.result_set.getString("group_sender")
            group = self.result_set.getString("recipients")
            if group_sender is not None and group is not None:
                return group_sender
            else:
                return self.result_set.getString("id")
        return super(WhatsAppMessagesParser, self).get_phone_number_from()

    def get_message_direction(self):
        direction = self.result_set.getInt("direction")
        if direction == self._INCOMING_MESSAGE_TYPE:
            return self.INCOMING
        return self.OUTGOING

    def get_message_date_time(self):
        # transform from ms to seconds
        if self.get_message_direction() == self.OUTGOING:
            return self.result_set.getLong("send_timestamp") / 1000
        return self.result_set.getLong("received_timestamp") / 1000

    def get_message_text(self):
        message = self.result_set.getString("content")
        if message is None:
            message = super(WhatsAppMessagesParser, self).get_message_text()
        return message

    def get_thread_id(self):
        group = self.result_set.getString("recipients")
        if group is not None:
            return self.result_set.getString("id")
        return super(WhatsAppMessagesParser, self).get_thread_id()

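    # Only media_url values that look like web URLs are reported as URL attachments;
    # anything else (or a missing value) is ignored.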
    def get_url_attachment(self):
        attachment = self.result_set.getString("attachment")
        if (attachment is None):
            return None
        elif (str(attachment).startswith("http:") or str(attachment).startswith("https:")):
            return attachment
        else:
            return None
@ -1,139 +0,0 @@
"""
Autopsy Forensic Browser

Copyright 2016-2020 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from java.io import File
from java.lang import Class
from java.lang import ClassNotFoundException
from java.sql import Connection
from java.sql import DriverManager
from java.sql import ResultSet
from java.sql import SQLException
from java.sql import Statement
from java.util.logging import Level
from java.util import ArrayList
from java.util import UUID
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
from org.sleuthkit.autopsy.casemodule.services import FileManager
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
from org.sleuthkit.autopsy.datamodel import ContentUtils
from org.sleuthkit.autopsy.ingest import IngestJobContext
from org.sleuthkit.datamodel import AbstractFile
from org.sleuthkit.datamodel import Blackboard
from org.sleuthkit.datamodel import BlackboardArtifact
from org.sleuthkit.datamodel import BlackboardAttribute
from org.sleuthkit.datamodel import Content
from org.sleuthkit.datamodel import TskCoreException
from org.sleuthkit.datamodel import Account
from org.sleuthkit.datamodel import Relationship
from org.sleuthkit.datamodel.Blackboard import BlackboardException
from org.sleuthkit.autopsy.ingest import IngestServices
from org.sleuthkit.autopsy.ingest import ModuleDataEvent
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection

import traceback
import general

wwfAccountType = None


"""
Analyzes messages from Words With Friends
"""
class WWFMessageAnalyzer(general.AndroidComponentAnalyzer):

    def __init__(self):
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._PACKAGE_NAME = "com.zynga.words"
        self._PARSER_NAME = "Words With Friend Parser"
        self._MESSAGE_TYPE = "WWF Message"

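    # analyze() registers a custom "WWF" account type with the communications manager,
    # locates the WordsFramework SQLite database for com.zynga.words, and hands each
    # matching database to __findWWFMessagesInDB.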
    def analyze(self, dataSource, fileManager, context):
        try:
            # Create the account type if it doesn't exist yet.
            global wwfAccountType
            wwfAccountType = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager().addAccountType("WWF", "Words with Friends")

            wwfDbFiles = AppSQLiteDB.findAppDatabases(dataSource, "WordsFramework", True, self._PACKAGE_NAME)
            for wwfDbFile in wwfDbFiles:
                try:
                    self.__findWWFMessagesInDB(wwfDbFile, dataSource)
                except Exception as ex:
                    self._logger.log(Level.SEVERE, "Error parsing WWF messages", ex)
                    self._logger.log(Level.SEVERE, traceback.format_exc())
        except TskCoreException as ex:
            # Error finding WWF messages.
            self._logger.log(Level.SEVERE, "Error finding WWF message files.", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
            pass

    def __findWWFMessagesInDB(self, wwfDb, dataSource):
        if not wwfDb:
            return

        current_case = Case.getCurrentCaseThrows()

        # Create a helper to parse the DB
        wwfDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
                            self._PARSER_NAME,
                            wwfDb.getDBFile(),
                            wwfAccountType)

        uuid = UUID.randomUUID().toString()

        resultSet = None
        try:
            resultSet = wwfDb.runQuery("SELECT message, strftime('%s' ,created_at) as datetime, user_id, game_id FROM chat_messages ORDER BY game_id DESC, created_at DESC;")

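            # Each chat_messages row becomes one message artifact. created_at is converted
            # to epoch seconds by strftime('%s', ...) in the query, and messages are
            # grouped into threads by a per-database UUID combined with the user_id.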
            while resultSet.next():
                message = resultSet.getString("message") # WWF Message
                created_at = resultSet.getLong("datetime")
                user_id = resultSet.getString("user_id") # the ID of the user who sent/received the message.
                game_id = resultSet.getString("game_id") # the ID of the game in which the message was sent.
                thread_id = "{0}-{1}".format(uuid, user_id)

                messageArtifact = wwfDbHelper.addMessage(self._MESSAGE_TYPE,
                                                    CommunicationDirection.UNKNOWN,
                                                    user_id,    # fromId
                                                    None,       # toId
                                                    created_at,
                                                    MessageReadStatus.UNKNOWN,
                                                    "",         # subject
                                                    message,
                                                    thread_id)

        except SQLException as ex:
            self._logger.log(Level.WARNING, "Error processing query result for WWF messages", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        except TskCoreException as ex:
            self._logger.log(Level.SEVERE, "Failed to add WWF message artifacts.", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
        except BlackboardException as ex:
            self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        except NoCurrentCaseException as ex:
            self._logger.log(Level.WARNING, "No case currently open.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        finally:
            wwfDb.close()
@ -1,154 +0,0 @@
"""
Autopsy Forensic Browser

Copyright 2019-2020 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from java.io import File
from java.lang import Class
from java.lang import ClassNotFoundException
from java.lang import Long
from java.lang import String
from java.sql import ResultSet
from java.sql import SQLException
from java.sql import Statement
from java.util.logging import Level
from java.util import ArrayList
from org.apache.commons.codec.binary import Base64
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
from org.sleuthkit.autopsy.datamodel import ContentUtils
from org.sleuthkit.autopsy.ingest import IngestJobContext
from org.sleuthkit.datamodel import AbstractFile
from org.sleuthkit.datamodel import BlackboardArtifact
from org.sleuthkit.datamodel import BlackboardAttribute
from org.sleuthkit.datamodel import Content
from org.sleuthkit.datamodel import TskCoreException
from org.sleuthkit.datamodel.Blackboard import BlackboardException
from org.sleuthkit.datamodel import Account
from org.sleuthkit.datamodel.blackboardutils.attributes import MessageAttachments
from org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments import FileAttachment
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection

import traceback
import general

class XenderAnalyzer(general.AndroidComponentAnalyzer):

    """
    Xender is a file transfer utility app.

    This module finds the SQLite DB for Xender, parses the DB for contacts & messages,
    and adds artifacts to the case.

    Xender version 4.6.5 has the following database structure:
        - trans-history.db
            -- A profile table with the device_id/name of users interacted with
            -- A new_history table, with records of files exchanged with other users
                --- f_path - path of the file sent/received
    """

    def __init__(self):
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._PACKAGE_NAME = "cn.xender"
        self._MODULE_NAME = "Xender Analyzer"
        self._MESSAGE_TYPE = "Xender Message"
        self._VERSION = "4.6.5"

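    # The profile row with connect_times = 0 identifies the local (self) device; when it
    # is found, the artifacts helper is created with that self account so message
    # directions can be attributed, otherwise a device-only helper is used. Every
    # new_history row is then added as a message with the transferred file attached.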
    def analyze(self, dataSource, fileManager, context):
        selfAccountId = None
        transactionDbs = AppSQLiteDB.findAppDatabases(dataSource, "trans-history-db", True, self._PACKAGE_NAME)
        for transactionDb in transactionDbs:
            try:
                current_case = Case.getCurrentCaseThrows()
                # get the profile with connect_times = 0, that's the self account.
                profilesResultSet = transactionDb.runQuery("SELECT device_id, nick_name FROM profile WHERE connect_times = 0")
                if profilesResultSet:
                    while profilesResultSet.next():
                        if not selfAccountId:
                            selfAccountId = profilesResultSet.getString("device_id")
                # create artifacts helper
                if selfAccountId is not None:
                    transactionDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
                                            self._MODULE_NAME, transactionDb.getDBFile(),
                                            Account.Type.XENDER, Account.Type.XENDER, selfAccountId)
                else:
                    transactionDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
                                            self._MODULE_NAME, transactionDb.getDBFile(),
                                            Account.Type.XENDER)

                queryString = """
                                SELECT f_path, f_display_name, f_size_str, c_start_time, c_direction, c_session_id,
                                       s_name, s_device_id, r_name, r_device_id
                                FROM new_history
                              """
                messagesResultSet = transactionDb.runQuery(queryString)
                if messagesResultSet is not None:
                    while messagesResultSet.next():
                        direction = CommunicationDirection.UNKNOWN
                        fromId = None
                        toId = None

                        fileAttachments = ArrayList()

                        if (messagesResultSet.getInt("c_direction") == 1):
                            direction = CommunicationDirection.OUTGOING
                            toId = messagesResultSet.getString("r_device_id")
                        else:
                            direction = CommunicationDirection.INCOMING
                            fromId = messagesResultSet.getString("s_device_id")

                        timeStamp = messagesResultSet.getLong("c_start_time") / 1000
                        messageArtifact = transactionDbHelper.addMessage(
                                                            self._MESSAGE_TYPE,
                                                            direction,
                                                            fromId,
                                                            toId,
                                                            timeStamp,
                                                            MessageReadStatus.UNKNOWN,
                                                            None,   # subject
                                                            None,   # message text
                                                            messagesResultSet.getString("c_session_id"))

                        # add the file as attachment
                        fileAttachments.add(FileAttachment(current_case.getSleuthkitCase(), transactionDb.getDBFile().getDataSource(), messagesResultSet.getString("f_path")))
                        messageAttachments = MessageAttachments(fileAttachments, [])
                        transactionDbHelper.addAttachments(messageArtifact, messageAttachments)

            except SQLException as ex:
                self._logger.log(Level.WARNING, "Error processing query result for profiles.", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            except TskCoreException as ex:
                self._logger.log(Level.SEVERE, "Failed to create Xender message artifacts.", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
            except BlackboardException as ex:
                self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            except NoCurrentCaseException as ex:
                self._logger.log(Level.WARNING, "No case currently open.", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            finally:
                transactionDb.close()
@ -1,135 +0,0 @@
"""
Autopsy Forensic Browser

Copyright 2019-2020 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from java.io import File
from java.lang import Class
from java.lang import ClassNotFoundException
from java.lang import Long
from java.lang import String
from java.sql import ResultSet
from java.sql import SQLException
from java.sql import Statement
from java.util.logging import Level
from java.util import ArrayList
from org.apache.commons.codec.binary import Base64
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
from org.sleuthkit.autopsy.datamodel import ContentUtils
from org.sleuthkit.autopsy.ingest import IngestJobContext
from org.sleuthkit.datamodel import AbstractFile
from org.sleuthkit.datamodel import BlackboardArtifact
from org.sleuthkit.datamodel import BlackboardAttribute
from org.sleuthkit.datamodel import Content
from org.sleuthkit.datamodel import TskCoreException
from org.sleuthkit.datamodel.Blackboard import BlackboardException
from org.sleuthkit.datamodel import Account
from org.sleuthkit.datamodel.blackboardutils.attributes import MessageAttachments
from org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments import FileAttachment
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection

import traceback
import general

class ZapyaAnalyzer(general.AndroidComponentAnalyzer):

    """
    Zapya is a file transfer utility app.

    This module finds the SQLite DB for Zapya, parses the DB for contacts & messages,
    and adds artifacts to the case.

    Zapya version 5.8.3 has the following database structure:
        - transfer20.db
            -- A transfer table, with records of files exchanged with other users
                --- path - path of the file sent/received
    """

    def __init__(self):
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._PACKAGE_NAME = "com.dewmobile.kuaiya.play"
        self._MODULE_NAME = "Zapya Analyzer"
        self._MESSAGE_TYPE = "Zapya Message"
        self._VERSION = "5.8.3"

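    # Each row of the transfer table becomes one message artifact with the transferred
    # file attached; direction == 1 is treated as outgoing, and the device column holds
    # the id of the remote device on the other side of the transfer.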
    def analyze(self, dataSource, fileManager, context):
        transferDbs = AppSQLiteDB.findAppDatabases(dataSource, "transfer20.db", True, self._PACKAGE_NAME)
        for transferDb in transferDbs:
            try:
                current_case = Case.getCurrentCaseThrows()
                # create an artifacts helper for the transfer database; no self account
                # is available, so only the generic Zapya account type is used.
                transferDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
                                        self._MODULE_NAME, transferDb.getDBFile(),
                                        Account.Type.ZAPYA)

                queryString = "SELECT device, name, direction, createtime, path, title FROM transfer"
                transfersResultSet = transferDb.runQuery(queryString)
                if transfersResultSet is not None:
                    while transfersResultSet.next():
                        direction = CommunicationDirection.UNKNOWN
                        fromId = None
                        toId = None
                        fileAttachments = ArrayList()

                        if (transfersResultSet.getInt("direction") == 1):
                            direction = CommunicationDirection.OUTGOING
                            toId = transfersResultSet.getString("device")
                        else:
                            direction = CommunicationDirection.INCOMING
                            fromId = transfersResultSet.getString("device")

                        timeStamp = transfersResultSet.getLong("createtime") / 1000
                        messageArtifact = transferDbHelper.addMessage(
                                                            self._MESSAGE_TYPE,
                                                            direction,
                                                            fromId,
                                                            toId,
                                                            timeStamp,
                                                            MessageReadStatus.UNKNOWN,
                                                            None,   # subject
                                                            None,   # message text
                                                            None)   # thread id

                        # add the file as attachment
                        fileAttachments.add(FileAttachment(current_case.getSleuthkitCase(), transferDb.getDBFile().getDataSource(), transfersResultSet.getString("path")))
                        messageAttachments = MessageAttachments(fileAttachments, [])
                        transferDbHelper.addAttachments(messageArtifact, messageAttachments)

            except SQLException as ex:
                self._logger.log(Level.WARNING, "Error processing query result for transfer.", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            except TskCoreException as ex:
                self._logger.log(Level.SEVERE, "Failed to create Zapya message artifacts.", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
            except BlackboardException as ex:
                self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            except NoCurrentCaseException as ex:
                self._logger.log(Level.WARNING, "No case currently open.", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            finally:
                transferDb.close()