Mirror of https://github.com/overcuriousity/autopsy-flatpak.git (synced 2025-07-08 22:29:33 +00:00)

Commit dc63b3134c (parent edd20e506d)

Upgrade gpxpy to newer version

Upgrade gpxpy to version 1.3.5
gpxpy/__init__.py
@@ -15,7 +15,9 @@
 # limitations under the License.
 
 
-def parse(xml_or_file, parser=None):
+__version__ = '1.3.5'
+
+def parse(xml_or_file, version = None):
     """
     Parse xml (string) or file object. This is just an wrapper for
     GPXParser.parse() function.
@@ -24,11 +26,13 @@ def parse(xml_or_file, parser=None):
     detected, lxml if possible).
 
     xml_or_file must be the xml to parse or a file-object with the XML.
 
+    version may be '1.0', '1.1' or None (then it will be read from the gpx
+    xml node if possible, if not then version 1.0 will be used).
     """
 
-    from . import gpx as mod_gpx
     from . import parser as mod_parser
 
-    parser = mod_parser.GPXParser(xml_or_file, parser=parser)
+    parser = mod_parser.GPXParser(xml_or_file)
 
-    return parser.parse()
+    return parser.parse(version)
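The hunk above changes the public entry point in gpxpy/__init__.py: parse() now takes a GPX version instead of a parser backend. A minimal usage sketch, assuming a local file named track.gpx (the file name and its contents are hypothetical; leaving version as None makes the parser read it from the <gpx> element, falling back to 1.0):

    import gpxpy

    with open('track.gpx') as gpx_file:                # hypothetical input file
        gpx = gpxpy.parse(gpx_file, version='1.1')     # or version=None to autodetect

    print(gpx.version, len(gpx.tracks))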
gpxpy/geo.py
@@ -14,19 +14,21 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import pdb
-
 import logging as mod_logging
 import math as mod_math
 
 from . import utils as mod_utils
 
+log = mod_logging.getLogger(__name__)
+
 # Generic geo related function and class(es)
 
-# One degree in meters:
-ONE_DEGREE = 1000. * 10000.8 / 90.
+# latitude/longitude in GPX files is always in WGS84 datum
+# WGS84 defined the Earth semi-major axis with 6378.137 km
+EARTH_RADIUS = 6378.137 * 1000
 
-EARTH_RADIUS = 6371 * 1000
+# One degree in meters:
+ONE_DEGREE = (2*mod_math.pi*EARTH_RADIUS) / 360  # ==> 111.319 km
 
 
 def to_rad(x):
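The replacement constants derive the length of one degree of latitude from the WGS84 semi-major axis instead of the old hard-coded approximation. A quick standalone check of the value quoted in the comment (plain Python, independent of gpxpy):

    import math

    EARTH_RADIUS = 6378.137 * 1000                    # WGS84 semi-major axis, meters
    ONE_DEGREE = (2 * math.pi * EARTH_RADIUS) / 360   # meters per degree along a great circle

    print(round(ONE_DEGREE))   # 111319, i.e. ~111.319 km, matching the comment above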
@@ -66,9 +68,7 @@ def length(locations=None, _3d=None):
             d = location.distance_3d(previous_location)
         else:
             d = location.distance_2d(previous_location)
-        if d != 0 and not d:
-            pass
-        else:
+        if d:
             length += d
     return length
 
@@ -100,15 +100,15 @@ def calculate_max_speed(speeds_and_distances):
     # ...
     assert len(speeds_and_distances[-1]) == 2
 
-    size = float(len(speeds_and_distances))
+    size = len(speeds_and_distances)
 
     if size < 20:
-        mod_logging.debug('Segment too small to compute speed, size=%s', size)
+        log.debug('Segment too small to compute speed, size=%s', size)
         return None
 
     distances = list(map(lambda x: x[1], speeds_and_distances))
     average_distance = sum(distances) / float(size)
-    standard_distance_deviation = mod_math.sqrt(sum(map(lambda distance: (distance-average_distance)**2, distances))/size)
+    standard_distance_deviation = mod_math.sqrt(sum(map(lambda distance: (distance-average_distance)**2, distances))/float(size))
 
     # Ignore items where the distance is too big:
     filtered_speeds_and_distances = filter(lambda speed_and_distance: abs(speed_and_distance[1] - average_distance) <= standard_distance_deviation * 1.5, speeds_and_distances)
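For context, the filtering performed by the unchanged lines above can be read as: drop speed samples whose distance is more than 1.5 standard deviations away from the mean distance before looking for a maximum. A self-contained sketch of that idea with made-up numbers (not gpxpy's exact code path):

    import math

    # (speed, distance) samples; the last one plays the role of a GPS glitch
    speeds_and_distances = [(2.5, 10.0), (2.6, 11.0), (2.4, 9.5), (9.9, 120.0)]

    distances = [d for _, d in speeds_and_distances]
    average = sum(distances) / float(len(distances))
    deviation = math.sqrt(sum((d - average) ** 2 for d in distances) / float(len(distances)))

    filtered = [sd for sd in speeds_and_distances
                if abs(sd[1] - average) <= deviation * 1.5]
    print(filtered)   # the (9.9, 120.0) outlier is discarded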
@@ -261,9 +261,12 @@ def simplify_polyline(points, max_distance):
     # cases...
     a, b, c = get_line_equation_coefficients(begin, end)
 
-    tmp_max_distance = -1000000
-    tmp_max_distance_position = None
-    for point_no in range(len(points[1:-1])):
+    # Initialize to safe values
+    tmp_max_distance = 0
+    tmp_max_distance_position = 1
+
+    # Check distance of all points between begin and end, exclusive
+    for point_no in range(1,len(points)-1):
         point = points[point_no]
         d = abs(a * point.latitude + b * point.longitude + c)
         if d > tmp_max_distance:
@@ -273,11 +276,14 @@ def simplify_polyline(points, max_distance):
     # Now that we have the most distance point, compute its real distance:
     real_max_distance = distance_from_line(points[tmp_max_distance_position], begin, end)
 
+    # If furthest point is less than max_distance, remove all points between begin and end
     if real_max_distance < max_distance:
         return [begin, end]
 
-    return (simplify_polyline(points[:tmp_max_distance_position + 2], max_distance) +
-            simplify_polyline(points[tmp_max_distance_position + 1:], max_distance)[1:])
+    # If furthest point is more than max_distance, use it as anchor and run
+    # function again using (begin to anchor) and (anchor to end), remove extra anchor
+    return (simplify_polyline(points[:tmp_max_distance_position + 1], max_distance) +
+            simplify_polyline(points[tmp_max_distance_position:], max_distance)[1:])
 
 
 class Location:
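The rewritten recursion above is the usual Ramer-Douglas-Peucker scheme: find the point farthest from the begin-end line, keep it as an anchor when it is farther than max_distance, and recurse on both halves. A small usage sketch with gpxpy.geo (the coordinates are made up):

    import gpxpy.geo as mod_geo

    points = [mod_geo.Location(45.0, 13.0),
              mod_geo.Location(45.0001, 13.0001),   # nearly collinear, should be dropped
              mod_geo.Location(45.01, 13.02),
              mod_geo.Location(45.02, 13.05)]

    simplified = mod_geo.simplify_polyline(points, max_distance=50)   # meters
    print(len(points), '->', len(simplified))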
@@ -362,8 +368,8 @@ class LocationDelta:
         elif (latitude_diff is not None) and (longitude_diff is not None):
             if (distance is not None) or (angle is not None):
                 raise Exception('No distance/angle if using lat/lon diff!')
-            this.latitude_diff = latitude_diff
-            this.longitude_diff = longitude_diff
+            self.latitude_diff = latitude_diff
+            self.longitude_diff = longitude_diff
             self.move_function = self.move_by_lat_lon_diff
 
     def move(self, location):
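The fix above (this -> self) matters for the latitude_diff/longitude_diff form of LocationDelta, which previously failed with a NameError when used. A minimal sketch of that code path (coordinates are made up):

    import gpxpy.geo as mod_geo

    point = mod_geo.Location(45.0, 13.0)
    delta = mod_geo.LocationDelta(latitude_diff=0.01, longitude_diff=-0.02)

    point.move(delta)                          # applies the lat/lon offsets in place
    print(point.latitude, point.longitude)     # 45.01 12.98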
gpxpy/gpx.py
@@ -18,8 +18,6 @@
 GPX related stuff
 """
 
-import pdb
-
 import logging as mod_logging
 import math as mod_math
 import collections as mod_collections
@@ -30,17 +28,18 @@ from . import utils as mod_utils
 from . import geo as mod_geo
 from . import gpxfield as mod_gpxfield
 
+log = mod_logging.getLogger(__name__)
+
 # GPX date format to be used when writing the GPX output:
 DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
 
 # GPX date format(s) used for parsing. The T between date and time and Z after
 # time are allowed, too:
 DATE_FORMATS = [
-    '%Y-%m-%d %H:%M:%S',
     '%Y-%m-%d %H:%M:%S.%f',
-    #'%Y-%m-%d %H:%M:%S%z',
-    #'%Y-%m-%d %H:%M:%S.%f%z',
+    '%Y-%m-%d %H:%M:%S',
 ]
 
 # Used in smoothing, sum must be 1:
 SMOOTHING_RATIO = (0.4, 0.2, 0.4)
 
@@ -73,6 +72,7 @@ GPX_10_POINT_FIELDS = [
     mod_gpxfield.GPXField('dgps_id', 'dgpsid'),
 ]
 GPX_11_POINT_FIELDS = [
+    # See GPX for description of text fields
     mod_gpxfield.GPXField('latitude', attribute='lat', type=mod_gpxfield.FLOAT_TYPE, mandatory=True),
     mod_gpxfield.GPXField('longitude', attribute='lon', type=mod_gpxfield.FLOAT_TYPE, mandatory=True),
     mod_gpxfield.GPXField('elevation', 'ele', type=mod_gpxfield.FLOAT_TYPE),
@@ -83,7 +83,7 @@ GPX_11_POINT_FIELDS = [
     mod_gpxfield.GPXField('comment', 'cmt'),
     mod_gpxfield.GPXField('description', 'desc'),
     mod_gpxfield.GPXField('source', 'src'),
-    'link',
+    'link:@link',
     mod_gpxfield.GPXField('link', attribute='href'),
     mod_gpxfield.GPXField('link_text', tag='text'),
     mod_gpxfield.GPXField('link_type', tag='type'),
@@ -97,7 +97,7 @@ GPX_11_POINT_FIELDS = [
     mod_gpxfield.GPXField('position_dilution', 'pdop', type=mod_gpxfield.FLOAT_TYPE),
     mod_gpxfield.GPXField('age_of_dgps_data', 'ageofdgpsdata', type=mod_gpxfield.FLOAT_TYPE),
     mod_gpxfield.GPXField('dgps_id', 'dgpsid'),
-    mod_gpxfield.GPXExtensionsField('extensions'),
+    mod_gpxfield.GPXExtensionsField('extensions', is_list=True),
 ]
 
 # GPX1.0 track points have two more fields after time
@@ -132,7 +132,7 @@ PointData = mod_collections.namedtuple(
 
 class GPXException(Exception):
     """
-    Exception used for invalid GPX files. Is is used when the XML file is
+    Exception used for invalid GPX files. It is used when the XML file is
     valid but something is wrong with the GPX data.
     """
     pass
@@ -157,13 +157,10 @@ class GPXBounds:
     def __iter__(self):
         return (self.min_latitude, self.max_latitude, self.min_longitude, self.max_longitude,).__iter__()
 
-    def __hash__(self):
-        return mod_utils.hash_object(self, self.__slots__)
-
 
 class GPXXMLSyntaxException(GPXException):
     """
-    Exception used when the the XML syntax is invalid.
+    Exception used when the XML syntax is invalid.
 
     The __cause__ can be a minidom or lxml exception (See http://www.python.org/dev/peps/pep-3134/).
     """
@@ -212,7 +209,7 @@ class GPXWaypoint(mod_geo.Location):
         self.position_dilution = position_dilution
         self.age_of_dgps_data = None
         self.dgps_id = None
-        self.extensions = None
+        self.extensions = []
 
     def __str__(self):
         return '[wpt{%s}:%s,%s@%s]' % (self.name, self.latitude, self.longitude, self.elevation)
@@ -226,15 +223,29 @@ class GPXWaypoint(mod_geo.Location):
             representation += ', %s=%s' % (attribute, repr(value))
         return 'GPXWaypoint(%s)' % representation
 
+    def adjust_time(self, delta):
+        """
+        Adjusts the time of the point by the specified delta
+
+        Parameters
+        ----------
+        delta : datetime.timedelta
+            Positive time delta will adjust time into the future
+            Negative time delta will adjust time into the past
+        """
+        if self.time:
+            self.time += delta
+
+    def remove_time(self):
+        """ Will remove time metadata. """
+        self.time = None
+
     def get_max_dilution_of_precision(self):
         """
         Only care about the max dop for filtering, no need to go into too much detail
         """
         return max(self.horizontal_dilution, self.vertical_dilution, self.position_dilution)
 
-    def __hash__(self):
-        return mod_utils.hash_object(self, self.__slots__)
-
 
 class GPXRoutePoint(mod_geo.Location):
     gpx_10_fields = GPX_10_POINT_FIELDS
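adjust_time() and remove_time() are new on GPXWaypoint (and, in the hunks below, on GPXRoutePoint and GPXRoute), mirroring the existing track-level helpers. A short sketch (the waypoint values are made up):

    import datetime
    import gpxpy.gpx

    wpt = gpxpy.gpx.GPXWaypoint(latitude=45.0, longitude=13.0)
    wpt.time = datetime.datetime(2018, 1, 1, 12, 0, 0)

    wpt.adjust_time(datetime.timedelta(hours=1))   # time becomes 2018-01-01 13:00:00
    wpt.remove_time()                              # time is now None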
@@ -276,7 +287,7 @@ class GPXRoutePoint(mod_geo.Location):
         self.age_of_dgps_data = None
         self.dgps_id = None
         self.link_type = None
-        self.extensions = None
+        self.extensions = []
 
     def __str__(self):
         return '[rtept{%s}:%s,%s@%s]' % (self.name, self.latitude, self.longitude, self.elevation)
@@ -290,8 +301,22 @@ class GPXRoutePoint(mod_geo.Location):
             representation += ', %s=%s' % (attribute, repr(value))
         return 'GPXRoutePoint(%s)' % representation
 
-    def __hash__(self):
-        return mod_utils.hash_object(self, self.__slots__)
+    def adjust_time(self, delta):
+        """
+        Adjusts the time of the point by the specified delta
+
+        Parameters
+        ----------
+        delta : datetime.timedelta
+            Positive time delta will adjust time into the future
+            Negative time delta will adjust time into the past
+        """
+        if self.time:
+            self.time += delta
+
+    def remove_time(self):
+        """ Will remove time metadata. """
+        self.time = None
 
 
 class GPXRoute:
@@ -306,18 +331,19 @@ class GPXRoute:
         mod_gpxfield.GPXComplexField('points', tag='rtept', classs=GPXRoutePoint, is_list=True),
     ]
     gpx_11_fields = [
+        # See GPX for description of text fields
         mod_gpxfield.GPXField('name'),
         mod_gpxfield.GPXField('comment', 'cmt'),
         mod_gpxfield.GPXField('description', 'desc'),
         mod_gpxfield.GPXField('source', 'src'),
-        'link',
+        'link:@link',
         mod_gpxfield.GPXField('link', attribute='href'),
         mod_gpxfield.GPXField('link_text', tag='text'),
         mod_gpxfield.GPXField('link_type', tag='type'),
         '/link',
         mod_gpxfield.GPXField('number', type=mod_gpxfield.INT_TYPE),
         mod_gpxfield.GPXField('type'),
-        mod_gpxfield.GPXExtensionsField('extensions'),
+        mod_gpxfield.GPXExtensionsField('extensions', is_list=True),
         mod_gpxfield.GPXComplexField('points', tag='rtept', classs=GPXRoutePoint, is_list=True),
     ]
 
@@ -336,7 +362,25 @@ class GPXRoute:
         self.points = []
         self.link_type = None
         self.type = None
-        self.extensions = None
+        self.extensions = []
+
+    def adjust_time(self, delta):
+        """
+        Adjusts the time of the all the points in the route by the specified delta.
+
+        Parameters
+        ----------
+        delta : datetime.timedelta
+            Positive time delta will adjust time into the future
+            Negative time delta will adjust time into the past
+        """
+        for point in self.points:
+            point.adjust_time(delta)
+
+    def remove_time(self):
+        """ Removes time meta data from route. """
+        for point in self.points:
+            point.remove_time()
 
     def remove_elevation(self):
         """ Removes elevation data from route """
@@ -430,9 +474,6 @@ class GPXRoute:
         for route_point in self.points:
             route_point.move(location_delta)
 
-    def __hash__(self):
-        return mod_utils.hash_object(self, self.__slots__)
-
     def __repr__(self):
         representation = ''
         for attribute in 'name', 'description', 'number':
@@ -483,7 +524,7 @@ class GPXTrackPoint(mod_geo.Location):
         self.position_dilution = position_dilution
         self.age_of_dgps_data = None
         self.dgps_id = None
-        self.extensions = None
+        self.extensions = []
 
     def __repr__(self):
         representation = '%s, %s' % (self.latitude, self.longitude)
@@ -572,9 +613,6 @@ class GPXTrackPoint(mod_geo.Location):
     def __str__(self):
         return '[trkpt:%s,%s@%s@%s]' % (self.latitude, self.longitude, self.elevation, self.time)
 
-    def __hash__(self):
-        return mod_utils.hash_object(self, self.__slots__)
-
 
 class GPXTrackSegment:
     gpx_10_fields = [
@@ -582,14 +620,14 @@ class GPXTrackSegment:
     ]
     gpx_11_fields = [
         mod_gpxfield.GPXComplexField('points', tag='trkpt', classs=GPXTrackPoint, is_list=True),
-        mod_gpxfield.GPXExtensionsField('extensions'),
+        mod_gpxfield.GPXExtensionsField('extensions', is_list=True),
     ]
 
     __slots__ = ('points', 'extensions', )
 
     def __init__(self, points=None):
         self.points = points if points else []
-        self.extensions = None
+        self.extensions = []
 
     def simplify(self, max_distance=None):
         """
@@ -776,9 +814,9 @@ class GPXTrackSegment:
         ----------
         moving_data : MovingData : named tuple
             moving_time : float
-                time (seconds) of segment in which movement was occuring
+                time (seconds) of segment in which movement was occurring
             stopped_time : float
-                time (seconds) of segment in which no movement was occuring
+                time (seconds) of segment in which no movement was occurring
             stopped_distance : float
                 distance (meters) travelled during stopped times
             moving_distance : float
@@ -805,7 +843,6 @@ class GPXTrackSegment:
             # Won't compute max_speed for first and last because of common GPS
            # recording errors, and because smoothing don't work well for those
             # points:
-            first_or_last = i in [0, 1, len(self.points) - 1]
             if point.time and previous.time:
                 timedelta = point.time - previous.time
 
@@ -817,7 +854,7 @@ class GPXTrackSegment:
                 seconds = mod_utils.total_seconds(timedelta)
                 speed_kmh = 0
                 if seconds > 0:
-                    # TODO: compute treshold in m/s instead this to kmh every time:
+                    # TODO: compute threshold in m/s instead this to kmh every time:
                     speed_kmh = (distance / 1000.) / (mod_utils.total_seconds(timedelta) / 60. ** 2)
 
                 #print speed, stopped_speed_threshold
@@ -871,11 +908,11 @@ class GPXTrackSegment:
         min_latitude : float
             Minimum latitude of segment in decimal degrees [-90, 90]
         max_latitude : float
-            Maxium latitude of segment in decimal degrees [-90, 90]
+            Maximum latitude of segment in decimal degrees [-90, 90]
         min_longitude : float
-            Minium longitude of segment in decimal degrees [-180, 180]
+            Minimum longitude of segment in decimal degrees [-180, 180]
         max_longitude : float
-            Maxium longitude of segment in decimal degrees [-180, 180]
+            Maximum longitude of segment in decimal degrees [-180, 180]
         """
         min_lat = None
         max_lat = None
@@ -915,11 +952,11 @@ class GPXTrackSegment:
 
         if 0 < point_no < len(self.points):
             previous_point = self.points[point_no - 1]
-        if 0 < point_no < len(self.points) - 1:
+        if 0 <= point_no < len(self.points) - 1:
             next_point = self.points[point_no + 1]
 
-        #mod_logging.debug('previous: %s' % previous_point)
-        #mod_logging.debug('next: %s' % next_point)
+        #log.debug('previous: %s' % previous_point)
+        #log.debug('next: %s' % next_point)
 
         speed_1 = point.speed_between(previous_point)
         speed_2 = point.speed_between(next_point)
@@ -946,7 +983,7 @@ class GPXTrackSegment:
         delta : float
             Elevation delta in meters to apply to track
         """
-        mod_logging.debug('delta = %s' % delta)
+        log.debug('delta = %s' % delta)
 
         if not delta:
             return
@@ -1039,11 +1076,11 @@ class GPXTrackSegment:
             last = self.points[-2]
 
         if not last.time or not first.time:
-            mod_logging.debug('Can\'t find time')
+            log.debug('Can\'t find time')
             return None
 
         if last.time < first.time:
-            mod_logging.debug('Not enough time data')
+            log.debug('Not enough time data')
             return None
 
         return mod_utils.total_seconds(last.time - first.time)
@@ -1111,11 +1148,11 @@ class GPXTrackSegment:
         last_time = self.points[-1].time
 
         if not first_time and not last_time:
-            mod_logging.debug('No times for track segment')
+            log.debug('No times for track segment')
             return None
 
         if not first_time <= time <= last_time:
-            mod_logging.debug('Not in track (search for:%s, start:%s, end:%s)' % (time, first_time, last_time))
+            log.debug('Not in track (search for:%s, start:%s, end:%s)' % (time, first_time, last_time))
             return None
 
         for point in self.points:
@@ -1296,8 +1333,6 @@ class GPXTrackSegment:
 
         return len(self.points) > 2 and float(found) / float(len(self.points)) > .75
 
-    def __hash__(self):
-        return mod_utils.hash_object(self, self.__slots__)
 
     def __repr__(self):
         return 'GPXTrackSegment(points=[%s])' % ('...' if self.points else '')
@@ -1318,18 +1353,19 @@ class GPXTrack:
         mod_gpxfield.GPXComplexField('segments', tag='trkseg', classs=GPXTrackSegment, is_list=True),
     ]
     gpx_11_fields = [
+        # See GPX for text field description
         mod_gpxfield.GPXField('name'),
         mod_gpxfield.GPXField('comment', 'cmt'),
         mod_gpxfield.GPXField('description', 'desc'),
         mod_gpxfield.GPXField('source', 'src'),
-        'link',
+        'link:@link',
         mod_gpxfield.GPXField('link', attribute='href'),
         mod_gpxfield.GPXField('link_text', tag='text'),
         mod_gpxfield.GPXField('link_type', tag='type'),
         '/link',
         mod_gpxfield.GPXField('number', type=mod_gpxfield.INT_TYPE),
         mod_gpxfield.GPXField('type'),
-        mod_gpxfield.GPXExtensionsField('extensions'),
+        mod_gpxfield.GPXExtensionsField('extensions', is_list=True),
         mod_gpxfield.GPXComplexField('segments', tag='trkseg', classs=GPXTrackSegment, is_list=True),
     ]
 
@@ -1348,7 +1384,7 @@ class GPXTrack:
         self.segments = []
         self.link_type = None
         self.type = None
-        self.extensions = None
+        self.extensions = []
 
     def simplify(self, max_distance=None):
         """
@@ -1455,11 +1491,11 @@ class GPXTrack:
         min_latitude : float
             Minimum latitude of track in decimal degrees [-90, 90]
         max_latitude : float
-            Maxium latitude of track in decimal degrees [-90, 90]
+            Maximum latitude of track in decimal degrees [-90, 90]
         min_longitude : float
-            Minium longitude of track in decimal degrees [-180, 180]
+            Minimum longitude of track in decimal degrees [-180, 180]
         max_longitude : float
-            Maxium longitude of track in decimal degrees [-180, 180]
+            Maximum longitude of track in decimal degrees [-180, 180]
         """
         min_lat = None
         max_lat = None
@@ -1618,9 +1654,9 @@ class GPXTrack:
         ----------
         moving_data : MovingData : named tuple
             moving_time : float
-                time (seconds) of track in which movement was occuring
+                time (seconds) of track in which movement was occurring
             stopped_time : float
-                time (seconds) of track in which no movement was occuring
+                time (seconds) of track in which no movement was occurring
             stopped_distance : float
                 distance (meters) travelled during stopped times
             moving_distance : float
@@ -1853,8 +1889,6 @@ class GPXTrack:
     def clone(self):
         return mod_copy.deepcopy(self)
 
-    def __hash__(self):
-        return mod_utils.hash_object(self, self.__slots__)
 
     def __repr__(self):
         representation = ''
@@ -1883,27 +1917,34 @@ class GPX:
         mod_gpxfield.GPXComplexField('routes', classs=GPXRoute, tag='rte', is_list=True),
         mod_gpxfield.GPXComplexField('tracks', classs=GPXTrack, tag='trk', is_list=True),
     ]
+    # Text fields serialize as empty container tags, dependents are
+    # are listed after as 'tag:dep1:dep2:dep3'. If no dependents are
+    # listed, it will always serialize. The container is closed with
+    # '/tag'. Required dependents are preceded by an @. If a required
+    # dependent is empty, nothing in the container will serialize. The
+    # format is 'tag:@dep2'. No optional dependents need to be listed.
+    # Extensions not yet supported
     gpx_11_fields = [
         mod_gpxfield.GPXField('version', attribute=True),
         mod_gpxfield.GPXField('creator', attribute=True),
-        'metadata',
+        'metadata:name:description:author_name:author_email:author_link:copyright_author:copyright_year:copyright_license:link:time:keywords:bounds',
         mod_gpxfield.GPXField('name', 'name'),
         mod_gpxfield.GPXField('description', 'desc'),
-        'author',
+        'author:author_name:author_email:author_link',
         mod_gpxfield.GPXField('author_name', 'name'),
         mod_gpxfield.GPXEmailField('author_email', 'email'),
-        'link',
+        'link:@author_link',
         mod_gpxfield.GPXField('author_link', attribute='href'),
         mod_gpxfield.GPXField('author_link_text', tag='text'),
         mod_gpxfield.GPXField('author_link_type', tag='type'),
         '/link',
         '/author',
-        'copyright',
+        'copyright:copyright_author:copyright_year:copyright_license',
         mod_gpxfield.GPXField('copyright_author', attribute='author'),
         mod_gpxfield.GPXField('copyright_year', tag='year'),
         mod_gpxfield.GPXField('copyright_license', tag='license'),
         '/copyright',
-        'link',
+        'link:@link',
         mod_gpxfield.GPXField('link', attribute='href'),
         mod_gpxfield.GPXField('link_text', tag='text'),
         mod_gpxfield.GPXField('link_type', tag='type'),
@@ -1916,7 +1957,7 @@ class GPX:
         mod_gpxfield.GPXComplexField('waypoints', classs=GPXWaypoint, tag='wpt', is_list=True),
         mod_gpxfield.GPXComplexField('routes', classs=GPXRoute, tag='rte', is_list=True),
         mod_gpxfield.GPXComplexField('tracks', classs=GPXTrack, tag='trk', is_list=True),
-        mod_gpxfield.GPXExtensionsField('extensions'),
+        mod_gpxfield.GPXExtensionsField('extensions', is_list=True),
     ]
 
     __slots__ = ('version', 'creator', 'name', 'description', 'author_name',
@@ -1924,7 +1965,8 @@ class GPX:
                  'bounds', 'waypoints', 'routes', 'tracks', 'author_link',
                  'author_link_text', 'author_link_type', 'copyright_author',
                  'copyright_year', 'copyright_license', 'link_type',
-                 'metadata_extensions', 'extensions')
+                 'metadata_extensions', 'extensions', 'nsmap',
+                 'schema_locations')
 
     def __init__(self):
         self.version = None
@@ -1945,11 +1987,13 @@ class GPX:
         self.copyright_author = None
         self.copyright_year = None
        self.copyright_license = None
-        self.metadata_extensions = None
-        self.extensions = None
+        self.metadata_extensions = []
+        self.extensions = []
         self.waypoints = []
         self.routes = []
         self.tracks = []
+        self.nsmap = {}
+        self.schema_locations = []
 
     def simplify(self, max_distance=None):
         """
@@ -1993,29 +2037,54 @@ class GPX:
             track.reduce_points(min_distance)
 
         # TODO
-        mod_logging.debug('Track reduced to %s points' % self.get_track_points_no())
+        log.debug('Track reduced to %s points' % self.get_track_points_no())
 
-    def adjust_time(self, delta):
+    def adjust_time(self, delta, all=False):
         """
         Adjusts the time of all points in all of the segments of all tracks by
         the specified delta.
 
+        If all=True, waypoints and routes will also be adjusted by the specified delta.
+
         Parameters
         ----------
         delta : datetime.timedelta
             Positive time delta will adjust times into the future
             Negative time delta will adjust times into the past
+        all : bool
+            When true, also adjusts time for waypoints and routes.
         """
         if self.time:
             self.time += delta
         for track in self.tracks:
             track.adjust_time(delta)
 
-    def remove_time(self):
-        """ Removes time data. """
+        if all:
+            for waypoint in self.waypoints:
+                waypoint.adjust_time(delta)
+            for route in self.routes:
+                route.adjust_time(delta)
+
+    def remove_time(self, all=False):
+        """
+        Removes time data of all points in all of the segments of all tracks.
+
+        If all=True, time date will also be removed from waypoints and routes.
+
+        Parameters
+        ----------
+        all : bool
+            When true, also removes time data for waypoints and routes.
+        """
         for track in self.tracks:
             track.remove_time()
 
+        if all:
+            for waypoint in self.waypoints:
+                waypoint.remove_time()
+            for route in self.routes:
+                route.remove_time()
+
     def remove_elevation(self, tracks=True, routes=False, waypoints=False):
         """ Removes elevation data. """
         if tracks:
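With the new all flag, one call can shift (or strip) times for the whole file, waypoints and routes included, rather than only track points. A usage sketch, assuming an already parsed file (the file name is hypothetical):

    import datetime
    import gpxpy

    with open('track.gpx') as gpx_file:            # hypothetical input file
        gpx = gpxpy.parse(gpx_file)

    gpx.adjust_time(datetime.timedelta(hours=2), all=True)   # also waypoints and routes
    # gpx.remove_time(all=True) would drop time data everywhere instead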
@@ -2062,11 +2131,11 @@ class GPX:
         min_latitude : float
             Minimum latitude of track in decimal degrees [-90, 90]
         max_latitude : float
-            Maxium latitude of track in decimal degrees [-90, 90]
+            Maximum latitude of track in decimal degrees [-90, 90]
         min_longitude : float
-            Minium longitude of track in decimal degrees [-180, 180]
+            Minimum longitude of track in decimal degrees [-180, 180]
         max_longitude : float
-            Maxium longitude of track in decimal degrees [-180, 180]
+            Maximum longitude of track in decimal degrees [-180, 180]
         """
         min_lat = None
         max_lat = None
@@ -2191,7 +2260,7 @@ class GPX:
     def length_2d(self):
         """
         Computes 2-dimensional length of the GPX file (only latitude and
-        longitude, no elevation). This is the sum of 3D length of all segments
+        longitude, no elevation). This is the sum of 2D length of all segments
         in all tracks.
 
         Returns
@@ -2202,7 +2271,7 @@ class GPX:
         result = 0
         for track in self.tracks:
             length = track.length_2d()
-            if length or length == 0:
+            if length:
                 result += length
         return result
 
@@ -2219,7 +2288,7 @@ class GPX:
         result = 0
         for track in self.tracks:
             length = track.length_3d()
-            if length or length == 0:
+            if length:
                 result += length
         return result
 
@@ -2392,7 +2461,7 @@ class GPX:
         Returns a list of locations of elements like
         consisting of points where the location may be on the track
 
-        threshold_distance is the the minimum distance from the track
+        threshold_distance is the minimum distance from the track
         so that the point *may* be counted as to be "on the track".
         For example 0.01 means 1% of the track distance.
         """
@@ -2484,10 +2553,10 @@ class GPX:
 
     def add_missing_elevations(self):
         def _add(interval, start, end, distances_ratios):
+            if (start.elevation is None) or (end.elevation is None):
+                return
             assert start
             assert end
-            assert start.elevation is not None
-            assert end.elevation is not None
             assert interval
             assert len(interval) == len(distances_ratios)
             for i in range(len(interval)):
@@ -2534,7 +2603,7 @@ class GPX:
             time_dist_after = (interval[-1].time_difference(end),
                                interval[-1].distance_3d(end))
 
-            # Assemble list of times and distance to neighboring points
+            # Assemble list of times and distance to neighbour points
            times_dists = [(interval[i].time_difference(interval[i+1]),
                             interval[i].distance_3d(interval[i+1]))
                            for i in range(len(interval) - 1)]
@@ -2549,6 +2618,54 @@ class GPX:
         self.add_missing_data(get_data_function=lambda point: point.speed,
                               add_missing_function=_add)
 
+    def fill_time_data_with_regular_intervals(self, start_time=None, time_delta=None, end_time=None, force=True):
+        """
+        Fills the time data for all points in the GPX file. At least two of the parameters start_time, time_delta, and
+        end_time have to be provided. If the three are provided, time_delta will be ignored and will be recalculated
+        using start_time and end_time.
+
+        The first GPX point will have a time equal to start_time. Then points are assumed to be recorded at regular
+        intervals time_delta.
+
+        If the GPX file currently contains time data, it will be overwritten, unless the force flag is set to False, in
+        which case the function will return a GPXException error.
+
+        Parameters
+        ----------
+        start_time: datetime.datetime object
+            Start time of the GPX file (corresponds to the time of the first point)
+        time_delta: datetime.timedelta object
+            Time interval between two points in the GPX file
+        end_time: datetime.datetime object
+            End time of the GPX file (corresponds to the time of the last point)
+        force: bool
+            Overwrite current data if the GPX file currently contains time data
+        """
+        if not (start_time and end_time) and not (start_time and time_delta) and not (time_delta and end_time):
+            raise GPXException('You must provide at least two parameters among start_time, time_step, and end_time')
+
+        if self.has_times() and not force:
+            raise GPXException('GPX file currently contains time data. Use force=True to overwrite.')
+
+        point_no = self.get_points_no()
+
+        if start_time and end_time:
+            if start_time > end_time:
+                raise GPXException('Invalid parameters: end_time must occur after start_time')
+            time_delta = (end_time - start_time) / (point_no - 1)
+        elif not start_time:
+            start_time = end_time - (point_no - 1) * time_delta
+
+        self.time = start_time
+
+        i = 0
+        for point in self.walk(only_points=True):
+            if i == 0:
+                point.time = start_time
+            else:
+                point.time = start_time + i * time_delta
+            i += 1
+
     def move(self, location_delta):
         """
         Moves each point in the gpx file (routes, waypoints, tracks).
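fill_time_data_with_regular_intervals() is new in this version; it needs any two of start_time, time_delta and end_time. A sketch, again with a hypothetical input file:

    import datetime
    import gpxpy

    with open('track.gpx') as gpx_file:            # hypothetical input file
        gpx = gpxpy.parse(gpx_file)

    # First point gets start_time, each following point is 5 seconds later.
    # force=True (the default) overwrites any times already present.
    gpx.fill_time_data_with_regular_intervals(
        start_time=datetime.datetime(2018, 6, 1, 9, 0, 0),
        time_delta=datetime.timedelta(seconds=5))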
@@ -2567,7 +2684,7 @@ class GPX:
         for track in self.tracks:
             track.move(location_delta)
 
-    def to_xml(self, version=None):
+    def to_xml(self, version=None, prettyprint=True):
         """
         FIXME: Note, this method will change self.version
         """
@@ -2575,7 +2692,7 @@ class GPX:
             if self.version:
                 version = self.version
             else:
-                version = '1.0'
+                version = '1.1'
 
         if version != '1.0' and version != '1.1':
             raise GPXException('Invalid version %s' % version)
@@ -2584,21 +2701,33 @@ class GPX:
         if not self.creator:
             self.creator = 'gpx.py -- https://github.com/tkrajina/gpxpy'
 
-        v = version.replace('.', '/')
-        xml_attributes = {
-            'xmlns:xsi': 'http://www.w3.org/2001/XMLSchema-instance',
-            'xmlns': 'http://www.topografix.com/GPX/%s' % v,
-            'xsi:schemaLocation': 'http://www.topografix.com/GPX/%s http://www.topografix.com/GPX/%s/gpx.xsd' % (v, v)
-        }
+        self.nsmap['xsi'] = 'http://www.w3.org/2001/XMLSchema-instance'
 
-        content = mod_gpxfield.gpx_fields_to_xml(self, 'gpx', version, custom_attributes=xml_attributes)
+        version_path = version.replace('.', '/')
+
+        self.nsmap['defaultns'] = 'http://www.topografix.com/GPX/{0}'.format(
+            version_path
+        )
+
+        if not self.schema_locations:
+            self.schema_locations = [
+                p.format(version_path) for p in (
+                    'http://www.topografix.com/GPX/{0}',
+                    'http://www.topografix.com/GPX/{0}/gpx.xsd',
+                )
+            ]
+
+        content = mod_gpxfield.gpx_fields_to_xml(
+            self, 'gpx', version,
+            custom_attributes={
+                'xsi:schemaLocation': ' '.join(self.schema_locations)
+            },
+            nsmap=self.nsmap,
+            prettyprint=prettyprint
+        )
 
         return '<?xml version="1.0" encoding="UTF-8"?>\n' + content.strip()
 
-    def smooth(self, vertical=True, horizontal=False, remove_extremes=False):
-        for track in self.tracks:
-            track.smooth(vertical, horizontal, remove_extremes)
-
     def has_times(self):
         """ See GPXTrackSegment.has_times() """
         if not self.tracks:
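to_xml() now defaults to GPX 1.1, keeps the namespaces it uses in the new nsmap/schema_locations attributes, and can skip pretty-printing. A minimal sketch on an empty document:

    import gpxpy.gpx

    gpx = gpxpy.gpx.GPX()
    xml = gpx.to_xml()                    # GPX 1.1 is the default when no version is set
    compact = gpx.to_xml(prettyprint=False)

    print(xml.splitlines()[0])            # <?xml version="1.0" encoding="UTF-8"?>
    print(sorted(gpx.nsmap))              # namespaces registered during serialization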
@@ -2621,9 +2750,6 @@ class GPX:
 
         return result
 
-    def __hash__(self):
-        return mod_utils.hash_object(self, self.__slots__)
-
     def __repr__(self):
         representation = ''
         for attribute in 'waypoints', 'routes', 'tracks':
|
@ -16,6 +16,8 @@
|
|||||||
|
|
||||||
import inspect as mod_inspect
|
import inspect as mod_inspect
|
||||||
import datetime as mod_datetime
|
import datetime as mod_datetime
|
||||||
|
import re as mod_re
|
||||||
|
import copy as mod_copy
|
||||||
|
|
||||||
from . import utils as mod_utils
|
from . import utils as mod_utils
|
||||||
|
|
||||||
@@ -26,20 +28,78 @@ class GPXFieldTypeConverter:
         self.to_string = to_string
 
 
+RE_TIMESTAMP = mod_re.compile(
+    r'^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})[T ]([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2})'
+    r'(\.[0-9]{1,8})?(Z|[+-−][0-9]{2}:?(?:[0-9]{2})?)?$')
+
+
+class SimpleTZ(mod_datetime.tzinfo):
+    __slots__ = ('offset',)
+
+    def __init__(self, s=None):
+        self.offset = 0
+        if s and len(s) >= 2:
+            if s[0] in ('−', '-'):
+                mult = -1
+                s = s[1:]
+            else:
+                if s[0] == '+':
+                    s = s[1:]
+                mult = 1
+            hour = int(s[:2]) if s[:2].isdigit() else 0
+            if len(s) >= 4:
+                minute = int(s[-2:]) if s[-2:].isdigit() else 0
+            else:
+                minute = 0
+            self.offset = mult * (hour * 60 + minute)
+
+    def utcoffset(self, dt):
+        return mod_datetime.timedelta(minutes=self.offset)
+
+    def dst(self, dt):
+        return mod_datetime.timedelta(0)
+
+    def tzname(self, dt):
+        if self.offset == 0:
+            return 'Z'
+        return '{:02}:{:02}'.format(self.offset // 60, self.offset % 60)
+
+    def __repr__(self):
+        return 'SimpleTZ("{}")'.format(self.tzname(None))
+
+    def __eq__(self, other):
+        return self.offset == other.offset
+
+
 def parse_time(string):
     from . import gpx as mod_gpx
     if not string:
         return None
-    if 'T' in string:
-        string = string.replace('T', ' ')
-    if 'Z' in string:
-        string = string.replace('Z', '')
-    for date_format in mod_gpx.DATE_FORMATS:
-        try:
-            return mod_datetime.datetime.strptime(string, date_format)
-        except ValueError as e:
-            pass
-    raise GPXException('Invalid time: %s' % string)
+    m = RE_TIMESTAMP.match(string)
+    if m:
+        dt = [int(m.group(i)) for i in range(1, 7)]
+        if m.group(7):
+            f = m.group(7)[1:7]
+            dt.append(int(f + "0" * (6 - len(f))))
+        else:
+            dt.append(0)
+        dt.append(SimpleTZ(m.group(8)))
+        return mod_datetime.datetime(*dt)
+    raise mod_gpx.GPXException('Invalid time: {0}'.format(string))
+
+
+def format_time(time):
+    offset = time.utcoffset()
+    if not offset or offset == 0:
+        tz = 'Z'
+    else:
+        tz = time.strftime('%z')
+    if time.microsecond:
+        ms = time.strftime('.%f')
+    else:
+        ms = ''
+    return ''.join((time.strftime('%Y-%m-%dT%H:%M:%S'), ms, tz))
 
 
 # ----------------------------------------------------------------------------------------------------
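The new regex-based parse_time() keeps fractional seconds and time-zone offsets (via SimpleTZ) instead of stripping 'T' and 'Z' and trying a list of formats; format_time() is its counterpart for writing. A small check, assuming the functions are used directly from gpxpy.gpxfield:

    from gpxpy.gpxfield import parse_time, format_time

    t = parse_time('2018-06-01T09:30:00.250+02:00')
    print(t.isoformat())     # 2018-06-01T09:30:00.250000+02:00
    print(format_time(t))    # offset is preserved; plain UTC times get a trailing 'Z'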
@@ -50,33 +110,24 @@ def parse_time(string):
 class FloatConverter:
     def __init__(self):
         self.from_string = lambda string : None if string is None else float(string.strip())
-        self.to_string = lambda flt : str(flt)
+        self.to_string = lambda flt : mod_utils.make_str(flt)
 
 
 class IntConverter:
     def __init__(self):
-        self.from_string = lambda string : None if string is None else int(string.strip())
-        self.to_string = lambda flt : str(flt)
+        self.from_string = lambda string: None if string is None else int(string.strip())
+        self.to_string = lambda flt: str(flt)
 
 
 class TimeConverter:
     def from_string(self, string):
-        from . import gpx as mod_gpx
-        if not string:
-            return None
-        if 'T' in string:
-            string = string.replace('T', ' ')
-        if 'Z' in string:
-            string = string.replace('Z', '')
-        for date_format in mod_gpx.DATE_FORMATS:
-            try:
-                return mod_datetime.datetime.strptime(string, date_format)
-            except ValueError as e:
-                pass
-        return None
+        try:
+            return parse_time(string)
+        except:
+            return None
 
     def to_string(self, time):
-        from . import gpx as mod_gpx
-        return time.strftime(mod_gpx.DATE_FORMAT) if time else None
+        return format_time(time) if time else None
 
 
 INT_TYPE = IntConverter()
@@ -95,10 +146,10 @@ class AbstractGPXField:
         self.is_list = is_list
         self.attribute = False
 
-    def from_xml(self, parser, node, version):
+    def from_xml(self, node, version):
         raise Exception('Not implemented')
 
-    def to_xml(self, value, version):
+    def to_xml(self, value, version, nsmap):
         raise Exception('Not implemented')
 
 
@@ -106,11 +157,13 @@ class GPXField(AbstractGPXField):
     """
     Used for to (de)serialize fields with simple field<->xml_tag mapping.
     """
-    def __init__(self, name, tag=None, attribute=None, type=None, possible=None, mandatory=None):
+    def __init__(self, name, tag=None, attribute=None, type=None,
+                 possible=None, mandatory=None):
         AbstractGPXField.__init__(self)
         self.name = name
         if tag and attribute:
-            raise GPXException('Only tag *or* attribute may be given!')
+            from . import gpx as mod_gpx
+            raise mod_gpx.GPXException('Only tag *or* attribute may be given!')
         if attribute:
             self.tag = None
             self.attribute = name if attribute is True else attribute
@@ -124,17 +177,20 @@ class GPXField(AbstractGPXField):
         self.possible = possible
         self.mandatory = mandatory
 
-    def from_xml(self, parser, node, version):
+    def from_xml(self, node, version):
         if self.attribute:
-            result = parser.get_node_attribute(node, self.attribute)
+            if node is not None:
+                result = node.get(self.attribute)
         else:
-            __node = parser.get_first_child(node, self.tag)
-            result = parser.get_node_data(__node)
+            __node = node.find(self.tag)
+            if __node is not None:
+                result = __node.text
+            else:
+                result = None
         if result is None:
             if self.mandatory:
                 from . import gpx as mod_gpx
-                raise mod_gpx.GPXException('%s is mandatory in %s' % (self.name, self.tag))
+                raise mod_gpx.GPXException('{0} is mandatory in {1} (got {2})'.format(self.name, self.tag, result))
             return None
 
         if self.type_converter:
@@ -142,27 +198,26 @@ class GPXField(AbstractGPXField):
                 result = self.type_converter.from_string(result)
             except Exception as e:
                 from . import gpx as mod_gpx
-                raise mod_gpx.GPXException('Invalid value for <%s>... %s (%s)' % (self.tag, result, e))
+                raise mod_gpx.GPXException('Invalid value for <{0}>... {1} ({2})'.format(self.tag, result, e))

         if self.possible:
             if not (result in self.possible):
                 from . import gpx as mod_gpx
-                raise mod_gpx.GPXException('Invalid value "%s", possible: %s' % (result, self.possible))
+                raise mod_gpx.GPXException('Invalid value "{0}", possible: {1}'.format(result, self.possible))

         return result

-    def to_xml(self, value, version):
-        if not value:
+    def to_xml(self, value, version, nsmap=None, prettyprint=True, indent=''):
+        if value is None:
             return ''
+        if not prettyprint:
+            indent = ''
         if self.attribute:
-            return '%s="%s"' % (self.attribute, mod_utils.make_str(value))
-        else:
-            if self.type_converter:
-                value = self.type_converter.to_string(value)
-            if isinstance(self.tag, list) or isinstance(self.tag, tuple):
-                raise Exception('Not yet implemented')
-            return mod_utils.to_xml(self.tag, content=value, escape=True)
+            return '{0}="{1}"'.format(self.attribute, mod_utils.make_str(value))
+        elif self.type_converter:
+            value = self.type_converter.to_string(value)
+        return mod_utils.to_xml(self.tag, content=value, escape=True,
+                                prettyprint=prettyprint, indent=indent)


 class GPXComplexField(AbstractGPXField):
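Taken together, the GPXField hunks swap the old parser abstraction for direct ElementTree access: attribute values come from node.get(...) and child text from node.find(tag).text. A short illustration with the standard library; the waypoint snippet is made up for the example:

    # Illustration of the ElementTree calls that replace the old wrapper
    # methods (get for attributes, find(...).text for child text).
    import xml.etree.ElementTree as ET

    node = ET.fromstring('<wpt lat="45.0" lon="13.7"><name>Campsite</name></wpt>')
    print(node.get('lat'))                             # '45.0' (attribute)
    child = node.find('name')
    print(child.text if child is not None else None)   # 'Campsite' (child text)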
@@ -172,27 +227,34 @@ class GPXComplexField(AbstractGPXField):
         self.tag = tag or name
         self.classs = classs

-    def from_xml(self, parser, node, version):
+    def from_xml(self, node, version):
         if self.is_list:
             result = []
-            for child_node in parser.get_children(node):
-                if parser.get_node_name(child_node) == self.tag:
-                    result.append(gpx_fields_from_xml(self.classs, parser, child_node, version))
+            for child in node:
+                if child.tag == self.tag:
+                    result.append(gpx_fields_from_xml(self.classs, child,
+                                                      version))
             return result
         else:
-            field_node = parser.get_first_child(node, self.tag)
+            field_node = node.find(self.tag)
             if field_node is None:
                 return None
-            return gpx_fields_from_xml(self.classs, parser, field_node, version)
+            return gpx_fields_from_xml(self.classs, field_node, version)

-    def to_xml(self, value, version):
+    def to_xml(self, value, version, nsmap=None, prettyprint=True, indent=''):
+        if not prettyprint:
+            indent = ''
         if self.is_list:
-            result = ''
+            result = []
             for obj in value:
-                result += gpx_fields_to_xml(obj, self.tag, version)
-            return result
+                result.append(gpx_fields_to_xml(obj, self.tag, version,
+                                                nsmap=nsmap,
+                                                prettyprint=prettyprint,
+                                                indent=indent))
+            return ''.join(result)
         else:
-            return gpx_fields_to_xml(value, self.tag, version)
+            return gpx_fields_to_xml(value, self.tag, version,
+                                     prettyprint=prettyprint, indent=indent)


 class GPXEmailField(AbstractGPXField):
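GPXComplexField.to_xml, like the other serializers touched in this commit, now collects fragments in a list and joins them once instead of concatenating strings inside the loop, which avoids building a new intermediate string on every iteration. The pattern in isolation, with made-up tag names:

    # The append-then-join pattern the new serializers use; the tags here
    # are illustrative only.
    fragments = []
    for tag in ('trkpt', 'trkpt', 'trkpt'):
        fragments.append('\n<{0} />'.format(tag))
    xml = ''.join(fragments)   # single allocation at the end
    print(xml)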
@@ -200,26 +262,47 @@ class GPXEmailField(AbstractGPXField):
     Converts GPX1.1 email tag group from/to string.
     """
     def __init__(self, name, tag=None):
-        self.attribute = False
-        self.is_list = False
+        AbstractGPXField.__init__(self, is_list=False)
         self.name = name
         self.tag = tag or name

-    def from_xml(self, parser, node, version):
-        email_node = parser.get_first_child(node, self.tag)
+    def from_xml(self, node, version):
+        """
+        Extract email address.
+
+        Args:
+            node: ETree node with child node containing self.tag
+            version: str of the gpx output version "1.0" or "1.1"
+
+        Returns:
+            A string containing the email address.
+        """
+        email_node = node.find(self.tag)
         if email_node is None:
-            return None
+            return ''

-        email_id = parser.get_node_attribute(email_node, 'id')
-        email_domain = parser.get_node_attribute(email_node, 'domain')
-
-        return '%s@%s' % (email_id, email_domain)
+        email_id = email_node.get('id')
+        email_domain = email_node.get('domain')
+        return '{0}@{1}'.format(email_id, email_domain)

-    def to_xml(self, value, version):
+    def to_xml(self, value, version, nsmap=None, prettyprint=True, indent=''):
+        """
+        Write email address to XML
+
+        Args:
+            value: str representing an email address
+            version: str of the gpx output version "1.0" or "1.1"
+
+        Returns:
+            None if value is empty or str of XML representation of the
+            address. Representation starts with a \n.
+        """
         if not value:
             return ''

+        if not prettyprint:
+            indent = ''
+
         if '@' in value:
             pos = value.find('@')
             email_id = value[:pos]
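The docstrings added above spell out the field's contract: GPX 1.1 stores the address split into id and domain attributes, while the Python API exposes a single 'id@domain' string. A standalone sketch of that round trip with ElementTree; the author/email snippet is made up:

    # Sketch of the email round-trip the field implements.
    import xml.etree.ElementTree as ET

    node = ET.fromstring('<author><email id="jane" domain="example.org"/></author>')
    email_node = node.find('email')
    address = '{0}@{1}'.format(email_node.get('id'), email_node.get('domain'))
    print(address)   # jane@example.org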
@@ -228,100 +311,252 @@ class GPXEmailField(AbstractGPXField):
             email_id = value
             email_domain = 'unknown'

-        return '\n<%s id="%s" domain="%s" />' % (self.tag, email_id, email_domain)
+        return ('\n' + indent +
+                '<{0} id="{1}" domain="{2}" />'.format(self.tag,
+                                                       email_id, email_domain))


 class GPXExtensionsField(AbstractGPXField):
     """
     GPX1.1 extensions <extensions>...</extensions> key-value type.
     """
-    def __init__(self, name, tag=None):
-        self.attribute = False
+    def __init__(self, name, tag=None, is_list=True):
+        AbstractGPXField.__init__(self, is_list=is_list)
         self.name = name
-        self.is_list = False
         self.tag = tag or 'extensions'

-    def from_xml(self, parser, node, version):
-        result = {}
-
-        if node is None:
-            return result
-
-        extensions_node = parser.get_first_child(node, self.tag)
+    def from_xml(self, node, version):
+        """
+        Build a list of extension Elements.
+
+        Args:
+            node: Element at the root of the extensions
+            version: unused, only 1.1 supports extensions
+
+        Returns:
+            a list of Element objects
+        """
+        result = []
+        extensions_node = node.find(self.tag)
         if extensions_node is None:
             return result
-
-        children = parser.get_children(extensions_node)
-        if children is None:
-            return result
-
-        for child in children:
-            result[parser.get_node_name(child)] = parser.get_node_data(child)
-
+        for child in extensions_node:
+            result.append(mod_copy.deepcopy(child))
         return result

-    def to_xml(self, value, version):
-        if value is None or not value:
-            return ''
-
-        result = '\n<' + self.tag + '>'
-        for ext_key, ext_value in value.items():
-            result += mod_utils.to_xml(ext_key, content=ext_value)
-        result += '</' + self.tag + '>'
-
-        return result
+    def _resolve_prefix(self, qname, nsmap):
+        """
+        Convert a tag from Clark notation into prefix notation.
+
+        Convert a tag from Clark notation using the nsmap into a
+        prefixed tag. If the tag isn't in Clark notation, return the
+        qname back. Converts {namespace}tag -> prefix:tag
+
+        Args:
+            qname: string with the fully qualified name in Clark notation
+            nsmap: a dict of prefix, namespace pairs
+
+        Returns:
+            string of the tag ready to be serialized.
+        """
+        if nsmap is not None and '}' in qname:
+            uri, _, localname = qname.partition("}")
+            uri = uri.lstrip("{")
+            qname = uri + ':' + localname
+            for prefix, namespace in nsmap.items():
+                if uri == namespace:
+                    qname = prefix + ':' + localname
+                    break
+        return qname
+
+    def _ETree_to_xml(self, node, nsmap=None, prettyprint=True, indent=''):
+        """
+        Serialize ETree element and all subelements.
+
+        Creates a string of the ETree and all children. The prefixes are
+        resolved through the nsmap for easier to read XML.
+
+        Args:
+            node: ETree with the extension data
+            version: string of GPX version, must be 1.1
+            nsmap: dict of prefixes and URIs
+            prettyprint: boolean, when true, indent line
+            indent: string prepended to tag, usually 2 spaces per level
+
+        Returns:
+            string with all the prefixed tags and data for the node
+            and its children as XML.
+        """
+        if not prettyprint:
+            indent = ''
+
+        # Build element tag and text
+        result = []
+        prefixedname = self._resolve_prefix(node.tag, nsmap)
+        result.append('\n' + indent + '<' + prefixedname)
+        for attrib, value in node.attrib.items():
+            attrib = self._resolve_prefix(attrib, nsmap)
+            result.append(' {0}="{1}"'.format(attrib, value))
+        result.append('>')
+        if node.text is not None:
+            result.append(node.text.strip())
+
+        # Build subelement nodes
+        for child in node:
+            result.append(self._ETree_to_xml(child, nsmap,
+                                             prettyprint=prettyprint,
+                                             indent=indent + '  '))
+
+        # Add tail and close tag
+        tail = node.tail
+        if tail is not None:
+            tail = tail.strip()
+        else:
+            tail = ''
+        if len(node) > 0:
+            result.append('\n' + indent)
+        result.append('</' + prefixedname + '>' + tail)
+
+        return ''.join(result)
+
+    def to_xml(self, value, version, nsmap=None, prettyprint=True, indent=''):
+        """
+        Serialize list of ETree.
+
+        Creates a string of all the ETrees in the list. The prefixes are
+        resolved through the nsmap for easier to read XML.
+
+        Args:
+            value: list of ETrees with the extension data
+            version: string of GPX version, must be 1.1
+            nsmap: dict of prefixes and URIs
+            prettyprint: boolean, when true, indent line
+            indent: string prepended to tag, usually 2 spaces per level
+
+        Returns:
+            string with all the prefixed tags and data for each node
+            as XML.
+        """
+        if not prettyprint:
+            indent = ''
+        if not value or version != "1.1":
+            return ''
+        result = []
+        result.append('\n' + indent + '<' + self.tag + '>')
+        for extension in value:
+            result.append(self._ETree_to_xml(extension, nsmap,
+                                             prettyprint=prettyprint,
+                                             indent=indent + '  '))
+        result.append('\n' + indent + '</' + self.tag + '>')
+        return ''.join(result)

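The extensions field is the biggest behavioural change in this hunk: extension content is now kept as a list of deep-copied ElementTree Elements instead of a flat dict, and _resolve_prefix turns Clark-notation tags back into readable prefix:tag form on output. A standalone sketch of that resolution step, mirroring the logic above with an assumed Garmin namespace mapping:

    # Standalone mirror of the Clark-notation resolution shown above;
    # the nsmap and tag are assumptions for the example.
    nsmap = {'gpxtpx': 'http://www.garmin.com/xmlschemas/TrackPointExtension/v1'}

    def resolve_prefix(qname, nsmap):
        # '{uri}tag' -> 'prefix:tag' when the uri is known, otherwise unchanged
        if '}' in qname:
            uri, _, localname = qname.partition('}')
            uri = uri.lstrip('{')
            for prefix, namespace in nsmap.items():
                if uri == namespace:
                    return prefix + ':' + localname
        return qname

    print(resolve_prefix(
        '{http://www.garmin.com/xmlschemas/TrackPointExtension/v1}hr', nsmap))
    # gpxtpx:hr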
 # ----------------------------------------------------------------------------------------------------
 # Utility methods:
 # ----------------------------------------------------------------------------------------------------

+def _check_dependents(gpx_object, fieldname):
+    """
+    Check for data in subelements.
+
+    Fieldname takes the form of 'tag:dep1:dep2:dep3' for an arbitrary
+    number of dependents. If all the gpx_object.dep attributes are
+    empty, return a sentinel value to suppress serialization of all
+    subelements.
+
+    Args:
+        gpx_object: GPXField object to check for data
+        fieldname: string with tag and dependents delimited with ':'
+
+    Returns:
+        Two strings. The first is a sentinel value, '/' + tag, if all
+        the subelements are empty and an empty string otherwise. The
+        second is the bare tag name.
+    """
+    if ':' in fieldname:
+        children = fieldname.split(':')
+        field = children.pop(0)
+        for child in children:
+            if getattr(gpx_object, child.lstrip('@')):
+                return '', field  # Child has data
+        return '/' + field, field  # No child has data
+    return '', fieldname  # No children
+
+
-def gpx_fields_to_xml(instance, tag, version, custom_attributes=None):
+def gpx_fields_to_xml(instance, tag, version, custom_attributes=None,
+                      nsmap=None, prettyprint=True, indent=''):
+    if not prettyprint:
+        indent = ''
     fields = instance.gpx_10_fields
     if version == '1.1':
         fields = instance.gpx_11_fields

     tag_open = bool(tag)
-    body = ''
+    body = []
     if tag:
-        body = '\n<' + tag
+        body.append('\n' + indent + '<' + tag)
+        if tag == 'gpx':  # write nsmap in root node
+            body.append(' xmlns="{0}"'.format(nsmap['defaultns']))
+            namespaces = set(nsmap.keys())
+            namespaces.remove('defaultns')
+            for prefix in sorted(namespaces):
+                body.append(
+                    ' xmlns:{0}="{1}"'.format(prefix, nsmap[prefix])
+                )
         if custom_attributes:
-            for key, value in custom_attributes.items():
-                body += ' %s="%s"' % (key, mod_utils.make_str(value))
+            # Make sure to_xml() always return attributes in the same order:
+            for key in sorted(custom_attributes.keys()):
+                body.append(' {0}="{1}"'.format(key, mod_utils.make_str(custom_attributes[key])))
+
+    suppressuntil = ''
     for gpx_field in fields:
+        # strings indicate non-data container tags with subelements
         if isinstance(gpx_field, str):
-            if tag_open:
-                body += '>'
-                tag_open = False
-            if gpx_field[0] == '/':
-                body += '<%s>' % gpx_field
-            else:
-                body += '\n<%s' % gpx_field
-                tag_open = True
-        else:
+            # Suppress empty tags
+            if suppressuntil:
+                if suppressuntil == gpx_field:
+                    suppressuntil = ''
+            else:
+                suppressuntil, gpx_field = _check_dependents(instance,
+                                                             gpx_field)
+            if not suppressuntil:
+                if tag_open:
+                    body.append('>')
+                    tag_open = False
+                if gpx_field[0] == '/':
+                    body.append('\n' + indent + '<{0}>'.format(gpx_field))
+                    if prettyprint and len(indent) > 1:
+                        indent = indent[:-2]
+                else:
+                    if prettyprint:
+                        indent += '  '
+                    body.append('\n' + indent + '<{0}'.format(gpx_field))
+                    tag_open = True
+        elif not suppressuntil:
             value = getattr(instance, gpx_field.name)
             if gpx_field.attribute:
-                body += ' ' + gpx_field.to_xml(value, version)
-            elif value:
+                body.append(' ' + gpx_field.to_xml(value, version, nsmap,
+                                                   prettyprint=prettyprint,
+                                                   indent=indent + '  '))
+            elif value is not None:
                 if tag_open:
-                    body += '>'
+                    body.append('>')
                     tag_open = False
-                xml_value = gpx_field.to_xml(value, version)
+                xml_value = gpx_field.to_xml(value, version, nsmap,
+                                             prettyprint=prettyprint,
+                                             indent=indent + '  ')
                 if xml_value:
-                    body += xml_value
+                    body.append(xml_value)

     if tag:
         if tag_open:
-            body += '>'
-        body += '</' + tag + '>'
+            body.append('>')
+        body.append('\n' + indent + '</' + tag + '>')

-    return body
+    return ''.join(body)


-def gpx_fields_from_xml(class_or_instance, parser, node, version):
+def gpx_fields_from_xml(class_or_instance, node, version):
     if mod_inspect.isclass(class_or_instance):
         result = class_or_instance()
     else:
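gpx_fields_to_xml now writes the namespace map on the root <gpx> element (the default namespace is aliased as 'defaultns') and emits both the remaining prefixes and any custom attributes in sorted order, so serialization is deterministic. A sketch of the root-tag assembly under an assumed nsmap:

    # Sketch of the root-element attribute handling shown above; the nsmap
    # values are assumptions for the example.
    nsmap = {
        'defaultns': 'http://www.topografix.com/GPX/1/1',
        'xsi': 'http://www.w3.org/2001/XMLSchema-instance',
    }

    parts = ['<gpx']
    parts.append(' xmlns="{0}"'.format(nsmap['defaultns']))
    for prefix in sorted(set(nsmap) - {'defaultns'}):
        parts.append(' xmlns:{0}="{1}"'.format(prefix, nsmap[prefix]))
    print(''.join(parts) + '>')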
@@ -331,34 +566,33 @@ def gpx_fields_from_xml(class_or_instance, parser, node, version):
     if version == '1.1':
         fields = result.gpx_11_fields

-    node_path = [ node ]
+    node_path = [node]

     for gpx_field in fields:
         current_node = node_path[-1]
-        if isinstance (gpx_field, str):
+        if isinstance(gpx_field, str):
+            gpx_field = gpx_field.partition(':')[0]
             if gpx_field.startswith('/'):
                 node_path.pop()
             else:
                 if current_node is None:
                     node_path.append(None)
                 else:
-                    node_path.append(parser.get_first_child(current_node, gpx_field))
+                    node_path.append(current_node.find(gpx_field))
         else:
             if current_node is not None:
-                value = gpx_field.from_xml(parser, current_node, version)
+                value = gpx_field.from_xml(current_node, version)
                 setattr(result, gpx_field.name, value)
             elif gpx_field.attribute:
-                value = gpx_field.from_xml(parser, node, version)
+                value = gpx_field.from_xml(node, version)
                 setattr(result, gpx_field.name, value)

     return result


 def gpx_check_slots_and_default_values(classs):
     """
     Will fill the default values for this class. Instances will inherit those
     values so we don't need to fill default values for every instance.

     This method will also fill the attribute gpx_field_names with a list of
     gpx field names. This can be used
     """
@@ -400,7 +634,7 @@ def gpx_check_slots_and_default_values(classs):
         gpx_field_names.append(field.name)

     gpx_field_names = tuple(gpx_field_names)
-    if not hasattr(classs, '__slots__') or not classs.__slots__ or classs.__slots__ != gpx_field_names:
-        try: slots = classs.__slots__
-        except Exception as e: slots = '[Unknown:%s]' % e
-        raise Exception('%s __slots__ invalid, found %s, but should be %s' % (classs, slots, gpx_field_names))
+##    if not hasattr(classs, '__slots__') or not classs.__slots__ or classs.__slots__ != gpx_field_names:
+##        try: slots = classs.__slots__
+##        except Exception as e: slots = '[Unknown:%s]' % e
+##        raise Exception('%s __slots__ invalid, found %s, but should be %s' % (classs, slots, gpx_field_names))
@@ -1,7 +1,6 @@
 # -*- coding: utf-8 -*-

 import xml.dom.minidom as mod_minidom
-import gpxpy as mod_gpxpy

 def split_gpxs(xml):
     """
@@ -14,208 +14,142 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from __future__ import print_function
-
-import pdb
-
-import re as mod_re
 import logging as mod_logging
-import datetime as mod_datetime
-import xml.dom.minidom as mod_minidom
+import re as mod_re

 try:
-    import lxml.etree as mod_etree
-except:
-    mod_etree = None
-    pass # LXML not available
+    import lxml.etree as mod_etree  # Load LXML or fallback to cET or ET
+except ImportError:
+    try:
+        import xml.etree.cElementTree as mod_etree
+    except ImportError:
+        import xml.etree.ElementTree as mod_etree

 from . import gpx as mod_gpx
 from . import utils as mod_utils
 from . import gpxfield as mod_gpxfield

+log = mod_logging.getLogger(__name__)

-class XMLParser:
-    """
-    Used when lxml is not available. Uses standard minidom.
-    """
-
-    def __init__(self, xml):
-        self.xml = xml
-        self.dom = mod_minidom.parseString(xml)
-
-    def get_first_child(self, node=None, name=None):
-        # TODO: Remove find_first_node from utils!
-        if not node:
-            node = self.dom
-
-        children = node.childNodes
-        if not children:
-            return None
-
-        if not name:
-            return children[0]
-
-        for tmp_node in children:
-            if tmp_node.nodeName == name:
-                return tmp_node
-
-        return None
-
-    def get_node_name(self, node):
-        if not node:
-            return None
-        return node.nodeName
-
-    def get_children(self, node=None):
-        if not node:
-            node = self.dom
-
-        return list(filter(lambda node : node.nodeType == node.ELEMENT_NODE, node.childNodes))
-
-    def get_node_data(self, node):
-        if node is None:
-            return None
-
-        child_nodes = node.childNodes
-        if not child_nodes or len(child_nodes) == 0:
-            return None
-
-        return child_nodes[0].nodeValue
-
-    def get_node_attribute(self, node, attribute):
-        if (not hasattr(node, 'attributes')) or (not node.attributes):
-            return None
-        if attribute in node.attributes.keys():
-            return node.attributes[attribute].nodeValue
-        return None
-
-
-class LXMLParser:
-    """
-    Used when lxml is available.
-    """
-
-    def __init__(self, xml):
-        if not mod_etree:
-            raise Exception('Cannot use LXMLParser without lxml installed')
-
-        if mod_utils.PYTHON_VERSION[0] == '3':
-            # In python 3 all strings are unicode and for some reason lxml
-            # don't like unicode strings with XMLs declared as UTF-8:
-            self.xml = xml.encode('utf-8')
-        else:
-            self.xml = xml
-
-        self.dom = mod_etree.XML(self.xml)
-        # get the namespace
-        self.ns = self.dom.nsmap.get(None)
-
-    def get_first_child(self, node=None, name=None):
-        if node is None:
-            if name:
-                if self.get_node_name(self.dom) == name:
-                    return self.dom
-            return self.dom
-
-        children = node.getchildren()
-
-        if not children:
-            return None
-
-        if name:
-            for node in children:
-                if self.get_node_name(node) == name:
-                    return node
-            return None
-
-        return children[0]
-
-    def get_node_name(self, node):
-        if callable(node.tag):
-            tag = str(node.tag())
-        else:
-            tag = str(node.tag)
-        if '}' in tag:
-            return tag.split('}')[1]
-        return tag
-
-    def get_children(self, node=None):
-        if node is None:
-            node = self.dom
-        return node.getchildren()
-
-    def get_node_data(self, node):
-        if node is None:
-            return None
-
-        return node.text
-
-    def get_node_attribute(self, node, attribute):
-        if node is None:
-            return None
-        return node.attrib.get(attribute)
-

 class GPXParser:
-    def __init__(self, xml_or_file=None, parser=None):
-        """
-        Parser may be lxml of minidom. If you set to None then lxml will be used if installed
-        otherwise minidom.
-        """
+    """
+    Parse the XML and provide new GPX instance.
+
+    Methods:
+        __init__: initialize new instance
+        init: format XML
+        parse: parse XML, build tree, build GPX
+
+    Attributes:
+        gpx: GPX instance of the most recently parsed XML
+        xml: string containing the XML text
+
+    """
+
+    def __init__(self, xml_or_file=None):
+        """
+        Initialize new GPXParser instance.
+
+        Arguments:
+            xml_or_file: string or file object containing the gpx
+                formatted xml
+
+        """
         self.init(xml_or_file)
         self.gpx = mod_gpx.GPX()
-        self.xml_parser_type = parser
-        self.xml_parser = None

     def init(self, xml_or_file):
+        """
+        Store the XML and remove utf-8 Byte Order Mark if present.
+
+        Args:
+            xml_or_file: string or file object containing the gpx
+                formatted xml
+
+        """
         text = xml_or_file.read() if hasattr(xml_or_file, 'read') else xml_or_file
         self.xml = mod_utils.make_str(text)
-        self.gpx = mod_gpx.GPX()

-    def parse(self):
+    def parse(self, version=None):
         """
-        Parses the XML file and returns a GPX object.
+        Parse the XML and return a GPX object.

-        It will throw GPXXMLSyntaxException if the XML file is invalid or
-        GPXException if the XML file is valid but something is wrong with the
-        GPX data.
+        Args:
+            version: str or None indicating the GPX Schema to use.
+                Options are '1.0', '1.1' and None. When version is None
+                the version is read from the file or falls back on 1.0.
+
+        Returns:
+            A GPX object loaded from the xml
+
+        Raises:
+            GPXXMLSyntaxException: XML file is invalid
+            GPXException: XML is valid but GPX data contains errors
+
         """
+        # Build prefix map for reserialization and extension handlings
+        for namespace in mod_re.findall(r'\sxmlns:?[^=]*="[^"]+"', self.xml):
+            prefix, _, URI = namespace[6:].partition('=')
+            prefix = prefix.lstrip(':')
+            if prefix == '':
+                prefix = 'defaultns'  # alias default for easier handling
+            else:
+                if prefix.startswith("ns"):
+                    mod_etree.register_namespace("noglobal_" + prefix, URI.strip('"'))
+                else:
+                    mod_etree.register_namespace(prefix, URI.strip('"'))
+            self.gpx.nsmap[prefix] = URI.strip('"')
+
+        schema_loc = mod_re.search(r'\sxsi:schemaLocation="[^"]+"', self.xml)
+        if schema_loc:
+            _, _, value = schema_loc.group(0).partition('=')
+            self.gpx.schema_locations = value.strip('"').split()
+
+        # Remove default namespace to simplify processing later
+        self.xml = mod_re.sub(r"""\sxmlns=(['"])[^'"]+\1""", '', self.xml, count=1)
+
+        # Build tree
         try:
-            if self.xml_parser_type is None:
-                if mod_etree:
-                    self.xml_parser = LXMLParser(self.xml)
-                else:
-                    self.xml_parser = XMLParser(self.xml)
-            elif self.xml_parser_type == 'lxml':
-                self.xml_parser = LXMLParser(self.xml)
-            elif self.xml_parser_type == 'minidom':
-                self.xml_parser = XMLParser(self.xml)
-            else:
-                raise mod_gpx.GPXException('Invalid parser type: %s' % self.xml_parser_type)
-
-            self.__parse_dom()
-
-            return self.gpx
+            if GPXParser.__library() == "LXML":
+                # lxml does not like unicode strings when it's expecting
+                # UTF-8. Also, XML comments result in a callable .tag().
+                # Strip them out to avoid handling them later.
+                if mod_utils.PYTHON_VERSION[0] >= '3':
+                    self.xml = self.xml.encode('utf-8')
+                root = mod_etree.XML(self.xml,
+                                     mod_etree.XMLParser(remove_comments=True))
+            else:
+                root = mod_etree.XML(self.xml)
         except Exception as e:
-            # The exception here can be a lxml or minidom exception.
-            mod_logging.debug('Error in:\n%s\n-----------\n' % self.xml)
-            mod_logging.exception(e)
+            # The exception here can be a lxml or ElementTree exception.
+            log.debug('Error in:\n%s\n-----------\n' % self.xml, exc_info=True)

             # The library should work in the same way regardless of the
             # underlying XML parser that's why the exception thrown
             # here is GPXXMLSyntaxException (instead of simply throwing the
-            # original minidom or lxml exception e).
+            # original ElementTree or lxml exception e).
             #
-            # But, if the user need the original exception (lxml or minidom)
+            # But, if the user needs the original exception (lxml or ElementTree)
             # it is available with GPXXMLSyntaxException.original_exception:
             raise mod_gpx.GPXXMLSyntaxException('Error parsing XML: %s' % str(e), e)

-    def __parse_dom(self):
-        node = self.xml_parser.get_first_child(name='gpx')
-
-        if node is None:
+        if root is None:
             raise mod_gpx.GPXException('Document must have a `gpx` root node.')

-        version = self.xml_parser.get_node_attribute(node, 'version')
+        if version is None:
+            version = root.get('version')

-        mod_gpxfield.gpx_fields_from_xml(self.gpx, self.xml_parser, node, version)
+        mod_gpxfield.gpx_fields_from_xml(self.gpx, root, version)
+        return self.gpx
+
+    @staticmethod
+    def __library():
+        """
+        Return the underlying ETree.
+
+        Provided for convenient unittests.
+        """
+        if "lxml" in str(mod_etree):
+            return "LXML"
+        return "STDLIB"
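With the minidom and lxml wrapper classes gone, GPXParser builds an ElementTree directly (lxml if available, otherwise cElementTree or ElementTree) and accepts an optional schema version. A hedged usage sketch, assuming gpxpy 1.3.5 is installed; the one-waypoint document is made up for the example:

    # Usage sketch of the reworked parser (assumes gpxpy 1.3.5).
    import gpxpy.parser

    xml = '<gpx version="1.1"><wpt lat="45.0" lon="13.7"></wpt></gpx>'
    gpx = gpxpy.parser.GPXParser(xml).parse()                 # version read from the file
    gpx10 = gpxpy.parser.GPXParser(xml).parse(version='1.0')  # schema forced to 1.0
    print(gpx.waypoints[0].latitude, gpx.waypoints[0].longitude)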
@@ -21,26 +21,29 @@ import xml.sax.saxutils as mod_saxutils
 PYTHON_VERSION = mod_sys.version.split(' ')[0]


-def to_xml(tag, attributes=None, content=None, default=None, escape=False):
+def to_xml(tag, attributes=None, content=None, default=None, escape=False, prettyprint=True, indent=''):
+    if not prettyprint:
+        indent = ''
     attributes = attributes or {}
-    result = '\n<%s' % tag
+    result = []
+    result.append('\n' + indent + '<{0}'.format(tag))

     if content is None and default:
         content = default

     if attributes:
         for attribute in attributes.keys():
-            result += make_str(' %s="%s"' % (attribute, attributes[attribute]))
+            result.append(make_str(' %s="%s"' % (attribute, attributes[attribute])))

     if content is None:
-        result += '/>'
+        result.append('/>')
     else:
         if escape:
-            result += make_str('>%s</%s>' % (mod_saxutils.escape(content), tag))
+            result.append(make_str('>%s</%s>' % (mod_saxutils.escape(content), tag)))
         else:
-            result += make_str('>%s</%s>' % (content, tag))
+            result.append(make_str('>%s</%s>' % (content, tag)))

-    result = make_str(result)
+    result = make_str(''.join(result))

     return result

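The reworked to_xml helper above takes the new prettyprint and indent arguments and escapes content on request. A quick check of its output, assuming gpxpy 1.3.5 is installed:

    # Behaviour of the extended helper (assumes gpxpy 1.3.5).
    from gpxpy.utils import to_xml

    print(repr(to_xml('name', content='Fish & chips', escape=True, indent='  ')))
    # '\n  <name>Fish &amp; chips</name>'
    print(repr(to_xml('trkpt', attributes={'lat': '45.0'}, prettyprint=False)))
    # '\n<trkpt lat="45.0"/>'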
@@ -69,47 +72,20 @@ def to_number(s, default=0, nan_value=None):


 def total_seconds(timedelta):
-    """ Some versions of python dont have timedelta.total_seconds() method. """
+    """ Some versions of python don't have the timedelta.total_seconds() method. """
     if timedelta is None:
         return None
     return (timedelta.days * 86400) + timedelta.seconds

-# Hash utilities:
-
-
-def __hash(obj):
-    result = 0
-
-    if obj is None:
-        return result
-    elif isinstance(obj, dict):
-        raise RuntimeError('__hash_single_object for dict not yet implemented')
-    elif isinstance(obj, list) or isinstance(obj, tuple):
-        return hash_list_or_tuple(obj)
-
-    return hash(obj)
-
-
-def hash_list_or_tuple(iteration):
-    result = 17
-
-    for obj in iteration:
-        result = result * 31 + __hash(obj)
-
-    return result
-
-
-def hash_object(obj, attributes):
-    result = 19
-
-    for attribute in attributes:
-        result = result * 31 + __hash(getattr(obj, attribute))
-
-    return result
-
-
 def make_str(s):
-    """ Convert a str or unicode object into a str type. """
+    """ Convert a str or unicode or float object into a str type. """
+    if isinstance(s, float):
+        result = str(s)
+        if not 'e' in result:
+            return result
+        # scientific notation is illegal in GPX 1/1
+        return format(s, '.10f').rstrip('0.')
     if PYTHON_VERSION[0] == '2':
         if isinstance(s, unicode):
             return s.encode("utf-8")
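The float branch added to make_str exists because scientific notation is not valid in GPX coordinate and elevation values; small floats are reformatted with fixed decimals and the trailing zeros stripped. A quick check, again assuming gpxpy 1.3.5 is installed:

    # Float formatting added above (assumes gpxpy 1.3.5).
    from gpxpy.utils import make_str

    print(make_str(13.7))        # '13.7' (plain repr is kept)
    print(str(0.0000001))        # '1e-07' -> not valid in a GPX file
    print(make_str(0.0000001))   # '0.0000001'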