def to_tile_rect(rect, zoom):
    minPoint = Point.from_latitude_longitude(rect.minX, rect.minY)
    maxPoint = Point.from_latitude_longitude(rect.maxX, rect.maxY)
    minTilePoint = minPoint.pixels(zoom)
    maxTilePoint = maxPoint.pixels(zoom)
    return rect_2d(minTilePoint[0] // tile_size, minTilePoint[1] // tile_size,
                   maxTilePoint[0] // tile_size, maxTilePoint[1] // tile_size)
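# to_tile_rect relies on a rect_2d constructor and a tile_size constant that are not shown here.
# A minimal usage sketch under the assumption of a simple namedtuple and the standard 256-pixel
# web-map tiles; the real project's definitions may differ.
from collections import namedtuple
from pygeotile.point import Point

rect_2d = namedtuple('rect_2d', ['minX', 'minY', 'maxX', 'maxY'])  # assumed shape
tile_size = 256  # assumed standard tile edge in pixels

bounds = rect_2d(minX=41.84, minY=-87.68, maxX=41.88, maxY=-87.60)  # lat/lon corners, as used above
print(to_tile_rect(bounds, zoom=12))  # rectangle of tile indices covering the area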
def georeference(self):
    """Georeference the stitched tile mosaic and warp it to EPSG:4326."""
    print('Georeferencing ...')
    # tile_start = Tile.for_latitude_longitude(self.lat_start, self.lon_start, self.zoom)
    # tile_stop = Tile.for_latitude_longitude(self.lat_stop, self.lon_stop, self.zoom)
    Xs, Ys = self.tiles_in_dir()
    start_x, start_y = min(Xs), min(Ys)
    stop_x, stop_y = max(Xs), max(Ys)
    tile_start = Tile.from_google(start_x, start_y, self.zoom)
    tile_stop = Tile.from_google(stop_x, stop_y, self.zoom)
    point_start = Point.from_latitude_longitude(
        max(tile_start.bounds[0][0], tile_start.bounds[1][0]),
        min(tile_start.bounds[0][1], tile_start.bounds[1][1]))
    point_stop = Point.from_latitude_longitude(
        min(tile_stop.bounds[0][0], tile_stop.bounds[1][0]),
        max(tile_stop.bounds[0][1], tile_stop.bounds[1][1]))
    # print(point_start.meters, point_stop.meters)
    if 'bing' in self.map or 'google' in self.map:
        a_srs = 'EPSG:3857'
    else:
        a_srs = 'EPSG:4326'
    # Georeferencing
    os.system(
        "gdal_translate -of GTiff -co BIGTIFF=YES -co NUM_THREADS=8 -a_ullr " +
        str(point_start.meters[0]) + " " + str(point_start.meters[1]) + " " +
        str(point_stop.meters[0]) + " " + str(point_stop.meters[1]) + " " +
        "-a_srs " + a_srs + " " +
        self.map + "_" + self.mode + "_stitched.tif result.tif")
    # Warping with conversion to RGBA
    # --config GDAL_CACHEMAX 32000 -wm 1500
    os.system(
        "gdalwarp -dstalpha -srcnodata 0 -dstnodata 0 -overwrite -wo NUM_THREADS=8 " +
        "-co COMPRESS=PACKBITS -co BIGTIFF=YES " +
        "-s_srs " + a_srs + " -t_srs EPSG:4326 result.tif " +
        self.map + "_" + self.mode + "_gcps.tif")
    os.remove('result.tif')
    print('Georeferencing Complete!')
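# A minimal sketch of issuing the first GDAL call above through subprocess.run with an argument
# list instead of os.system, which avoids shell-quoting problems. The filenames, corner
# coordinates, and attribute values here are placeholders standing in for the instance state
# used in georeference(); only the gdal_translate flags themselves come from the code above.
import subprocess

map_name, mode, a_srs = 'google', 'satellite', 'EPSG:3857'   # assumed values
ulx, uly = -9757300.0, 5138700.0   # assumed upper-left corner in Spherical Mercator meters
lrx, lry = -9756900.0, 5138300.0   # assumed lower-right corner

subprocess.run(
    ['gdal_translate', '-of', 'GTiff', '-co', 'BIGTIFF=YES', '-co', 'NUM_THREADS=8',
     '-a_ullr', str(ulx), str(uly), str(lrx), str(lry),
     '-a_srs', a_srs,
     f'{map_name}_{mode}_stitched.tif', 'result.tif'],
    check=True)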
def _tile_at(point, zoom):
    """Return the map tile located at the given point in Spherical Mercator
    coordinates with the specified zoom level.
    """
    point = PyGeoPoint.from_meters(point[0], point[1])
    tile = PyGeoTile.for_point(point, zoom)
    return tile.google
def test_assert_pixel_y(pixel_y, zoom):
    pixel_x = 1
    with pytest.raises(AssertionError) as assertion_info:
        _ = Point.from_pixel(pixel_x=pixel_x, pixel_y=pixel_y, zoom=zoom)
    assert 'Point Y needs to be a value between 0 and (2^zoom) * 256.' in str(assertion_info.value)
def test_assert_meter_y(meter_y):
    meter_x = 0.0
    with pytest.raises(AssertionError) as assertion_info:
        _ = Point.from_meters(meter_x=meter_x, meter_y=meter_y)
    assert 'Meter Y needs to be a value between -{0} and {0}.'.format(ORIGIN_SHIFT) in str(assertion_info.value)
def test_from_meters(chicago_latitude_longitude, chicago_meters):
    meter_x, meter_y = chicago_meters
    point = Point.from_meters(meter_x=meter_x, meter_y=meter_y)
    assert point.meters == pytest.approx(chicago_meters, abs=0.1)
    assert point.latitude_longitude == pytest.approx(chicago_latitude_longitude, abs=0.1)
def test_assert_longitude(longitude):
    latitude = 0.0
    with pytest.raises(AssertionError) as assertion_info:
        _ = Point.from_latitude_longitude(latitude=latitude, longitude=longitude)
    assert 'Longitude needs to be a value between -180.0 and 180.0.' in str(assertion_info.value)
def convert_lon_lat_2_x_y(self, longitude, latitude, altitude=0.0):
    point = Point.from_latitude_longitude(latitude, longitude)
    # The conversion used internally by pygeotile to obtain Spherical Mercator meters:
    # EARTH_RADIUS = 6378137.0
    # ORIGIN_SHIFT = 2.0 * math.pi * EARTH_RADIUS / 2.0
    # meter_x = longitude * ORIGIN_SHIFT / 180.0
    # meter_y = math.log(math.tan((90.0 + latitude) * math.pi / 360.0)) / (math.pi / 180.0)
    # meter_y = meter_y * ORIGIN_SHIFT / 180.0
    # Note: the altitude argument is ignored; the z component returned is always 0.0.
    return point.meters[0], point.meters[1], 0.0
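# A minimal standalone check, assuming only math and pygeotile are available, that the formula
# quoted in the comment above reproduces Point.from_latitude_longitude(...).meters.
import math
from pygeotile.point import Point

EARTH_RADIUS = 6378137.0
ORIGIN_SHIFT = 2.0 * math.pi * EARTH_RADIUS / 2.0

latitude, longitude = 41.85, -87.65  # sample coordinates (Chicago)

# Manual WGS84 lon/lat -> Spherical Mercator conversion, mirroring the commented formula.
meter_x = longitude * ORIGIN_SHIFT / 180.0
meter_y = math.log(math.tan((90.0 + latitude) * math.pi / 360.0)) / (math.pi / 180.0)
meter_y = meter_y * ORIGIN_SHIFT / 180.0

point = Point.from_latitude_longitude(latitude, longitude)
assert abs(point.meters[0] - meter_x) < 1e-6
assert abs(point.meters[1] - meter_y) < 1e-6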
def test_from_pixel_chicago(chicago_latitude_longitude, chicago_pixel, chicago_zoom):
    pixel_x, pixel_y = chicago_pixel
    point = Point.from_pixel(pixel_x=pixel_x, pixel_y=pixel_y, zoom=chicago_zoom)
    assert point.pixels(zoom=chicago_zoom) == chicago_pixel
    assert point.latitude_longitude == pytest.approx(chicago_latitude_longitude, abs=0.2)
def _plotw(self, metric, waypoints, i, d):
    '''Plot points for debugging.'''
    i1, i2 = int(i), int(i) + 1
    x1, y1 = metric.normalize((waypoints[i1].lon, waypoints[i1].lat))
    x2, y2 = metric.normalize((waypoints[i2].lon, waypoints[i2].lat))
    k = i - i1
    xk = x1 * (1 - k) + x2 * k
    yk = y1 * (1 - k) + y2 * k
    lon, lat = metric.denormalize((xk, yk))
    print('PLOT: %s %.1f' % (Point.from_latitude_longitude(lat, lon).meters, d))
def _tiles_from_bbox(bbox, zoom_level):
    """Return all tiles covering the specified bounding box."""
    if isinstance(bbox, dict):
        point_min = Point.from_latitude_longitude(latitude=bbox['tl'], longitude=bbox['tr'])
        point_max = Point.from_latitude_longitude(latitude=bbox['bl'], longitude=bbox['br'])
    elif isinstance(bbox, list):
        point_min = Point.from_latitude_longitude(latitude=bbox[1], longitude=bbox[0])
        point_max = Point.from_latitude_longitude(latitude=bbox[3], longitude=bbox[2])
    else:
        raise RuntimeError("bbox must be either a dict or a list")
    tile_min = Tile.for_point(point_min, zoom_level)
    tile_max = Tile.for_point(point_max, zoom_level)
    tiles = []
    for x in range(tile_min.tms_x, tile_max.tms_x + 1):
        for y in range(tile_min.tms_y, tile_max.tms_y + 1):
            tiles.append(Tile.from_tms(tms_x=x, tms_y=y, zoom=zoom_level))
    return tiles
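# A usage sketch for the list form of _tiles_from_bbox. Judging from the indexing above, the
# list is assumed to be [min_lon, min_lat, max_lon, max_lat]; the sample coordinates are only
# illustrative.
from pygeotile.point import Point
from pygeotile.tile import Tile

bbox = [-87.65, 41.84, -87.60, 41.88]  # assumed [min_lon, min_lat, max_lon, max_lat]
tiles = _tiles_from_bbox(bbox, zoom_level=12)
print(len(tiles), tiles[0].google)  # number of covering tiles and the first tile's Google XY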
def _load_data(self, s, loader, data):
    ajournal, activity_group, first_timestamp, path, records = data
    timespan, warned, logged, last_timestamp = None, 0, 0, to_time(0.0)
    log.debug(f'Loading {self.record_to_db}')

    def is_event(record, *types):
        event = False
        if record.name == 'event':
            event = record.value.event == 'timer' and record.value.event_type in types
            # log.debug(f'Event: {event} for {types} ({record})')
        if event:
            log.debug(f'{types} at {record.timestamp}')
        return event

    have_timespan = any(is_event(record, 'start') for record in records)
    only_records = list(filter(lambda x: x.name == 'record', records))
    final_timestamp = only_records[-1].timestamp
    if not have_timespan:
        first_timestamp = only_records[0].timestamp
        log.warning('Experimental handling of data without timespans')
        timespan = add(s, ActivityTimespan(activity_journal=ajournal, start=first_timestamp))

    for record in records:
        if have_timespan and is_event(record, 'start'):
            if timespan:
                log.warning('Ignoring start with no corresponding stop (possible lost data?)')
            else:
                timespan = add(s, ActivityTimespan(activity_journal=ajournal,
                                                   start=record.value.timestamp,
                                                   finish=record.value.timestamp))
        elif record.name == 'record':
            if record.value.timestamp > last_timestamp:
                lat, lon, timestamp = None, None, record.value.timestamp
                # customizable loader
                for field, name, units, type in self.record_to_db:
                    value = record.data.get(field, None)
                    if logged < 3:
                        log.debug(f'{name} = {value}')
                    if value is not None:
                        value = value[0][0]
                        loader.add(name, units, None, activity_group, ajournal, value, timestamp, type)
                        if name == LATITUDE:
                            lat = value
                        elif name == LONGITUDE:
                            lon = value
                logged += 1
                # values derived from lat/lon
                if lat is not None and lon is not None:
                    x, y = Point.from_latitude_longitude(lat, lon).meters
                    loader.add(SPHERICAL_MERCATOR_X, M, None, activity_group, ajournal, x, timestamp,
                               StatisticJournalFloat)
                    loader.add(SPHERICAL_MERCATOR_Y, M, None, activity_group, ajournal, y, timestamp,
                               StatisticJournalFloat)
                    if self.add_elevation:
                        elevation = self.__oracle.elevation(lat, lon)
                        if elevation:
                            loader.add(RAW_ELEVATION, M, None, activity_group, ajournal, elevation,
                                       timestamp, StatisticJournalFloat)
            else:
                log.warning('Ignoring duplicate record data for %s at %s - some data may be missing' %
                            (path, record.value.timestamp))
            last_timestamp = record.value.timestamp
            if not have_timespan:
                ajournal.finish = record.value.timestamp
        elif have_timespan and is_event(record, 'stop_all', 'stop'):
            if timespan:
                timespan.finish = record.value.timestamp
                ajournal.finish = record.value.timestamp
                timespan = None
            else:
                log.debug('Ignoring stop with no corresponding start (possible lost data?)')

    if timespan:
        log.warning('Cleaning up dangling timespan')
        timespan.finish = final_timestamp
def _load_data(self, s, loader, data):
    ajournal, activity_group, first_timestamp, file_scan, kit, device, records = data
    timespan, warned, logged, last_timestamp = None, 0, 0, to_time(0.0)
    log.debug(f'Loading {self.record_to_db}')

    def is_event(record, *types):
        if record.name == 'event' and record.value.event == 'timer' and record.value.event_type in types:
            log.debug(f'{types} at {record.timestamp}')
            return True
        else:
            return False

    have_timespan = any(is_event(record, 'start') for record in records)
    only_records = list(filter(lambda x: x.name == 'record', records))
    final_timestamp = only_records[-1].timestamp
    if kit:
        loader.add_data(N.KIT, ajournal, kit, ajournal.start)
    if device:
        loader.add_data(N.DEVICE, ajournal, device, ajournal.start)
    self._save_name(s, ajournal, file_scan)
    self.__ajournal = ajournal
    if not have_timespan:
        first_timestamp = only_records[0].timestamp
        log.warning('Experimental handling of data without timespans')
        timespan = add(s, ActivityTimespan(activity_journal=ajournal,
                                           start=first_timestamp,
                                           finish=final_timestamp))

    for record in records:
        if have_timespan and is_event(record, 'start'):
            if timespan:
                log.warning('Ignoring start with no corresponding stop (possible lost data?)')
            else:
                timespan = add(s, ActivityTimespan(activity_journal=ajournal,
                                                   start=record.value.timestamp,
                                                   finish=record.value.timestamp))
        elif record.name == 'record':
            if record.value.timestamp > last_timestamp:
                lat, lon, timestamp = None, None, record.value.timestamp
                # elapsed time is not customizable because it needs extra processing
                loader.add_data(T.ELAPSED_TIME, ajournal,
                                (record.value.timestamp - first_timestamp).total_seconds(), timestamp)
                # customizable loader
                for field, title, units, type in self.record_to_db:
                    value = record.data.get(field, None)
                    if logged < 3:
                        log.debug(f'{title} = {value}')
                    if value is not None:
                        value = value[0][0]
                        if units == U.KM:  # internally everything uses M
                            value /= 1000
                        loader.add_data(title, ajournal, value, timestamp)
                        if title == T.LATITUDE:
                            lat = value
                        elif title == T.LONGITUDE:
                            lon = value
                logged += 1
                # values derived from lat/lon
                if lat is not None and lon is not None:
                    x, y = Point.from_latitude_longitude(lat, lon).meters
                    loader.add_data(N.SPHERICAL_MERCATOR_X, ajournal, x, timestamp)
                    loader.add_data(N.SPHERICAL_MERCATOR_Y, ajournal, y, timestamp)
                    if self.add_elevation:
                        elevation = self.__srtm1.elevation(lat, lon)
                        if elevation:
                            loader.add_data(N.SRTM1_ELEVATION, ajournal, elevation, timestamp)
            else:
                log.warning('Ignoring duplicate record data for %s at %s - some data may be missing' %
                            (file_scan.path, record.value.timestamp))
            last_timestamp = record.value.timestamp
            if not have_timespan:
                ajournal.finish = record.value.timestamp
        elif have_timespan and is_event(record, 'stop_all', 'stop'):
            if timespan:
                timespan.finish = record.value.timestamp
                ajournal.finish = record.value.timestamp
                timespan = None
            else:
                log.debug('Ignoring stop with no corresponding start (possible lost data?)')

    if timespan:
        log.warning('Cleaning up dangling timespan')
        timespan.finish = final_timestamp
def _plot(self, p):
    '''Plot points for debugging.'''
    lon, lat = p
    print('PLOT: %s' % (Point.from_latitude_longitude(lat, lon).meters,))
from pygeotile.point import Point

meter_x, meter_y, zoom = -9757148.442088600, 5138517.444985110, 19  # meters in Spherical Mercator EPSG:900913

point = Point.from_meters(meter_x=meter_x, meter_y=meter_y)

print('Pixels: ', point.pixels(zoom=zoom))      # Pixels:  (34430592, 49899136)
print('Lat/Lon: ', point.latitude_longitude)    # Lat/Lon:  (41.84987190947754, -87.64995574951166)
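# A sketch of the reverse direction using the same coordinates as the example above:
# WGS84 lat/lon back to Spherical Mercator meters and a covering map tile.
from pygeotile.point import Point
from pygeotile.tile import Tile

lat, lon, zoom = 41.84987190947754, -87.64995574951166, 19  # values from the example above

point = Point.from_latitude_longitude(latitude=lat, longitude=lon)
print('Meters: ', point.meters)       # approximately the meter_x, meter_y used above

tile = Tile.for_latitude_longitude(lat, lon, zoom)
print('Google XY: ', tile.google)     # Google tile indices covering this point at this zoom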
def _load_data(self, s, loader, data):
    ajournal, activity_group, first_timestamp, file_scan, define, records = data
    timespan, warned, logged, last_timestamp = None, 0, 0, to_time(0.0)
    log.debug(f'Loading {self.record_to_db}')

    def is_event(record, *types):
        event = False
        if record.name == 'event':
            event = record.value.event == 'timer' and record.value.event_type in types
        if event:
            log.debug(f'{types} at {record.timestamp}')
        return event

    have_timespan = any(is_event(record, 'start') for record in records)
    only_records = list(filter(lambda x: x.name == 'record', records))
    final_timestamp = only_records[-1].timestamp
    self._check_overlap(s, first_timestamp, final_timestamp, ajournal)
    self._load_define(s, define, ajournal)
    self._save_name(s, ajournal, file_scan)
    self.__ajournal = ajournal
    if not have_timespan:
        first_timestamp = only_records[0].timestamp
        log.warning('Experimental handling of data without timespans')
        timespan = add(s, ActivityTimespan(activity_journal=ajournal,
                                           start=first_timestamp,
                                           finish=final_timestamp))

    for record in records:
        if have_timespan and is_event(record, 'start'):
            if timespan:
                log.warning('Ignoring start with no corresponding stop (possible lost data?)')
            else:
                timespan = add(s, ActivityTimespan(activity_journal=ajournal,
                                                   start=record.value.timestamp,
                                                   finish=record.value.timestamp))
        elif record.name == 'record':
            if record.value.timestamp > last_timestamp:
                lat, lon, timestamp = None, None, record.value.timestamp
                # customizable loader
                for field, title, units, type in self.record_to_db:
                    value = record.data.get(field, None)
                    if logged < 3:
                        log.debug(f'{title} = {value}')
                    if value is not None:
                        value = value[0][0]
                        if units == Units.KM:  # internally everything uses M
                            value /= 1000
                        loader.add(title, units, None, ajournal, value, timestamp, type,
                                   description=f'The value of field {field} in the FIT record.')
                        if title == T.LATITUDE:
                            lat = value
                        elif title == T.LONGITUDE:
                            lon = value
                logged += 1
                # values derived from lat/lon
                if lat is not None and lon is not None:
                    x, y = Point.from_latitude_longitude(lat, lon).meters
                    loader.add(T.SPHERICAL_MERCATOR_X, Units.M, None, ajournal, x, timestamp,
                               StatisticJournalFloat, description='The WGS84 X coordinate')
                    loader.add(T.SPHERICAL_MERCATOR_Y, Units.M, None, ajournal, y, timestamp,
                               StatisticJournalFloat, description='The WGS84 Y coordinate')
                    if self.add_elevation:
                        elevation = self.__oracle.elevation(lat, lon)
                        if elevation:
                            loader.add(T.RAW_ELEVATION, Units.M, None, ajournal, elevation, timestamp,
                                       StatisticJournalFloat,
                                       description='The elevation from SRTM1 at this location')
            else:
                log.warning('Ignoring duplicate record data for %s at %s - some data may be missing' %
                            (file_scan.path, record.value.timestamp))
            last_timestamp = record.value.timestamp
            if not have_timespan:
                ajournal.finish = record.value.timestamp
        elif have_timespan and is_event(record, 'stop_all', 'stop'):
            if timespan:
                timespan.finish = record.value.timestamp
                ajournal.finish = record.value.timestamp
                timespan = None
            else:
                log.debug('Ignoring stop with no corresponding start (possible lost data?)')

    if timespan:
        log.warning('Cleaning up dangling timespan')
        timespan.finish = final_timestamp
def latlon2quadkey(self, lat, lon, zoom):
    point = Point.from_latitude_longitude(latitude=lat, longitude=lon)  # point from lat/lon in WGS84
    tile = Tile.for_latitude_longitude(point.latitude_longitude[0], point.latitude_longitude[1], zoom)
    return tile.quad_tree
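# A hypothetical standalone equivalent of latlon2quadkey, for illustration. Since
# point.latitude_longitude simply returns the input pair, the Point round-trip in the method
# above is effectively a pass-through and Tile.for_latitude_longitude can take lat/lon directly.
from pygeotile.tile import Tile

def latlon_to_quadkey(lat, lon, zoom):
    tile = Tile.for_latitude_longitude(lat, lon, zoom)
    return tile.quad_tree  # quadkey string with one character per zoom level

print(latlon_to_quadkey(41.85, -87.65, 10))  # 10-character quadkey for a point in Chicago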
def test_from_coordinates(latitude_longitude):
    latitude, longitude = latitude_longitude
    point = Point.from_latitude_longitude(latitude=latitude, longitude=longitude)
    assert point.latitude_longitude == latitude_longitude
def test_meters_to_pixels(meter_x, meter_y, expected):
    point = Point.from_meters(meter_x=meter_x, meter_y=meter_y)
    assert point.latitude_longitude == pytest.approx(expected, abs=0.1)
    assert point.meters == pytest.approx((meter_x, meter_y), abs=0.1)
def test_pixels_to_latitude_longitude(pixel_x, pixel_y, zoom, expected):
    point = Point.from_pixel(pixel_x=pixel_x, pixel_y=pixel_y, zoom=zoom)
    assert point.latitude_longitude == pytest.approx(expected, abs=0.1)
    assert point.pixels(zoom=zoom) == (pixel_x, pixel_y)
def test_pixels_to_meters(pixel_x, pixel_y, zoom, expected):
    point = Point.from_pixel(pixel_x=pixel_x, pixel_y=pixel_y, zoom=zoom)
    assert point.meters == pytest.approx(expected, abs=0.1)
    assert point.pixels(zoom=zoom) == (pixel_x, pixel_y)
def test_no_assert_meter_y(meter_y):
    meter_x = 10.0
    _ = Point.from_meters(meter_x=meter_x, meter_y=meter_y)
    assert "No assertion raised :)"
def test_no_assert_latitude(latitude):
    longitude = 10.0
    _ = Point.from_latitude_longitude(latitude=latitude, longitude=longitude)
    assert "No assertion raised :)"
def test_no_assert_pixel_y(pixel_y, zoom):
    pixel_x = 10.0
    _ = Point.from_pixel(pixel_x=pixel_x, pixel_y=pixel_y, zoom=zoom)
    assert "No assertion raised :)"