def get_bike_trip_path(start, end, client):
    """
    Trace the path a bike trip probably took, as reported by Google Maps.

    Parameters
    ----------
    start: list
        Trip origin as [latitude, longitude] (i.e. [y, x]).
    end: list
        Trip destination as [latitude, longitude] (i.e. [y, x]).
    client: googlemaps.Client
        An initialized `googlemaps.Client`, as returned by e.g.
        `import_google_credentials()`.

    Returns
    -------
    The list of [latitude, longitude] coordinates for the given bike trip.
    """
    decoder = PolylineCodec()
    response = client.directions(start, end, mode='bicycling')
    # Only the first leg of the first suggested route is considered.
    first_leg_steps = [leg['steps'] for leg in response[0]['legs']][0]
    path = []
    for step in first_leg_steps:
        path.extend(decoder.decode(step['polyline']['points']))
    return path
def getPolylines(points, mode):
    """Fetch a Google Directions route through `points` and return one
    re-encoded polyline per leg, or None on any failure.

    NOTE: Python 2 code (print statements). API keys are rotated
    round-robin through the module-global `key_state`.
    """
    baseUrl = "https://maps.googleapis.com/maps/api/directions/json"
    fullCall = baseUrl + "?origin=%s&destination=%s&waypoints=%s&key=%s&mode=%s"
    # First and last points are origin/destination; the middle ones become
    # pipe-separated waypoints.
    origin = str(points[0][0]) + ',' + str(points[0][1])
    destination = str(points[-1][0]) + ',' + str(points[-1][1])
    keys = getAPIkeys()
    # Rotate through the available API keys to spread quota usage.
    global key_state
    key = keys[key_state]
    key_state = (key_state + 1) % len(keys)
    waypoints = '|'.join(
        map(lambda p: str(p[0]) + ',' + str(p[1]), points[1:-1]))
    r = requests.get(fullCall % (origin, destination, waypoints, key, mode))
    js = r.json()
    try:
        legs = js['routes'][0][
            'legs'] #TODO: take the first route for now. Should use driving or walking?
        polylines = []
        for leg in legs:
            # Decode every step's polyline, then re-encode the
            # concatenated points as a single polyline per leg.
            legStepPoints = [
                PolylineCodec().decode(s['polyline']['points'])
                for s in leg['steps']
            ]
            legPolyline = PolylineCodec().encode(
                [p for step in legStepPoints for p in step])
            polylines.append(legPolyline)
    except Exception as e:
        # Best-effort diagnostics; any malformed/over-quota response lands
        # here and the caller receives None.
        print e
        print js
        print "request:", origin, destination, waypoints
        return None
    return polylines
def CrearPolyline(tres):
    """Encode a coordinate sequence as a Google polyline escaped for JS.

    Backslashes in the encoded polyline are doubled so the string can be
    embedded in a JavaScript literal without breaking route rendering.
    Returns '' if encoding fails.
    """
    try:
        tresCodificada = PolylineCodec().encode(tres)
        # Double every backslash: raw polylines may contain '\', which
        # breaks the route representation in JS if left unescaped.
        ff = tresCodificada.replace("\\", "\\\\")
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt; narrowed to Exception.
        ff = ""
    return ff
def get_json(*paths):
    """Load one or more JSON files and decode each record's encoded
    'path' polyline in place, returning all records as one list."""
    combined = []
    for filename in paths:
        with open(filename, 'r') as handle:
            records = json.loads(handle.read())
        codec = PC()
        for record in records:
            record['path'] = codec.decode(record['path'])
        combined.extend(records)
    return combined
def get(self,apnk,LatLongs):
    """Return alternative driving routes between two points, each annotated
    with accident-cluster records matched by reverse-geocoded road name and
    postcode.

    apnk     -- Google API key (used for both Directions and geocoding).
    LatLongs -- JSON-encoded [origin, destination] pair.
    """
    # Adding Geolocation
    gmaps = googlemaps.Client(key=apnk)
    val = []
    cur = mysql.connection.cursor()
    # Full accident-cluster table; 'Road' is "<RoadName> <RoadType>".
    df = pd.read_sql('select Longitude, Latitude, RadiusInKM, PostcodeNo, AccidentCount,concat(RoadName,\' \', RoadType) as Road from AccidentCluster;', con=mysql.connection)
    geolocation = GoogleV3(api_key=apnk)
    RoadInfo = json.loads(LatLongs)
    directions = gmaps.directions(RoadInfo[0],RoadInfo[1],mode="driving",alternatives=True)
    polylines = [i["overview_polyline"]["points"] for i in directions]
    # Total length (m) and duration (s) per route, summed over its legs.
    routeLengths = []
    routeDurations = []
    for i in directions:
        totalLength = 0
        totalTime = 0
        for j in i["legs"]:
            totalLength += j["distance"]["value"]
            totalTime += j["duration"]["value"]
        routeLengths.append(totalLength)
        routeDurations.append(totalTime)
    for i in range(0,len(polylines)):
        list1 = PolylineCodec().decode(polylines[i])
        # Sample every 10th point to limit reverse-geocoding calls.
        list1 = list1[0::10]
        # NOTE(review): the comprehension variable `i` shadows the route
        # index; harmless under Python 3 comprehension scoping but would
        # clobber `i` on Python 2 — confirm target version.
        coordinates = [str(i[0])+", "+ str(i[1]) for i in list1]
        postcodes = []
        roadnames = []
        for k in coordinates:
            # Perform reverse geolocation for each point received in the API call
            newVal = geolocation.reverse(k)
            # Splitting the response based on commas
            roadstuff= newVal[1].address.split(', ')
            # If we get road names along with post codes follows format [road, suburb, Australia]
            if len(roadstuff) == 3:
                # Split road names based on spaces as roads may have format [number, roadname, roadtype]
                roadie = roadstuff[0].split(' ')
                if len(roadie) == 1:
                    roadie = roadie[0]
                elif len(roadie) == 2:
                    if roadie[0].isdigit():
                        roadie = roadie[1]
                    else:
                        roadie = roadie[0]+' '+roadie[1]
                else:
                    roadie = roadie[1]+ ' '+roadie[2]
                # Postcode assumed to be the last 4 chars of the suburb
                # field (Australian format) — TODO confirm.
                areaa = roadstuff[1][-4:]
                postcodes.append(areaa)
                roadnames.append(roadie)
        roadset = set(roadnames)
        postset = set(postcodes)
        Roads = list(roadset)
        Postcodes = list(postset)
        # Keep clusters on any touched road within any touched postcode.
        newdf = df[(df['Road'].isin(Roads)) & (df['PostcodeNo'].isin(Postcodes))]
        val.append({"RouteNo":i,"polyline": polylines[i], 'routeLengthInMeters':routeLengths[i], 'routeDurationInSeconds':routeDurations[i],
                    'data':newdf.to_dict(orient='records'), 'totalAccidents': str(newdf['AccidentCount'].sum())})
    finalVal = {'routes':val}
    return jsonify(finalVal)
def get_routes():
    """Flask endpoint: return routes whose polyline midpoint lies within
    `r` degrees of (lat, lng), newest first.

    Expects exactly three query arguments: lat, lng, r. Responds with a
    JSON envelope {status, status_extended, return}.
    """
    # x,y is center of screen
    args = request.args
    if len(args) != 3:
        return json.dumps({'status': 'error', 'status_extended': 'This function takes 3 arguments: lat and lng and r'})
    if 'lat' not in args.keys() or ('lng' not in args.keys()) or ('r' not in args.keys()):
        return json.dumps({'status': 'error', 'status_extended': 'This function takes 3 arguments: lat and lng and r'})
    lat = float(args['lat'])
    lng = float(args['lng'])
    r = float(args['r'])
    # Connect to SQL Server
    cnx = None
    try:
        cnx = connect()
    except sql.Error as e:
        return json.dumps({'status': 'error', 'status_extended': 'Couldnt connect to sql database'})
    cursor = cnx.cursor(dictionary=True)
    # BUG FIX: the original string concatenation was missing a space
    # before "ORDER BY", producing "... <= %sORDER BY timestamp DESC"
    # (invalid SQL).
    query = ("SELECT ST_AsText(polyline) as line, reputation, title, description, timestamp "
             "FROM routes "
             "WHERE ABS(X(ST_PointN(polyline, ST_NumPoints(polyline)/2)) - %s) <= %s AND ABS(Y(ST_PointN(polyline, ST_NumPoints(polyline)/2)) - %s) <= %s "
             "ORDER BY timestamp DESC")
    # Do the query
    cursor.execute(query, [float(lat), float(r), float(lng), float(r)])
    ret_val = cursor.fetchall()
    # Re-encode each WKT line as a Google polyline and format timestamps
    # for the JSON payload.
    for val in ret_val:
        val["line"] = PolylineCodec().encode(wkt.loads(val["line"])["coordinates"])
        val["timestamp"] = datetime.datetime.strftime(val["timestamp"], '%Y-%m-%d %H:%M:%S')
    cursor.close()
    cnx.close()
    return json.dumps({"status": 'success', 'status_extended': '', 'return': ret_val})
def seg_to_df(segment_explorer, act_type):
    """Flatten Strava segment-explorer results into a pandas DataFrame.

    NOTE: Python 2 code (print statement). Several detail fields are
    stubbed to 0; the commented-out expressions show the intended
    seg_detail values.
    """
    dflist = []
    for seg in segment_explorer:
        print 'seg id %s, seg name %s, seg dist %s' % (seg.id, seg.name, seg.distance)
        # Normalize the activity label stored in each output row.
        if act_type=='riding':
            acttype='ride'
        else:
            acttype='run'
        seg_detail = seg.segment
        newrow = {'seg_id' : int(seg.id),
                  'name' : str(seg.name),
                  'act_type' : str(acttype),
                  'elev_low' : 0, #float(seg_detail.elevation_low),
                  'elev_high' : 0, #float(seg_detail.elevation_high),
                  'start_lat' : float(seg.start_latlng[0]),
                  'start_long' : float(seg.start_latlng[1]),
                  'end_lat' : float(seg.end_latlng[0]),
                  'end_long' : float(seg.end_latlng[1]),
                  'date_created' : datetime.utcnow(), #seg_detail.created_at.replace(tzinfo=None),
                  'effort_cnt' : 0, #int(seg_detail.effort_count),
                  'ath_cnt' : 0, #int(seg_detail.athlete_count),
                  'cat' : int(seg.climb_category),
                  'elev_gain' : float(seg.elev_difference),
                  'distance' : float(seg.distance),
                  'seg_points' : str(seg.points),
                  # Decoded [(lat, lon), ...] list kept alongside the raw
                  # encoded polyline string.
                  'seg_points_decode' : PolylineCodec().decode(seg.points)
                  }
        dflist.append(newrow)
    seg_df = pd.DataFrame(dflist)
    return seg_df
def get_rebalancing_trip_path_time_estimate_tuple(start, end, client):
    """
    Given a re-balancing trip starting point, a re-balancing trip ending point, and a Google Maps client, returns a
    list of coordinates corresponding with the path that van probably took, as reported by Google Maps, as well as a
    time estimate.

    The need to return a tuple containing not just the path (as in the case of very similar `bike_tripper`) stems
    from the fact that whereas for bikes we have a precise time in transit, we have no such information for
    rebalancing van trips, meaning that we have to calculate the time taken and timing of such trips ourselves.

    Parameters
    ----------
    start: list
        The starting point coordinates, in [latitude, longitude] (or [y, x]) format.
    end: list
        The end point coordinates, in [latitude, longitude] (or [y, x]) format.
    client: googlemaps.Client
        A `googlemaps.Client` instance, as returned by e.g. `import_google_credentials()`.

    Returns
    -------
    A (coords, minutes) tuple: the list of [latitude, longitude]
    coordinates for the trip and the estimated duration in minutes.
    """
    codec = PolylineCodec()
    req = client.directions(start, end, mode='driving')
    # Get the time estimates.
    # Raw time estimate results are strings of the form "1 min", "5 mins", "1 hour 5 mins", "2 hours 5 mins", etc.
    # NOTE(review): only the first leg's steps are inspected.
    time_estimates_raw = [step['duration']['text'] for step in [leg['steps'] for leg in req[0]['legs']][0]]
    time_estimate_mins = 0
    for time_estimate_raw in time_estimates_raw:
        # Can we really get an hour+ estimate biking within the city? Possibly not but I won't risk it.
        if "min" in time_estimate_raw and "hour" not in time_estimate_raw:
            time_estimate_mins += int(time_estimate_raw.split(" ")[0])
        elif "hour" in time_estimate_raw:
            time_estimate_mins += 60 * int(time_estimate_raw.split(" ")[0])
            if "min" in time_estimate_raw:
                # "<h> hour(s) <m> min(s)": minutes are the third token.
                time_estimate_mins += int(time_estimate_raw.split(" ")[2])
        else:
            # Uh-oh. An unrecognized duration format is silently skipped.
            pass
    # Get the polylines.
    polylines = [step['polyline']['points'] for step in [leg['steps'] for leg in req[0]['legs']][0]]
    coords = []
    for polyline in polylines:
        coords += codec.decode(polyline)
    # Return
    return coords, time_estimate_mins
def get_path(polyline):
    '''Decode a polyline and split it into parallel latitude and
    longitude lists.'''
    lats = []
    lons = []
    for lat, lon in PolylineCodec().decode(polyline):
        lats.append(lat)
        lons.append(lon)
    return lats, lons
def calc_distance(encoding):
    """Total haversine length of an encoded polyline, summed over
    consecutive decoded point pairs."""
    pts = PolylineCodec().decode(encoding)
    total = 0
    for prev, curr in zip(pts, pts[1:]):
        # haversine is called as (lon1, lat1, lon2, lat2).
        total += haversine(prev[1], prev[0], curr[1], curr[0])
    return total
def decode_geometry(geometry):
    """Decode an OSRM polyline and return (lons, lats) rescaled to the
    codec's 5-digit precision."""
    decoded = PolylineCodec().decode(geometry)
    lats, lons = zip(*decoded)
    # OSRM API uses 6 digits precision, rather than Google/OpenLayers
    # default of 5, so the codec spits out lat,lon=19.9,-73.2 as
    # (199,-732) — divide by 10 to compensate.
    rescale = lambda values: [v / 10. for v in values]
    return rescale(lons), rescale(lats)
def __init__(self, startLatitude, startLongitude, endLatitude, endLongitude, pickupStruct, cabRideNumber):
    """Store trip endpoints, pickup time and ride id for later requests."""
    # Google Directions endpoint queried later.
    self.baseUrl = "https://maps.googleapis.com/maps/api/directions/json?"
    # NOTE(review): coordinates appear to be concatenated into a URL
    # downstream, so these are presumably strings — confirm at call sites.
    self.startLatitude = startLatitude
    self.startLongitude = startLongitude
    self.endLatitude = endLatitude
    self.endLongitude = endLongitude
    self.pickupTime = pickupStruct
    self.cabRideNumber = cabRideNumber
    # SECURITY: hard-coded API key checked into source; move to
    # configuration/environment.
    self.apiKey = "AIzaSyCuzBdk6sIIrrJpgQmcJbwtfumumuRLStU"
    self.polylineCodec = PolylineCodec()
def route(self):
    """Query the OSRM viaroute service through every id in self.ids and
    return the decoded route as [x/10, y/10] pairs (the service encodes
    with 6-digit precision, the codec decodes at 5, hence the /10).
    """
    viaroute = SERVER_URL + 'viaroute?alt=false&geometry=true'
    for item in self.ids:
        try:
            coords = self.coord.get(item)
            viaroute += '&loc={},{}'.format(coords[1], coords[0])
        except (TypeError, LookupError, AttributeError):
            # BUG FIX: was a bare `except:` that also hid unrelated
            # errors (including KeyboardInterrupt). A missing id makes
            # self.coord.get() return None, so indexing raises TypeError.
            print('>> note {} not found!'.format(item))
            sys.exit()
    req = js.loads(requests.get(viaroute).text)
    # Decoded points come back as (lat, lng) tuples; emit them swapped
    # and rescaled, matching the original output order.
    return list(map(
        lambda x: [x[1] / 10.0, x[0] / 10.0],
        PolylineCodec().decode(req['route_geometry'])
    ))
def gpolyfiles2shp(pattern, ofname=None):
    """Decode every encoded-polyline file matching `pattern` and collect
    the results as routes of a single GPX document written to `ofname`.

    NOTE(review): despite the `ofname=None` default there is no fallback,
    so open(None, 'wb') would fail; also gpx.to_xml() normally returns
    str, which a 'wb' file rejects on Python 3 — confirm target version.
    """
    files = glob(pattern)
    gpx = GpxRoute()
    # Fetch/parse files concurrently; completion order is arbitrary.
    with ThreadPoolExecutor(max_workers=4) as pool:
        futures2poly = {pool.submit(get_content, fname): fname
                        for fname in files}
        for future in as_completed(futures2poly):
            fname = futures2poly[future]
            print('Handling %r' % fname)
            if future.exception() is not None:
                # Log and skip files that failed to load.
                print('%r generated an exception: %s' % (fname,
                                                         future.exception()))
                continue
            pcodec = PolylineCodec()
            polyxy = pcodec.decode(future.result())
            gpx_route = new_gpx_route(polyxy, name=fname)
            gpx.routes.append(gpx_route)
    with open(ofname, 'wb') as ofile:
        ofile.write(gpx.to_xml())
    print(ofname)
def gpoly2shp(
        ifname,
        ofname=None,
        geom_path='routes/route/polyline-definition/polyline',
        time_path='routes/route/summary/time',
        length_path='routes/route/summary/length',
):
    """Convert one stored routing response into an ESRI shapefile with a
    single LineString feature (duration and length attributes).

    The *_path arguments are slash-separated lookup paths passed to
    get_item() to pull values out of the response structure.
    """
    response = get_content(ifname)
    pcodec = PolylineCodec()
    polyxy = pcodec.decode(get_item(response, geom_path))
    length = int(get_item(response, length_path))
    # NOTE(review): `time` shadows the stdlib module name inside this
    # function.
    time = int(get_item(response, time_path))
    # NOTE(review): only 'seconds' and 'meters' are written below; the
    # x/y start/end and 'cat' properties declared here stay unset —
    # confirm the driver tolerates missing properties.
    schema = {
        'geometry': 'LineString',
        'properties': {
            'seconds': 'int',
            'meters': 'int',
            'x_start': 'float',
            'y_start': 'float',
            'x_end': 'float',
            'y_end': 'float',
            'cat': 'str',
        },
    }
    # Default output name: same basename as the input, .shp extension.
    fname, extension = splitext(ifname)
    ofname = ofname or join(dirname(fname), '{}.shp'.format(basename(fname)))
    with fiona.open(ofname, mode='w', driver='ESRI Shapefile', schema=schema) as lyr:
        lyr.write({
            'geometry': mapping(LineString(polyxy)),
            'properties': {
                'seconds': time,
                'meters': length,
            }
        })
    print(ofname)
def _get_mini_steps(cls, polyline, start_location, end_location, total_distance, total_time):
    """Expand an encoded step polyline into (lat, lon, distance, time)
    mini-steps, assuming constant average speed along the step.

    Falls back to a single mini-step covering the whole distance/time
    when the polyline decodes to no points.
    """
    mini_steps = []
    # IMPROVEMENT: decode once; the original decoded the polyline twice
    # (once for the emptiness check and once for iteration).
    points = PolylineCodec().decode(polyline)
    if points:
        avg_speed = total_distance / total_time
        curr_loc = start_location
        for next_loc in points:
            distance = vincenty(curr_loc, next_loc).meters
            mini_steps.append(
                (next_loc[0], next_loc[1], distance, distance / avg_speed))
            curr_loc = next_loc
        # Close the gap between the last decoded point and the step's
        # reported end location.
        distance = vincenty(curr_loc, end_location).meters
        mini_steps.append((end_location[0], end_location[1], distance,
                           distance / avg_speed))
    else:
        mini_steps.append(
            (end_location[0], end_location[1], total_distance, total_time))
    return mini_steps
class GooglePaths():
    """
    Fetches the coordinates along a Google Maps driving route between a
    start and an end point and converts them into timestamped cab-ride
    step dicts.
    """

    def __init__(self, startLatitude, startLongitude, endLatitude, endLongitude, pickupStruct, cabRideNumber):
        self.baseUrl = "https://maps.googleapis.com/maps/api/directions/json?"
        self.startLatitude = startLatitude
        self.startLongitude = startLongitude
        self.endLatitude = endLatitude
        self.endLongitude = endLongitude
        self.pickupTime = pickupStruct
        self.cabRideNumber = cabRideNumber
        # SECURITY: hard-coded API key checked into source; move to
        # configuration/environment.
        self.apiKey = "AIzaSyCuzBdk6sIIrrJpgQmcJbwtfumumuRLStU"
        self.polylineCodec = PolylineCodec()

    def __makeRequest(self):
        # NOTE(review): the literal spaces after "origin= " etc. are kept
        # byte-for-byte from the original; they look accidental but
        # changing them would alter the request — confirm before fixing.
        requestUrl = self.baseUrl + "origin= " + self.startLatitude + "," + self.startLongitude + "&destination= " + self.endLatitude + "," + self.endLongitude + "&key= " + self.apiKey
        response = requests.request("GET", requestUrl)
        return response.json()

    def convertToLines(self, responseJson):
        """Turn the route's overview polyline into a sampled list of step
        dicts carrying time, ride number and coordinates."""
        polyline = responseJson["routes"][0]["overview_polyline"]["points"]
        points = self.polylineCodec.decode(polyline)
        stepsList = []
        counter = 0
        for lat, long in points:
            counter += 1
            # BUG FIX: the original referenced a bare `pickupTime`, an
            # undefined name (NameError); the value lives on the instance.
            stepsList.append({"time": str(self.pickupTime + datetime.timedelta(seconds=counter + 5)),
                              "cabRideNumber": self.cabRideNumber,
                              'latitude': lat,
                              'longitude': long})
        # Always keep the first point, a random sample of the middle, and
        # the last point.
        stepsListFinal = [stepsList[0]]
        length = len(stepsList)
        sample = []
        if len(stepsList) - 2 > 20:
            # BUG FIX: random.sample requires an integer count; `length/6`
            # is a float under Python 3. Floor division is identical for
            # Python 2 ints.
            sample = random.sample(stepsList[1:(length - 2)], length // 6)
        stepsListFinal = stepsListFinal + sample + [stepsList[length - 1]]
        return stepsListFinal

    def getPaths(self):
        """Run the request + conversion pipeline and return the steps."""
        self.responseJson = self.__makeRequest()
        self.lines = self.convertToLines(self.responseJson)
        return self.lines
def gpoly2gpx(ifname, output_dir=None, json_path='routes/route/polyline-definition/polyline'):
    """Decode the polyline stored in a routing-response file and write it
    out as a single-route GPX file next to the input (or in output_dir).

    Exits the process with status 1 when the polyline cannot be found.
    NOTE(review): gpx.to_xml() normally returns str, which a 'wb' file
    rejects on Python 3 — confirm target version.
    """
    response = get_content(ifname)
    pcodec = PolylineCodec()
    item = get_item(response, json_path)
    if item is None:
        logging.error(
            'Bad input from file {ifname}'
            '`\n`--> response {response}'
            .format(**locals()))
        sys.exit(1)
    polyxy = pcodec.decode(item)
    gpx = GpxRoute()
    gpx_route = new_gpx_route(polyxy)
    gpx.routes.append(gpx_route)
    # Output name: input basename with a .gpx extension.
    fname, extension = splitext(ifname)
    output_dir = output_dir or dirname(fname)
    ofname = join(output_dir, '{}.gpx'.format(basename(fname)))
    with open(ofname, 'wb') as ofile:
        ofile.write(gpx.to_xml())
    print(ofname)
def polyline_to_gpx(polyline = None):
    """Convert an encoded Google polyline into a GPX XML document whose
    waypoints are the decoded coordinates.

    Raises Exception when no polyline is given or decoding fails.
    """
    # BUG FIX: `polyline == None` invoked __eq__; identity check is the
    # correct (and conventional) test for None.
    if polyline is None:
        raise Exception("Need a Google Polyline as parameter")
    waypoints = None
    try:
        waypoints = PolylineCodec().decode(polyline)
    except Exception as e:
        raise Exception("Error decoding polyline. err: {}".format(e))
    gpx = gpxpy.gpx.GPX()
    gpx.creator = "Ride with gpxpy"
    for point in waypoints:
        lat, lon = point
        gpx.waypoints.append(gpxpy.gpx.GPXWaypoint(lat, lon))
    return gpx.to_xml()
def polylineDistance(pl):
    """Sum pos2dist over consecutive decoded points of an encoded
    polyline (zero for a single-point polyline)."""
    points = PolylineCodec().decode(pl)
    # Seed the walk with the first point; an empty polyline raises
    # IndexError here, as before.
    lat_prev, lon_prev = points[0]
    total = 0
    for lat, lon in points[1:]:
        total += pos2dist(lat_prev, lon_prev, lat, lon)
        lat_prev, lon_prev = lat, lon
    return total
def get(self,apnk,LatLongs):
    """Return alternative driving routes annotated with accident clusters
    that lie within 3 km of each route's geometry.

    Unlike the reverse-geocoding variant elsewhere in this codebase, this
    implementation matches clusters geometrically with shapely/geopandas.
    """
    # Adding Geolocation
    gmaps = googlemaps.Client(key=apnk)
    val = []
    geolocation = GoogleV3(api_key=apnk)
    RoadInfo = json.loads(LatLongs)
    directions = gmaps.directions(RoadInfo[0],RoadInfo[1],mode="driving",alternatives=True)
    polylines = [i["overview_polyline"]["points"] for i in directions]
    # NOTE(review): flattening over legs assumes one leg per route —
    # multi-leg routes would misalign lengths/durations with routes.
    routeLengths = [j["distance"]["value"] for i in directions for j in i["legs"]]
    routeDurations = [j["duration"]["value"] for i in directions for j in i["legs"]]
    cur = mysql.connection.cursor()
    AccData = pd.read_sql('select Longitude, Latitude, RadiusInKM, PostcodeNo, AccidentCount,concat(RoadName,\' \', RoadType) as Road from AccidentCluster where AccidentCount > 1 ;',con=mysql.connection)
    # One LineString per route; every 2nd decoded point is kept to thin
    # the geometry.
    LS = [LineString(PolylineCodec().decode(line)[1::2]) for line in polylines]
    MLS = MultiLineString(LS)
    bounds = MLS.bounds
    # Pre-filter clusters to the bounding box of all routes. Coordinates
    # here are (lat, lon) ordered, so bounds[0]/[2] are latitudes.
    newData = AccData.loc[(AccData['Latitude'] >= bounds[0]) & (AccData['Latitude'] <= bounds[2]) & (AccData['Longitude'] >= bounds[1]) & (AccData['Longitude'] <= bounds[3])]
    def myFun(point,line):
        # Distance (km) from a cluster point to its nearest point on the
        # route line. NOTE(review): local `np` shadows the usual numpy
        # alias.
        np = line.interpolate(line.project(point))
        new_point = pnt(longitude=np.y,latitude=np.x)
        old_point = pnt(longitude=point.y,latitude=point.x)
        dist = distance(new_point,old_point).km
        return dist
    geo = geop.GeoDataFrame(newData,geometry=geop.points_from_xy(newData.Latitude,newData.Longitude))
    # One extra column per route holding each cluster's distance to it.
    # NOTE(review): the lambda parameter `val` shadows the outer result
    # list of the same name.
    for i in range(len(LS)):
        s=str(i)
        geo[s] = geo.apply(lambda val: myFun(val['geometry'],LS[i]),axis=1)
    # The first 7 columns are the original cluster fields + geometry;
    # columns from index 7 on are the per-route distances added above —
    # hence the recurring `- 7` offset. TODO confirm the column count.
    for i in range(7,len(geo.columns)):
        j = str(i-7)
        # Keep clusters within 3 km of route j.
        newdf = geo.loc[(geo[j] <= 3.0)]
        dropList = [str(thing-7) for thing in range(7,len(geo.columns))]
        newdf = newdf.drop(columns=dropList)
        newdf = newdf.drop(columns=['geometry'])
        val.append({"RouteNo":int(j),"polyline": polylines[i-7], 'routeLengthInMeters':routeLengths[i-7], 'routeDurationInSeconds':routeDurations[i-7],
                    'data':newdf.to_dict(orient='records'), 'totalAccidents': str(newdf['AccidentCount'].sum()), 'bounds':bounds})
    finalVal = {'routes':val}
    return jsonify(finalVal)
def decode_geom(encoded_polyline):
    """
    Decode an encoded polyline (Google 'encoded polyline algorithm') into
    an ogr.Geometry line.

    Parameters
    ----------
    encoded_polyline : str
        The encoded string to decode.

    Returns
    -------
    line : ogr.Geometry
        The line geometry, as an ogr.Geometry instance.
    """
    # 2 is presumably the wkbLineString geometry type — confirm against
    # the ogr constants.
    line = Geometry(2)
    for lat, lon in PolylineCodec().decode(encoded_polyline):
        # Decoded tuples are (lat, lon); the geometry expects (x, y),
        # i.e. (lon, lat).
        line.AddPoint_2D(lon, lat)
    return line
def worker(self, url, semaphore):
    """Old-style (yield from) asyncio coroutine: fetch one OSRM route and
    write it as a line feature into self.dstlayer.

    NOTE(review): uses the long-removed aiohttp.request /
    TCPConnector(share_cookies=...) API — pinned to a very old aiohttp.
    """
    # Bound the number of concurrent requests with the shared semaphore.
    with (yield from semaphore):
        response = yield from aiohttp.request(
            'GET', url,
            connector=aiohttp.TCPConnector(share_cookies=True,
                                           verify_ssl=False))
        try:
            body = yield from response.json()
        except:  # NOTE(review): bare except silently drops bad responses.
            pass
        else:
            self.results += 1
            total_time_osrm = body['route_summary']['total_time']
            total_dist_osrm = body['route_summary']['total_distance']
            epa_dec = PolylineCodec().decode(body['route_geometry'])
            ma_ligne = ogr.Geometry(ogr.wkbLineString)
            line_add_pts = ma_ligne.AddPoint_2D
            # Origin/destination names are recovered from the query string
            # of the URL itself.
            origin = url[url.find('?loc=') + 5:url.find('&')]
            dest = url[url.find('&loc=') + 5:url.find('&ins')]
            for coord in epa_dec:
                # Rescale by 10 (OSRM's 6-digit precision decoded at 5 —
                # see decode_geometry elsewhere in this file) and swap to
                # (x, y) = (lon, lat).
                line_add_pts(coord[1] / 10.0, coord[0] / 10.0)
            feature = ogr.Feature(self.dstlayer.GetLayerDefn())
            feature.SetGeometry(ma_ligne)
            for f_name, f_value in zip(
                    ['ID', 'Total_time', 'Total_dist', 'Src_name', 'Tgt_name'],
                    [
                        self.results, total_time_osrm, total_dist_osrm,
                        self.dico_loc[origin], self.dico_loc[dest]
                    ]):
                feature.SetField(f_name, f_value)
            self.dstlayer.CreateFeature(feature)
            feature.Destroy()
def _filename(self, name=None, suffix=None, prefix=None): """ File name generator for processed images """ filename = '' if prefix: filename += str(prefix) + '_' if name: filename += str(name) else: filename += str(self.scene) if suffix: filename += '_' + str(suffix) if self.clipped: bounds = [tuple(self.bounds[0:2]), tuple(self.bounds[2:4])] polyline = PolylineCodec().encode(bounds) filename += '_clipped_' + polyline filename += '.TIF' return filename
return [direction_label, direction_code, direction_out + append + street] if __name__ == "__main__": # Opens html for strava cue-sheets txt_test = open('cue-sheet-long.txt').read() txt_test = txt_test.split('<script>')[-1].split('</script>')[0] # Extracts polyline encoded data txt_poly = txt_test.split('polyline')[1:] # Condenses all polyline segments polyline_segments = [item.split('"data":"')[1].split('"},"')[0].replace('\\\\','\\') for item in txt_test.split('polyline')[1:]] # Extracts gps coords from polylines polyine_totals = [PolylineCodec().decode(encoding) for encoding in polyline_segments] polyine_totals = [item for sublist in polyine_totals for item in sublist] # Reencodes for one single polyline through all gps points re_encoded_polyline = PolylineCodec().encode(polyine_totals) txt_directions = txt_test.split('"directions":[') directions_set = [item.split(']')[0] for item in txt_directions[1:]] distance_set_2 = [calc_distance(poly) for poly in polyline_segments] direction_set = [] direction_set_4 = [] # Constructs list of distances between polyline points, and the net distance count = 0 for item in directions_set:
def match(points, steps=False, overview="simplified", geometry="polyline",
          timestamps=None, radius=None, annotations="false", gaps="split",
          tidy=False, waypoints=None, url_config=RequestConfig):
    """
    Function wrapping OSRM 'match' function, returning the response in JSON

    Parameters
    ----------

    points : list of tuple/list of point
        A sequence of points as (x ,y) where x is longitude and y is latitude.
    steps : bool, optional
        Default is False.
    overview : str, optional
        Query for the geometry overview, either "simplified", "full" or
        "false" (Default: "simplified")
    geometry : str, optional
        Format in which decode the geometry, either "polyline" (ie. not
        decoded), "geojson", "WKT" or "WKB" (default: "polyline").
    timestamps : list of timestamp, optional
    radius : list of float, optional
    annotations : bool, optional
    gaps : str, optional
    tidy : bool, optional
    waypoints : list of tuple/list of point, optional
    url_config : osrm.RequestConfig, optional
        Parameters regarding the host, version and profile to use

    Returns
    -------
    dict
        The response from the osrm instance, parsed as a dict
    """
    host = check_host(url_config.host)
    url = [
        host, '/match/', url_config.version, '/', url_config.profile, '/',
        ';'.join(
            [','.join([str(coord[0]), str(coord[1])]) for coord in points]),
        "?overview={}&steps={}&geometries={}&annotations={}&gaps={}&tidy={}"
        .format(overview, str(steps).lower(), geometry, annotations, gaps,
                str(tidy).lower())
    ]
    if radius:
        url.append("&radiuses=")
        url.append(";".join([str(rad) for rad in radius]))
    if timestamps:
        # BUG FIX: this literal was the mojibake "×tamps=" ("&times"
        # collapsed into '×'); the OSRM query parameter is "timestamps".
        url.append("&timestamps=")
        url.append(";".join([str(timestamp) for timestamp in timestamps]))
    if waypoints:
        url.append("&waypoints=")
        url.append(";".join([str(waypoint) for waypoint in waypoints]))
    req = Request("".join(url))
    if url_config.auth:
        req.add_header("Authorization", url_config.auth)
    r = urlopen(req)
    r_json = json.loads(r.read().decode('utf-8'))
    if "code" not in r_json or "Ok" not in r_json["code"]:
        # NOTE(review): geometry decoding only runs on this (non-Ok)
        # branch, which looks inverted — confirm against the upstream
        # osrm wrapper before changing behavior.
        if 'matchings' in r_json.keys():
            for i, _ in enumerate(r_json['matchings']):
                geom_encoded = r_json["matchings"][i]["geometry"]
                # Rescale by 10 and swap to [lon, lat] pairs.
                geom_decoded = [[
                    point[1] / 10.0, point[0] / 10.0
                ] for point in PolylineCodec().decode(geom_encoded)]
                r_json["matchings"][i]["geometry"] = geom_decoded
        else:
            print('No matching geometry to decode')
    return r_json
def setUp(self):
    # Fresh codec instance for every test case.
    self.codec = PolylineCodec()
class PolylineCodecTestCase(unittest.TestCase):
    """Round-trip tests for PolylineCodec against known encodings.

    Each decode/encode case exists in two variants: the default 5-digit
    precision and an explicit 6-digit one, including Google's official
    polyline example.
    """

    def setUp(self):
        # Fresh codec instance for every test case.
        self.codec = PolylineCodec()

    def test_decode_multiple_points(self):
        d = self.codec.decode('gu`wFnfys@???nKgE??gE?????oK????fE??fE')
        self.assertEqual(d, [
            (40.641, -8.654),
            (40.641, -8.654),
            (40.641, -8.656),
            (40.642, -8.656),
            (40.642, -8.655),
            (40.642, -8.655),
            (40.642, -8.655),
            (40.642, -8.653),
            (40.642, -8.653),
            (40.642, -8.653),
            (40.641, -8.653),
            (40.641, -8.654)
        ])

    def test_decode_multiple_points_precision(self):
        d = self.codec.decode('o}oolA~ieoO???~{Bo}@??o}@?????_|B????n}@??n}@', 6)
        self.assertEqual(d, [
            (40.641, -8.654),
            (40.641, -8.654),
            (40.641, -8.656),
            (40.642, -8.656),
            (40.642, -8.655),
            (40.642, -8.655),
            (40.642, -8.655),
            (40.642, -8.653),
            (40.642, -8.653),
            (40.642, -8.653),
            (40.641, -8.653),
            (40.641, -8.654)
        ])

    def test_decode_official_example(self):
        d = self.codec.decode('_p~iF~ps|U_ulLnnqC_mqNvxq`@')
        self.assertEqual(d, [
            (38.500, -120.200),
            (40.700, -120.950),
            (43.252, -126.453)
        ])

    def test_decode_official_example_precision(self):
        d = self.codec.decode('_izlhA~rlgdF_{geC~ywl@_kwzCn`{nI', 6)
        self.assertEqual(d, [
            (38.500, -120.200),
            (40.700, -120.950),
            (43.252, -126.453)
        ])

    def test_decode_single_point(self):
        d = self.codec.decode('gu`wFf`ys@')
        self.assertEqual(d, [
            (40.641, -8.653)
        ])

    def test_decode_single_point_precision(self):
        d = self.codec.decode('o}oolAnkcoO', 6)
        self.assertEqual(d, [
            (40.641, -8.653)
        ])

    def test_encode_multiple_points(self):
        e = self.codec.encode([
            (40.641, -8.654),
            (40.641, -8.654),
            (40.641, -8.656),
            (40.642, -8.656),
            (40.642, -8.655),
            (40.642, -8.655),
            (40.642, -8.655),
            (40.642, -8.653),
            (40.642, -8.653),
            (40.642, -8.653),
            (40.641, -8.653),
            (40.641, -8.654)
        ])
        self.assertEqual(e, 'gu`wFnfys@???nKgE??gE?????oK????fE??fE')

    def test_encode_multiple_points_precision(self):
        e = self.codec.encode([
            (40.641, -8.654),
            (40.641, -8.654),
            (40.641, -8.656),
            (40.642, -8.656),
            (40.642, -8.655),
            (40.642, -8.655),
            (40.642, -8.655),
            (40.642, -8.653),
            (40.642, -8.653),
            (40.642, -8.653),
            (40.641, -8.653),
            (40.641, -8.654)
        ], 6)
        self.assertEqual(e, 'o}oolA~ieoO???~{Bo}@??o}@?????_|B????n}@??n}@')

    def test_encode_official_example(self):
        e = self.codec.encode([
            (38.500, -120.200),
            (40.700, -120.950),
            (43.252, -126.453)
        ])
        self.assertEqual(e, '_p~iF~ps|U_ulLnnqC_mqNvxq`@')

    def test_encode_official_example_precision(self):
        e = self.codec.encode([
            (38.500, -120.200),
            (40.700, -120.950),
            (43.252, -126.453)
        ], 6)
        self.assertEqual(e, '_izlhA~rlgdF_{geC~ywl@_kwzCn`{nI')

    def test_encode_single_point(self):
        e = self.codec.encode([
            (40.641, -8.653)
        ])
        self.assertEqual(e, 'gu`wFf`ys@')

    def test_encode_single_point_precision(self):
        e = self.codec.encode([
            (40.641, -8.653)
        ], 6)
        self.assertEqual(e, 'o}oolAnkcoO')

    def test_a_variety_of_precisions(self):
        """uses a generator to create a variety of lat-lon's across the global
        and tests a range of precision settings from 4 to 8"""

        def generator():
            # Endless stream of random 2-9 point coordinate lists,
            # rounded to 5 decimal places.
            while True:
                coords = []
                for i in range(2, randint(4, 10)):
                    lat, lon = uniform(-180.0, 180.0), uniform(-180.0, 180.0)
                    xy = (round(lat, 5), round(lon, 5))
                    coords.append(xy)
                yield coords

        patience = 3  # seconds.
        waypoints, okays = 0, 0

        g = generator()
        start = time.time()
        # Encode/decode random waypoint lists for `patience` seconds; a
        # round trip either matches exactly or stays within the expected
        # precision tolerance.
        while time.time() < start + patience:
            precision = randint(4, 8)
            wp = next(g)
            waypoints += len(wp)
            polyline = self.codec.encode(wp, precision)
            wp2 = self.codec.decode(polyline, precision)
            if wp == wp2:
                okays += len(wp2)
            else:
                for idx, _ in enumerate(wp):
                    dx, dy = abs(wp[idx][0] - wp2[idx][0]), abs(wp[idx][1] - wp2[idx][1])
                    if dx > 10 ** -(precision - 1) or dy > 10 ** -(precision - 1):
                        print("idx={}, dx={}, dy={}".format(idx, dx, dy))
                    else:
                        okays += 1

        assert okays == waypoints
        print("encoded and decoded {0:.2f}% correctly for {1} waypoints @ {2} wp/sec".format(
            100 * okays / float(waypoints), waypoints, round(waypoints / patience, 0)))
def get_measurements_along_route(self, user_id):
    """
    Returns a list of measurements along trip based on OTP data.
    Measurements include location entries and motion entries. Motion
    entries are included so the pipeline can determine the mode of
    transportation for each section of the trip.
    """
    measurements = []
    otp_json = self.get_json()
    self._raise_exception_if_no_plan(otp_json)
    # Integer timestamps already emitted; used below to force uniqueness.
    time_stamps_seen = set()
    # We iterate over the legs and create location entries based on the leg
    # geometry — a long list of coordinates along the leg.
    for i, leg in enumerate(otp_json["plan"]["itineraries"][0]['legs']):
        # If there are points along this leg
        if leg['legGeometry']['length'] > 0:
            # Add a new motion measurement based on the leg mode. This is
            # necessary for the pipeline to detect the mode of
            # transportation and to differentiate sections.
            measurements.append(create_motion_entry_from_leg(leg, user_id))
            # TODO: maybe we should check if the leg start time is less
            # than the last timestamp to ensure we always move forward in
            # time.
            leg_start = otp_time_to_ours(leg['startTime'])
            leg_end = otp_time_to_ours(leg['endTime'])
            leg_start_time = leg_start.timestamp + leg_start.microsecond / 1e6
            leg_end_time = leg_end.timestamp + leg_end.microsecond / 1e6
            coordinates = PolylineCodec().decode(
                leg['legGeometry']['points'])
            prev_coord = coordinates[0]
            # Constant average speed assumed along the whole leg.
            velocity = get_average_velocity(leg_start_time, leg_end_time,
                                            float(leg['distance']))
            altitude = 0
            time_at_prev_coord = leg_start_time
            for j, curr_coordinate in enumerate(coordinates):
                if j == 0:
                    curr_timestamp = leg_start_time
                elif j == len(coordinates) - 1:
                    # We store the last coordinate so we can duplicate it at
                    # a later point in time. This is necessary for the
                    # pipeline to detect that the trip has ended.
                    # TODO: should we make sure the last timestamp is the
                    # same as leg['endTime']?
                    last_coordinate = curr_coordinate
                    curr_timestamp = get_time_at_next_location(
                        curr_coordinate, prev_coord, time_at_prev_coord,
                        velocity)
                else:
                    # Estimate the time at the current location
                    curr_timestamp = get_time_at_next_location(
                        curr_coordinate, prev_coord, time_at_prev_coord,
                        velocity)
                # Hack to make the timestamps unique: bump by one second
                # until this integer second is unseen. Only the previous
                # timestamp really needs tracking.
                while int(curr_timestamp) in time_stamps_seen:
                    curr_timestamp += 1
                time_stamps_seen.add(int(curr_timestamp))
                measurements.append(
                    create_measurement(curr_coordinate,
                                       float(curr_timestamp), velocity,
                                       altitude, user_id))
                prev_coord = curr_coordinate
                time_at_prev_coord = curr_timestamp
    # We need to add one more measurement to indicate to the pipeline that
    # the trip has ended. The +1000s value is hardcoded based on the dwell
    # segmentation dist filter time delta threshold.
    # NOTE(review): relies on curr_timestamp / last_coordinate / altitude
    # leaking out of the loops — raises NameError if every leg had an
    # empty geometry.
    idle_time_stamp = arrow.get(curr_timestamp).shift(
        seconds=+1000).timestamp
    measurements.append(
        create_measurement(last_coordinate, float(idle_time_stamp), 0,
                           altitude, user_id))
    return measurements
steps = [] #a = [2422, 2354] for index, route in enumerate(distinct_routes): steps.append(route[0]) start_time = time.time() print('Creating Polyline for: ', len(steps), 'Unique steps') sys.stdout.write("\r%d%%" % ((0/len(steps)*100))) for index, i in enumerate(steps): if index >= 2000: data = [] x = [distinct_routes[i][2], distinct_routes[i][1]] y = [distinct_routes[i][4], distinct_routes[i][3]] polyline = googleRoutes.getGooglePolyline(x,y) decoded_polyline = PolylineCodec().decode(polyline) for sequence_number, line in enumerate(decoded_polyline): data.append([distinct_routes[i][0], sequence_number, Point(line[1], line[0]).wkt]) data[0] = [distinct_routes[i][0], 0, Point(x[1], x[0]).wkt] data[len(data) - 1] = [distinct_routes[i][0], len(data) - 1, Point(y[1], y[0]).wkt] dbUtil.storeUniqueStepsPolly(cur, conn, data) sys.stdout.write("\r%d%%" % ((index/len(steps)*100))) sys.stdout.flush()
def result(host, req):
    """Decide whether a ride request can share the host's route.

    Parameters
    ----------
    host : sequence
        (origin, destination) of the host's trip, in any form accepted by
        ``gmaps.directions``.
    req : sequence
        (origin, destination) of the requester's trip.

    Returns
    -------
    str
        "YES" when the requester's destination lies within 5000 units of
        the host's path and the two overview polylines share at least 10
        nodes, and the requester's end point is not farther from the
        host's origin than the requester's start point; otherwise "NO".
    """
    # Host's route: keep its origin and the decoded overview polyline.
    sp1 = gmaps.directions(host[0], host[1], mode="driving",
                           departure_time=datetime.now())
    sp1src = (sp1[0]['legs'][0]['start_location']['lat'],
              sp1[0]['legs'][0]['start_location']['lng'])
    poly1 = PolylineCodec().decode(sp1[0]['overview_polyline']['points'])

    # Requester's route: origin, destination and decoded overview polyline.
    sp2 = gmaps.directions(req[0], req[1], mode="driving",
                           departure_time=datetime.now())
    sp2src = (sp2[0]['legs'][0]['start_location']['lat'],
              sp2[0]['legs'][0]['start_location']['lng'])
    sp2end = (sp2[0]['legs'][0]['end_location']['lat'],
              sp2[0]['legs'][0]['end_location']['lng'])
    poly2 = PolylineCodec().decode(sp2[0]['overview_polyline']['points'])

    intersectedNodes = matchPolyLine(poly1, poly2)
    # Distance from the requester's destination to the host's path
    # (reuses sp2end instead of re-reading the response dict).
    val = dis_desFromPath(poly1, sp2end)
    print(len(intersectedNodes))  # debug output kept from the original

    # Requester heading away from the host's origin -> no match.
    if getDistance(sp1src, sp2src) > getDistance(sp1src, sp2end):
        return "NO"
    # Close enough to the path and enough shared nodes -> match.
    if val <= 5000 and len(intersectedNodes) >= 10:
        return "YES"
    return "NO"
def encode_polyline(features, zoom_level=18):
    """Encode an iterable of features as a polyline string."""
    return PolylineCodec().encode(list(read_points(features)))
sql = "UPDATE otpfast SET duration=%d, tooslow=%s WHERE route_id=%d;" \ "UPDATE routes SET useful=%s WHERE id=%d" % (pduration, pslow, id, not(pslow), id) con.exe(sql) fslow = pslow isfast = not (fslow) and pslow sql = "UPDATE routes SET isfast=%s WHERE id=%d" % (isfast, id) con.exe(sql) if not (fslow): legs = preflegs if pslow: legs = fastlegs i = 0 for leg in legs: geom = leg['legGeometry']['points'] if len(geom) > 0: pl = PolylineCodec().decode(geom) linestring = 'LINESTRING(' for lat, lon in pl: linestring = linestring + '%f %f,' % (lon, lat) linestring = linestring[:-1] + ')' sql = "INSERT INTO otplegs (route_id, legnum, points, mode, distance, duration) VALUES (%d, %d, ST_GeomFromText('%s', 4326), '%s', %f, %d)" \ % (id, i, linestring, leg['mode'], leg['distance'], leg['duration'] ) con.exe(sql) print 'Fouten: ', errors con.commit() con.sluit() def createEdges(): dbtables.OTPEDGES.createTable() con = dbconnect.Verbinding()
# NOTE(review): the Google Maps API key is empty — must be filled in
# before this script can run.
gmaps = googlemaps.Client(key='')
dirs = []
# For each Citi Bike trip, ask Google for a cycling route between the
# start and end stations, then save the path as a KML linestring.
for i in cbike.index:
    # Station coordinates from the trips dataframe (x = lon, y = lat).
    xa = cbike['start station longitude'][i]
    ya = cbike['start station latitude'][i]
    xb = cbike['end station longitude'][i]
    yb = cbike['end station latitude'][i]
    # Directions API takes (lat, lon) tuples.
    directions = gmaps.directions((ya,xa), (yb,xb), mode='bicycling')
    #print directions
    codec = PolylineCodec()
    # Concatenate the decoded per-step polylines into one (lat, lon) path.
    path = []
    for s in directions[0]['legs'][0]['steps']:
        path += codec.decode(s['polyline']['points'])
    # swap lat and lon
    # NOTE(review): subscripting zip() and calling len() on its result only
    # works on Python 2 (where zip returns a list); under Python 3 these
    # would need list(zip(...)).
    pp = zip(*path)
    path = zip(pp[1], pp[0])
    # Only write a KML file when there is an actual line to draw.
    if len(path) > 1:
        lines = geometry.LineString(path)
        kml = simplekml.Kml()
        ls = kml.newlinestring(name='sample')
        ls.coords = lines.coords
        kml.save("data/paths/path500_" + str(i) + ".kml")
# # print(val) # # print(len(intersectedNodes)) # # print(getDistance(sp1src, sp2src)) # # print(getDistance(sp1src, sp2end)) # if(getDistance(sp1src, sp2src) > getDistance(sp1src, sp2end)): # print("NO") # else: # if(val <= 5000 and len(intersectedNodes) >= 10): # print("Yes") # else: # print("min dis from path:", val) # print("interesected nodes :", intersectedNodes) # print("NO") data1 = PolylineCodec().decode( "eqepCynrfPiHdCeCt@_DbACWpJsC~@[|XoJjH_CfCy@fCq@hD{@pIiBbEs@vIcB|IqBb@uA\\q@LOj@SrBYh@A`CNt@@j@KhUmEfAUh@Sb@Ir@IzEYvF_@rGg@|@O\\OXSTWPa@PqAjBqTHqA?YI_@QUYKiAGcDGsDI]?uB\\W@KAg@e@Q[Qg@qD_@eAMgBUeCWkCYuASuB[aAGeACsBJuEZg@?OCEGa@}FSuCYwC]wAoAsCe@i@iA_CmAkC_AqBeBqESu@Cw@?c@" ) # print(data1,end='\n\n') # print(json.dumps(directions_result1, indent=4)) data2 = PolylineCodec().decode( "{_`pCqotfPc@NGUAGBEf@QBMOw@WH_Bf@KAGEKOU?KEEOQyAGcCBc@E_@KOQQGq@G{ACYcBHgBJiE@gGO}EQkBOkGq@gBUkBSYCkCYkEo@aAGeACiIf@s@AGEAIq@mJSeCUyAQo@oAsCe@i@iA_CmAkC_AqBeBqESu@AUAeA" ) data3 = PolylineCodec().decode( directions_result2[0]['overview_polyline']['points']) data4 = PolylineCodec().decode( directions_result3[0]['overview_polyline']['points'])
def build_coords(json) -> list:
    """Build a flat coordinate list from a Google Directions response.

    Decodes the polyline of every step in the first leg of the first
    route and concatenates the resulting (lat, lon) pairs in order.

    Parameters
    ----------
    json : dict
        Parsed Directions API response. (The name shadows the stdlib
        ``json`` module inside this function; kept unchanged for
        interface compatibility.)

    Returns
    -------
    list
        Decoded (lat, lon) tuples covering the whole leg.
    """
    # Hoisted out of the loop: the original constructed a fresh
    # PolylineCodec for every step.
    codec = PolylineCodec()
    result = []
    for step in json['routes'][0]['legs'][0]['steps']:
        result.extend(codec.decode(step['polyline']['points']))
    return result
class PolylineCodecTestCase(unittest.TestCase):
    """Round-trip tests for PolylineCodec against known encodings."""

    # Shared fixtures: the same coordinate sequences are exercised by both
    # the decode and the encode tests, so each is defined exactly once.
    ZIGZAG = [
        (40.641, -8.654),
        (40.641, -8.654),
        (40.641, -8.656),
        (40.642, -8.656),
        (40.642, -8.655),
        (40.642, -8.655),
        (40.642, -8.655),
        (40.642, -8.653),
        (40.642, -8.653),
        (40.642, -8.653),
        (40.641, -8.653),
        (40.641, -8.654),
    ]
    OFFICIAL = [(38.500, -120.200), (40.700, -120.950), (43.252, -126.453)]
    SINGLE = [(40.641, -8.653)]

    # Known encodings at the default precision (5) and at precision 6.
    ZIGZAG_P5 = "gu`wFnfys@???nKgE??gE?????oK????fE??fE"
    ZIGZAG_P6 = "o}oolA~ieoO???~{Bo}@??o}@?????_|B????n}@??n}@"
    OFFICIAL_P5 = "_p~iF~ps|U_ulLnnqC_mqNvxq`@"
    OFFICIAL_P6 = "_izlhA~rlgdF_{geC~ywl@_kwzCn`{nI"
    SINGLE_P5 = "gu`wFf`ys@"
    SINGLE_P6 = "o}oolAnkcoO"

    def setUp(self):
        self.codec = PolylineCodec()

    def test_decode_multiple_points(self):
        self.assertEqual(self.codec.decode(self.ZIGZAG_P5), self.ZIGZAG)

    def test_decode_multiple_points_precision(self):
        self.assertEqual(self.codec.decode(self.ZIGZAG_P6, 6), self.ZIGZAG)

    def test_decode_official_example(self):
        self.assertEqual(self.codec.decode(self.OFFICIAL_P5), self.OFFICIAL)

    def test_decode_official_example_precision(self):
        self.assertEqual(self.codec.decode(self.OFFICIAL_P6, 6), self.OFFICIAL)

    def test_decode_single_point(self):
        self.assertEqual(self.codec.decode(self.SINGLE_P5), self.SINGLE)

    def test_decode_single_point_precision(self):
        self.assertEqual(self.codec.decode(self.SINGLE_P6, 6), self.SINGLE)

    def test_encode_multiple_points(self):
        self.assertEqual(self.codec.encode(self.ZIGZAG), self.ZIGZAG_P5)

    def test_encode_multiple_points_precision(self):
        self.assertEqual(self.codec.encode(self.ZIGZAG, 6), self.ZIGZAG_P6)

    def test_encode_official_example(self):
        self.assertEqual(self.codec.encode(self.OFFICIAL), self.OFFICIAL_P5)

    def test_encode_official_example_precision(self):
        self.assertEqual(self.codec.encode(self.OFFICIAL, 6), self.OFFICIAL_P6)

    def test_encode_single_point(self):
        self.assertEqual(self.codec.encode(self.SINGLE), self.SINGLE_P5)

    def test_encode_single_point_precision(self):
        self.assertEqual(self.codec.encode(self.SINGLE, 6), self.SINGLE_P6)
def match(points, steps=False, overview="simplified", geometry="polyline",
          timestamps=None, radius=None, url_config=RequestConfig):
    """
    Function wrapping OSRM 'match' function, returning the response in JSON.

    Parameters
    ----------
    points : list of tuple/list of point
        A sequence of points as (x, y) where x is longitude and y is latitude.
    steps : bool, optional
        Default is False.
    overview : str, optional
        Query for the geometry overview, either "simplified", "full" or
        "false" (Default: "simplified")
    geometry : str, optional
        Format in which decode the geometry, either "polyline" (ie. not
        decoded), "geojson", "WKT" or "WKB" (default: "polyline").
    timestamps : list of int, optional
        One Unix timestamp per point, in the same order as `points`.
    radius : list, optional
        One GPS-accuracy radius per point, in the same order as `points`.
    url_config : osrm.RequestConfig, optional
        Parameters regarding the host, version and profile to use

    Returns
    -------
    dict or str
        The parsed response from the OSRM instance, or the HTTP error
        reason string when the request fails.
    """
    host = check_host(url_config.host)
    url = [
        host, '/match/', url_config.version, '/', url_config.profile, '/',
        ';'.join(
            [','.join([str(coord[0]), str(coord[1])]) for coord in points]),
        "?overview={}&steps={}&geometries={}".format(overview,
                                                     str(steps).lower(),
                                                     geometry)
    ]
    if radius is not None:
        # BUG FIX: the radius values were appended without a query-parameter
        # name, corrupting the preceding parameter; OSRM expects "radiuses".
        url.append("&radiuses=")
        url.append(";".join([str(rad) for rad in radius]))
    if timestamps is not None:
        # BUG FIX: was the mojibake "×tamps=" ("&timestamps=" with "&times"
        # rendered as the multiplication sign).
        url.append("&timestamps=")
        url.append(";".join([str(timestamp) for timestamp in timestamps]))

    try:
        rep = urllib.request.urlopen(''.join(url))
    except urllib.error.HTTPError as err:
        # BUG FIX: the original stored err.reason (a str) and later indexed
        # it like a dict, raising TypeError; return the reason to the caller.
        return err.reason
    # BUG FIX: on success the original left r_json as None and issued the
    # same request a second time; parse the first response instead.
    r_json = json.loads(rep.read().decode('utf-8'))

    # BUG FIX: OSRM reports success as "Ok"; the original substring test
    # `"ok" in r_json["code"]` could never match it. Compare case-insensitively.
    if "ok" in r_json.get("code", "").lower() and 'matchings' in r_json:
        for i, _ in enumerate(r_json['matchings']):
            geom_encoded = r_json["matchings"][i]["geometry"]
            # NOTE(review): the /10.0 rescaling assumes the geometry is
            # polyline-encoded at precision 6 but decoded at the codec's
            # default precision 5 — confirm against the OSRM build in use.
            geom_decoded = [[point[1] / 10.0, point[0] / 10.0]
                            for point in PolylineCodec().decode(geom_encoded)]
            r_json["matchings"][i]["geometry"] = geom_decoded
    return r_json
# NOTE(review): these three statements look like the tail of a loop that
# writes one "tripId - polyline" line per trip; the loop header lies outside
# this chunk, so their original indentation could not be verified.
polyline = polylines[0]
entry = "%d - %s\n" % (t.tripId, polyline)
outputFile.write(entry)


def get_paths():
    # Read back the "tripId - polyline" pairs written above into a dict.
    # NOTE(review): the file is opened in binary mode but split with a str
    # separator — this only works on Python 2; confirm the target version.
    paths = dict()
    with open('trips_route.txt', 'rb') as inputFile:
        for line in inputFile.readlines():
            (tripId, path) = (d.strip() for d in line.split('-'))
            paths[tripId] = path
    return paths


trips = [t for t in get_trips()]
allStations = get_stations()
paths = get_paths()
# Attach the decoded (lat, lon) path to every trip with a stored polyline.
for t in trips:
    if t.tripId in paths:
        t.path = PolylineCodec().decode(paths[t.tripId])

START_TIME = '07:00:00'  # only animate trips from 7 a.m

# Export trips (only those with a path that start after START_TIME) and all
# stations as JSON for the visualisation front-end.
with open('../trips.json', 'w') as output:
    json.dump([
        t.__dict__ for t in trips
        if t.path and t.sTime.split("T")[1] > START_TIME
    ], output)
with open('../stations.json', 'w') as output:
    json.dump([s.__dict__ for s in allStations.values()], output)
def query_osrm_to_shp(dict_coord, coord_liste_s, coord_liste_t, dstpath, host):
    """
    Query an OSRM server for every (source, target) pair and write the
    resulting routes to a shapefile.

    Takes a dictionary of {'coordinates': 'names'} and the coordinate
    lists, sends the requests to the OSRM server and saves the result in
    the given output file (.shp).

    Parameters
    ----------
    dict_coord : dict
        Mapping of "lat,lon" coordinate strings to display names.
    coord_liste_s : list
        Source coordinate strings ("lat,lon").
    coord_liste_t : list
        Target coordinate strings ("lat,lon").
    dstpath : str
        Path of the output shapefile; replaced if it already exists.
    host : str
        Base URL of the OSRM server.

    Returns
    -------
    int
        Number of routes successfully written.
    """
    testit, error = 0, 0  # success / failure counters
    # Coordinate system used to write the shapefile
    spatialreference = osr.SpatialReference()
    spatialreference.SetWellKnownGeogCS('WGS84')
    # Geodesic model used for the straight-line ("as the crow flies") distance
    geod = pyproj.Geod(ellps='WGS84')
    # Output file format driver
    driver = ogr.GetDriverByName("ESRI Shapefile")
    try:
        if os.path.exists(dstpath):
            removefile(dstpath)
        dstfile = driver.CreateDataSource(dstpath)
        dstlayer = dstfile.CreateLayer("layer", spatialreference)
    except Exception as err:
        # French runtime message kept verbatim ("Error while creating the file")
        print(err, "\nErreur lors de la création du fichier")
        sys.exit(0)
    # Fields to create and their parameters, kept in a list of
    # [name, {type, width}] pairs so creation is a simple loop instead of
    # repeated boilerplate:
    fields = [['ID', {
        'type': ogr.OFTInteger,
        'width': 10
    }], ['Total_time', {
        'type': ogr.OFTInteger,
        'width': 14
    }], ['Total_dist', {
        'type': ogr.OFTInteger,
        'width': 14
    }], ['Dist_eucl', {
        'type': ogr.OFTInteger,
        'width': 14
    }], ['Src_name', {
        'type': ogr.OFTString,
        'width': 80
    }], ['Tgt_name', {
        'type': ogr.OFTString,
        'width': 80
    }]]
    for field_name, detail in fields:
        fielddef = ogr.FieldDefn(field_name, detail['type'])
        fielddef.SetWidth(detail['width'])
        dstlayer.CreateField(fielddef)
    print("pyq-OSRM : {0} routes to calculate".format(
        len(coord_liste_s) * len(coord_liste_t)))
    # One request per (source, target) combination.
    for source, target in range2d(coord_liste_s, coord_liste_t):
        src_name, tgt_name = dict_coord[source], dict_coord[target]
        # Build and send the request, then fetch the response
        url_query = ('{0}/viaroute?loc={1}&loc={2}'
                     '&instructions=false&alt=false').format(
                         host, source, target)
        try:
            response = urllib.request.urlopen(url_query)
        except Exception as err:
            # French runtime message kept verbatim ("Error while passing the URL")
            print("\npyq-OSRM :\nErreur lors du passage de l'URL\n", err)
            sys.exit(0)
        # Parse the raw bytes as JSON.
        # NOTE(review): HTTPResponse.readall() only exists on Python 3.0-3.3;
        # modern interpreters need .read() — confirm the target version.
        parsed_json = json.loads(response.readall().decode('utf-8'))
        # Straight-line distance between origin and destination; the
        # "lat,lon" strings are split into their lon/lat halves for geod.inv.
        _, _, distance_eucl = geod.inv(source[source.find(',') + 1:],
                                       source[:source.find(',')],
                                       target[target.find(',') + 1:],
                                       target[:target.find(',')])
        # Check that OSRM actually found a route (when no route was found,
        # reading the summary raises KeyError and the error code is handled
        # in the except block below):
        try:
            # Pull out the interesting fields...
            total_time_osrm = parsed_json['route_summary']['total_time']
            total_dist_osrm = parsed_json['route_summary']['total_distance']
            # ...including the geometry, which arrives encoded-polyline
            # encoded and is decoded into the list of (lat, lng) points
            # making up the line.
            epa_dec = PolylineCodec().decode(parsed_json['route_geometry'])
            ma_ligne = ogr.Geometry(ogr.wkbLineString)
            line_add_pts = ma_ligne.AddPoint_2D
            # NOTE(review): the /10.0 rescaling presumably compensates for
            # OSRM encoding at precision 6 while the codec decodes at 5 —
            # confirm against the OSRM build in use.
            for coord in epa_dec:
                line_add_pts(coord[1] / 10.0, coord[0] / 10.0)
            # Write the geometry and the attribute fields
            feature = ogr.Feature(dstlayer.GetLayerDefn())
            feature.SetGeometry(ma_ligne)
            for f_name, f_value in zip([
                    'ID', 'Total_time', 'Total_dist', 'Dist_eucl',
                    'Src_name', 'Tgt_name'
            ], [
                    testit, total_time_osrm, total_dist_osrm, distance_eucl,
                    src_name, tgt_name
            ]):
                feature.SetField(f_name, f_value)
            dstlayer.CreateFeature(feature)
            # print("Processing.... {0}%".format(int(
            #     testit / (len(coord_liste_s) * len(coord_liste_t)) * 100)),
            #       end='\r')
            testit += 1
        except KeyError:
            error += 1
            if parsed_json['status'] == 207:
                print("Err #{0} : OSRM status 207 - "
                      "No route found between {1} and {2}".format(
                          error, src_name, tgt_name))
            else:
                print("Err #{0} : No route found between {1} and {2}".format(
                    error, src_name, tgt_name))
    if error > 0:
        print("\t{0} route calculations failed".format(error))
    # NOTE(review): 'feature' is only bound inside the loop's try block —
    # if no route ever succeeds this raises NameError, and only the last
    # feature is ever destroyed here.
    feature.Destroy()
    dstfile.Destroy()
    return testit