def test_sending_polyline(self):
    """Routing the same origin/destination pair with and without polyline
    encoding of the request must yield identical results."""
    osrm.RequestConfig.host = "router.project-osrm.org"
    origin, destination = (41.5332, 21.9598), (41.9725, 21.3114)
    # Issue the two requests in the same order as before: plain coords
    # first, polyline-encoded second.
    responses = [
        osrm.simple_route(origin, destination, output="routes",
                          geometry="wkt", send_as_polyline=flag)
        for flag in (False, True)
    ]
    self.assertEqual(responses[0], responses[1])
def get_osrm_distance_and_duration(longlat_origin, longlat_destination,
                                   osrm_host="http://192.168.56.101:5000"):
    """ Calculate distance over the road and normal travel duration from
    one point to the other.

    Parameters
    ----------
    longlat_origin: tuple(float, float)
        coordinates of the start location in decimal longitude and latitude
        (in that order).
    longlat_destination: tuple(float, float)
        coordinates of the destination in decimal longitude and latitude.
    osrm_host: str
        The URL to the OSRM API.

    Returns
    -------
    Tuple of ('distance', 'duration') according to OSRM.

    Raises
    ------
    ImportError
        If the optional `osrm` Python package is not installed.

    Notes
    -----
    Requires OSRM to be installed (an optional dependency of fdsim).
    """
    try:
        import osrm
    except ImportError:
        # BUG FIX: the two implicitly-concatenated literals were missing a
        # separating space and produced "...travelddistances...".
        raise ImportError("Please install the OSRM Python package to calculate "
                          "travel distances and durations.")
    osrm.RequestConfig.host = osrm_host
    # simple_route(output="route") returns a list of route dicts; take the
    # best (first) alternative.
    result = osrm.simple_route(longlat_origin, longlat_destination,
                               output="route", geometry="wkt")[0]
    return result["distance"], result["duration"]
def compute_geometry(self):
    """Query OSRM for a route between this object's endpoints and store
    the resulting geometry on ``self.geometry``.

    Uses python-osrm (https://github.com/ustroetz/python-osrm).
    """
    origin = osrm.Point(latitude=self.from_node.lat,
                        longitude=self.from_node.lon)
    target = osrm.Point(latitude=self.to_node.lat,
                        longitude=self.to_node.lon)
    # Default simple_route() output; the output/overview/geometry kwargs
    # are deliberately left at their defaults.
    response = osrm.simple_route(origin, target)
    self.geometry = response['routes'][0]['geometry']
def test_simple_route(self, mock_urlopen):
    """simple_route() must return only the "routes" array of the OSRM
    response, with the encoded-polyline geometry converted to WKT."""
    # Canned /route JSON response — urlopen is mocked, so no network is
    # involved; the geometry below is a real encoded polyline.
    mock_urlopen.return_value = MockReadable(
        u'''{"code":"Ok","routes":[{"legs":[{"steps":[],"summary":"","duration":14821.6,"distance":256884.8}],"geometry":"a|wdCobf{F}|@h]}pC`dAmpAbh@qTvTob@d_@sJfEcQvBeJtJcGjNiM~Hqe@~Mcn@`P{i@vFgPn@}HtBeGz@{Md@gGxAoGhCwEh@y_@lBmM|@ac@dLsc@nJmh@|Jot@zEeUXoWtEsRpBmRd@o`@`Eab@fGka@nDYB|a@va@gDAyEmCoFqC}K_FkHqCy_@iNsLmEiFuB}EuBgHgDwC_BwCaBwFmD_@UqMcJkJ}H_JmImI_JiJyLwIgMaTm\\\\waBqiCqaAqxAwfDq`FszFeuH_tCyaEuVs[alAgtAoTcV`AcA|FsGl@k@dK{KxE_FzGoHb^k^lFgGxMkQxGwKjpAksBlGoKnNyW~Xyj@fh@gdAzRka@hCsGlJwVxGuVlL{g@|Jae@pS{~@jCqJfCuGnFwLxDuGtDaGvCwDtDyEfEcElzA{sAxKmKtH_IvFcInHuM`EgKjDuLxB_LxAgNx@uPA{U@sGJgGFiAAcDdA}KtBqLbGcS`m@ydBzTmk@hz@uqBnOm]~LeVlM_TfXcc@ncAe`BlKsQ|GuNnSyn@`]_gAfUis@tI}SjLgTzv@ccAv@W`LiNbh@ip@nOcSlK{Plr@}zA|Tmf@lTye@nUqf@jz@u{AfTk`@rGqObH}TdYyeAzRiu@vE_QbHsSfF_M`f@yiAvoAexCrTog@hOy_@rFgR`GwW?aBdp@{xCvlAsvDnJi\\\\|Fo\\\\xDs`@xRqiClBaW~Fio@Fg@bCaRpUmpAbDcR`EoUPuAdAyHtB}P\\\\qEf@_DZqDvCv@`rAl^fPnEbT`Hh@`@BBBBB?DBF?D?FCZDxtAbf@`GnBzt@lUbZjJvWfIvAd@~d@`ObEjA|JlEfAd@h~Ap{@dUrLjTzJb\\\\bIdNnBxTtA|\\\\Zds@mCbZj@j[bEvTtHn`@hMx}@nUjk@lPd^zNrcCleAl`@jOhYrFxpC`WbWnEhNfDfM~HjaAnn@nPdJvRdG|bDnx@nUtMxd@jYnw@ng@pr@j\\\\dRhKzNpIrKnL~Zx_@bW|\\\\fMpI|NjHnP`FhuC~j@~O`F~IhKpEpIr`@x|@vNb\\\\NXjLfU^r@`KxN`Q`MtQdJvvAxg@hC~@FBVH~DnArj@dQ~`@vJr_Bd[vc@tJle@`PfXfNfwBbqAhY|Rn`@|\\\\lZzUt\\\\bTnf@zU`VjRdL~OxMlYpUl`@vR`Wd]dQ~iCzz@hShKbb@`^fc@hUt\\\\pSbyA`cA`KlEvRlE|HiAzHaFjrBclB`PcJbQuFfRyDhS}ApPb@zMbCdR`FfRbJjSjRdLvQpEnLjHbTrKpd@pEhKlI~HvLdGtKpBlnHra@rP`CxMvCbiBjj@nOvJrPfN`K|R~IpSnDdQdA`WdQhjFjClOrEnLzHtJnOjHtl@hNjNb@nOgD~Id@bK|Hjn@`XjIrEdFlBpEnBbD|AvDnB`F|CnCfBbC`BfDnCfBvAxCjCfFzD|EhDfEjCxGrDrF|CfL|FhSnJfDjBvBf@~A^n@Vp@ThBd@x@\\\\`@Pj@Vt@`@X\\\\JNh@b@\\\\VxBhBx@l@d@Zp@f@|@jAxAnBbArAx@xAHRr@zAXp@CTBRLLNHR?BABCb@FhAPrE|@dJbB~FdBjBj@dGhBnA^rA`@pA`@h@JTD\\\\FxBj@fF|Aj@PzAb@rNhEPDxEvAlF~AVHrDdA~FfBt@XL]lq@afBdBaGb@qBtAuGFc@DKBMIY}BeBaXqZoB_FYyE@kHi@kE_Jg_@EkG?aKvAcRbAgFbDkHfCcIrQsg@dBwHvCmZjB}JpAqGNqF]oLc@}C_BkC{BwAwDg
AwCqC{@}CaCoVUaYo@yFyCqPoBoDiHeIcBcDyCwM}CkOmBgGoCqG}EkJoX_h@_Qy[yAaHsDgWe@}CoAgDgDqEkb@}b@oB}CkGePgV}f@{CiEkF{E{XiPgHkFcBuCs@qCKyEjF{w@VyE?sIk@iG_CwV{Kmy@g@mEKsGvAktAbBc~Ar@u~@jAwfACwF_@cHg@wGsBqIcEsRsBuGoBiEeCoD}[o[uEiFiCgEwBgFo@{CsAcGoByJwFu[w@eDcB_HiDeMaBaHaCqNyAeJkAmM@kIVuUb@_[b@oSLaWr@_[d@oJrAaFI}X^kMTqLo@yHqCaMmAeG{@qJDcIZqGv@qKfE_p@V_DQ{BCcAiGi[g@gCc@oEMeEOiGiAsg@]{PeAek@","duration":14821.6,"distance":256884.8}],"waypoints":[{"hint":"YtOXh5LYdIgAAAAAAAAAAH0FAAAAAAAA-1IAAD2o_wUlqP8FbrcAAC6OdgIrck4BEL95AngUTwEAAAEBfDhq3w==","name":"","location":[41.324078,21.918251]},{"hint":"apH5jEvnv40AAAAAYAEAAAAAAAC_LQAA42gAAHZ5UQZhQk4IbrcAAFxqgAK9REQBFHOAAqgvRQFOAAEBfDhq3w==","name":"","location":[41.970268,21.251261]}]}'''
    )
    result = osrm.simple_route((41.5332, 21.9598), (41.9725, 21.3114),
                               output="routes", geometry="wkt")
    # Only the "routes" part from the response should have been returned:
    self.assertIsInstance(result, list)
    # ... with the geometry field transformed to WKT:
    self.assertIn("LINESTRING", result[0]["geometry"])
def assign_distance_between_neighbors(node, neighbor_name_coords):
    """Route from ``node`` to a neighbour via OSRM and record the road
    distance and travel duration on the node.

    ``neighbor_name_coords`` is a (name, longitude, latitude) triple.
    """
    name, lon, lat = neighbor_name_coords
    source = osrm.Point(latitude=node.lat, longitude=node.lon)
    target = osrm.Point(latitude=lat, longitude=lon)
    # One routing call serves both metrics; the first route is the best one.
    best_route = osrm.simple_route(source, target)['routes'][0]
    # Store distance first, then duration, on the node.
    node.set_new_neighbor_dist(name, best_route['distance'])
    node.set_new_neighbor_dur(name, best_route['duration'])
def test_non_existing_host(self):
    """Every public API entry point must raise URLError when pointed at an
    unreachable host."""
    config = osrm.RequestConfig("localhost/v1/flying")
    self.assertEqual(config.host, "localhost")
    berlin, hamburg, hanover = (13.38886, 52.51703), (10.00, 53.55), (52.374444, 9.738611)
    # Same calls, same order as before — just driven from a list.
    failing_calls = [
        lambda: osrm.nearest((12.36, 45.36), url_config=config),
        lambda: osrm.trip([berlin, hamburg, hanover], url_config=config),
        lambda: osrm.simple_route(berlin, hamburg, url_config=config),
        lambda: osrm.AccessIsochrone(berlin, points_grid=100, url_config=config),
        lambda: osrm.match([hamburg, hanover], url_config=config),
        lambda: osrm.table([hamburg, hanover], [hamburg, hanover], url_config=config),
    ]
    for call in failing_calls:
        with self.assertRaises(URLError):
            call()
def test_non_existing_host(self):
    """Requests against a bogus host must fail with URLError for every
    python-osrm API function."""
    bad_config = osrm.RequestConfig("localhost/v1/flying")
    self.assertEqual(bad_config.host, "localhost")
    with self.assertRaises(URLError):
        osrm.nearest((12.36, 45.36), url_config=bad_config)
    with self.assertRaises(URLError):
        osrm.trip([(13.38886, 52.51703), (10.00, 53.55),
                   (52.374444, 9.738611)], url_config=bad_config)
    with self.assertRaises(URLError):
        osrm.simple_route((13.38886, 52.51703), (10.00, 53.55),
                          url_config=bad_config)
    with self.assertRaises(URLError):
        osrm.AccessIsochrone((13.38886, 52.51703), points_grid=100,
                             url_config=bad_config)
    with self.assertRaises(URLError):
        osrm.match([(10.00, 53.55), (52.374444, 9.738611)],
                   url_config=bad_config)
    with self.assertRaises(URLError):
        osrm.table([(10.00, 53.55), (52.374444, 9.738611)],
                   [(10.00, 53.55), (52.374444, 9.738611)],
                   url_config=bad_config)
def RouteAndInterp(matchedf, T, min_dist):
    """Best estimate of a taxi's location at unix time T.

    Inputs: ``matchedf`` is the matched dataframe for one Taxi_ID (the code
    reads columns 'mts' — matched timestamps — and 'mpos' — matched
    positions, apparently (lon, lat) pairs; confirm against the matcher);
    ``T`` is the target timestamp; ``min_dist`` is the minimum accepted
    distance below which the taxi is treated as stationary.
    """
    # Signed offset of every matched fix from the target time T.
    matchedf['ts_dff'] = matchedf['mts'] - T
    # adf: earliest fix after T; bdf: latest fix before T (bracketing pair).
    adf = matchedf[matchedf['ts_dff'] > 0].min()
    bdf = matchedf[matchedf['ts_dff'] < 0].max()
    # Great-circle distance between the two bracketing fixes.
    d = haversine_pc(adf.mpos[0], adf.mpos[1], bdf.mpos[0], bdf.mpos[1])
    #if adf.mpos == bdf.mpos:
    if d <= min_dist:
        # Taxi barely moved between the fixes: use the later one as-is.
        taxi_pos_estimate = adf.mpos
    else:
        # Route from the earlier fix to the later one over the road network.
        osrm_route_result = osrm.simple_route([bdf.mpos[0], bdf.mpos[1]],
                                              [adf.mpos[0], adf.mpos[1]],
                                              output='full',
                                              overview="full",
                                              geometry='polyline',
                                              steps='True',
                                              annotations='true')
        link_data, route_nodes = ProcessRouteResults(osrm_route_result,
                                                     bdf.mts, adf.mts)
        # Last route node reached no later than T (cumulative durations).
        T_index = max(link_data[link_data['dur_cumsum'] <= T].index.tolist())
        x1 = route_nodes['longitude'][T_index]
        y1 = route_nodes['latitude'][T_index]
        if T_index == 0:
            # First link: its start time is cumulative end minus duration.
            t1 = link_data['dur_cumsum'][0] - link_data.duration[T_index]
        else:
            t1 = link_data['dur_cumsum'][T_index - 1]
        x2 = route_nodes['longitude'][T_index + 1]
        y2 = route_nodes['latitude'][T_index + 1]
        t2 = link_data['dur_cumsum'][T_index]
        # Linear interpolation between the two bracketing route nodes at T.
        T_longitude, T_latitude = Straight_Line_Interp(x1, y1, t1, x2, y2, t2, T)
        taxi_pos_estimate = tuple([T_longitude, T_latitude])
    return taxi_pos_estimate
def __init__(self, **kwargs):
    """Routing neuron: query OSRM for a route between two coordinates and
    ``say`` a message dict describing the result.

    kwargs: host/profile configure the OSRM endpoint; latitude1/longitude1
    and latitude2/longitude2 are the endpoints; the boolean flags
    (alternatives, distance, duration, summary, route, raw) select which
    per-route fields are added to the spoken message.
    """
    super(Routing_machine, self).__init__(**kwargs)
    # Configuration
    self.host = kwargs.get('host', 'router.project-osrm.org')
    self.profile = kwargs.get('profile', 'driving')
    # Parameters
    self.latitude1 = kwargs.get('latitude1', None)
    self.longitude1 = kwargs.get('longitude1', None)
    self.latitude2 = kwargs.get('latitude2', None)
    self.longitude2 = kwargs.get('longitude2', None)
    self.alternatives = kwargs.get('alternatives', False)
    self.distance = kwargs.get('distance', False)
    self.duration = kwargs.get('duration', False)
    self.summary = kwargs.get('summary', False)
    self.route = kwargs.get('route', False)
    self.raw = kwargs.get('raw', False)
    if self._is_parameters_ok():
        # NOTE(review): RequestConfig is module-global state in python-osrm,
        # so this mutates configuration for every caller in the process.
        osrm.RequestConfig.host = self.host
        osrm.RequestConfig.profile = self.profile
        start = osrm.Point(latitude=float(self.latitude1),
                           longitude=float(self.longitude1))
        end = osrm.Point(latitude=float(self.latitude2),
                         longitude=float(self.longitude2))
        try:
            self.api_result = osrm.simple_route(
                start, end, steps=True, alternatives=self.alternatives)
            logging.debug("[OSRM] get API result")
            message = {
                "returncode": "OK",
                "start": str(start),
                "end": str(end),
                "profile": self.profile
            }
            if self.raw:
                message['raw'] = self.api_result
                logging.debug("[OSRM] add raw JSON to message")
            # Build a lazy (result, route) pipeline over the returned routes;
            # each enabled flag appends one mapper stage.
            items_iterator = map(
                lambda route: {
                    'result': {},
                    'route': route
                }, self.api_result['routes'])
            if self.distance:
                logging.debug("[OSRM] add distance to execution scheme")
                items_iterator = map(self.distance_mapper, items_iterator)
            if self.duration:
                logging.debug("[OSRM] add duration to execution scheme")
                items_iterator = map(self.duration_mapper, items_iterator)
            if self.summary:
                logging.debug("[OSRM] add summary to execution scheme")
                items_iterator = map(self.route_summary_mapper, items_iterator)
            if self.route:
                logging.debug("[OSRM] add route to execution scheme")
                items_iterator = map(self.route_mapper, items_iterator)
            # Extract the accumulated result dicts; list() drives the
            # whole lazy pipeline.
            items_iterator = map(lambda item: item['result'], items_iterator)
            message['routes'] = list(items_iterator)
        except HTTPError as e:
            # On an HTTP failure the message carries only the status code.
            message = {"returncode": str(e.code)}
        logging.debug("[OSRM] neuron return dict %s" % message)
        self.say(message)
# Pairwise routing over station pairs: for every pair (i, j) with j > i,
# ask OSRM for a geojson route between the two stations and flip its
# coordinates from (lon, lat) to (lat, lon).
# NOTE(review): this is a fragment — `i` is defined outside this excerpt
# and `j` is never incremented within it; both nested loops rebind `row`
# without using it.  Confirm against the full script.
j = 0
result_routes_list = {}
routes_list = {}
to_station_list = {}
station_list = {}
for row in new_stations:
    for row in new_stations:
        if j > i:
            # Station rows are indexed [.., .., lat, lon] here — presumably;
            # verify against how new_stations is built.
            p1 = Point(latitude=float(new_stations[i][2]),
                       longitude=float(new_stations[i][3]))
            p2 = Point(latitude=float(new_stations[j][2]),
                       longitude=float(new_stations[j][3]))
            # Retry until the OSRM request succeeds.
            # NOTE(review): bare except also swallows KeyboardInterrupt —
            # consider narrowing to the request exception.
            while True:
                try:
                    result = osrm.simple_route(p1, p2, geometry='geojson')
                    break
                except:
                    time.sleep(0.5)
                    continue
            # time.sleep(0.3)
            route = []
            route_first = result['routes'][0]['geometry']['coordinates']
            for elemet in route_first:
                # geojson coordinates are (lon, lat); flip to (lat, lon).
                point = [elemet[1], elemet[0]]
                route.append(point)
            # print(route)
            #### re-encoding
            k = 0
def TaxiNetworkResults_T(combined_taxidf, T, min_dist, T_search_margin, t_accept):
    """Estimate taxi positions at time T and find taxi pairs within
    (non-)line-of-sight range.

    Steps: window the combined trace dataframe to T +/- T_search_margin,
    reject taxis lacking fixes on both sides of T, map-match each remaining
    trace with OSRM, estimate each taxi's position at T (exact match or
    route-based interpolation), then pair taxis whose haversine separation
    lies in (min_los_length, max_los_length) and count buildings crossing
    each pair's segment via a PostGIS query.

    Returns (RESULT_DF, reject_ratio, reject_taxis_pos).
    NOTE(review): RESULT_DF is only assigned when at least one pair is
    found — otherwise the final return raises NameError.  `t_accept` is
    unused in this body.  Relies on module-level names: osrm, np, pd,
    pdsql, connection, haversine_pc, HaversineDistPC2,
    ProcessMapMatchResults, ProcessRouteResults, Straight_Line_Interp.
    """
    # Window the enormous dataframe to values within T +/- T_search_margin.
    taxidf = combined_taxidf[(combined_taxidf.unix_ts < T + T_search_margin)
                             & (combined_taxidf.unix_ts >= T - T_search_margin)]
    taxi_ids_before_T = taxidf[taxidf.unix_ts < T].taxi_id.unique()
    taxi_ids_after_T = taxidf[taxidf.unix_ts >= T].taxi_id.unique()
    # Taxis seen on only one side of T cannot be interpolated -> reject.
    taxi_ids_not2process = list(
        set(taxi_ids_after_T).symmetric_difference(taxi_ids_before_T))
    if len(taxi_ids_not2process) > 0:
        for taxi_id_2drop in taxi_ids_not2process:
            # NOTE(review): drop() without inplace=True discards its result,
            # so taxidf is not actually modified here — confirm intent.
            taxidf.drop(taxidf[taxidf.taxi_id == taxi_id_2drop].index)
    # this ratio is interesting, early estimates suggest window length of
    # 30s (~91%), maybe better to have 1min, 60s long windows
    # (accept_ratio ~1ish); needs more investigation
    print('reject ratio = %f' %
          float(len(taxi_ids_not2process) / len(taxidf.taxi_id.unique())))
    reject_ratio = (len(taxi_ids_not2process) / len(taxidf.taxi_id.unique()))
    taxis_Tpos = []
    taxis_Tids = []
    taxi_ids_to_process = taxidf.taxi_id.unique().tolist()
    # For each trace: map-match, then estimate the position at T.
    for taxi_id in taxi_ids_to_process:
        taxi_subset = taxidf[taxidf.taxi_id == taxi_id].sort_values('unix_ts')
        timestamps2match = taxi_subset.unix_ts.tolist()
        taxi_pos2match = [
            tuple(x) for x in taxi_subset[['longitude', 'latitude']].values
        ]
        matched_points = osrm.match(taxi_pos2match,
                                    overview="simplified",
                                    timestamps=timestamps2match,
                                    radius=None)
        # A string result implies no points were matched, hence ditch...
        if type(matched_points) is str:
            taxi_pos_estimate = None
            #snapped_subset = Snap2Road(taxi_subset)
            #matchedf = pd.DataFrame({'mpos':snapped_subset.snap_pos, 'mts':snapped_subset.unix_ts})
        else:
            matchedf, nobody_index = ProcessMapMatchResults(
                matched_points, timestamps2match)
            # Exact hit: a valid matched fix exists at exactly t == T.
            if (any(matchedf['mts'] == T)) and (bool(
                    np.isnan(matchedf[matchedf.mts == T].mpos.values[0][0]))
                                                is False):
                taxi_pos_estimate = matchedf[matchedf.mts == T].mpos.tolist()[0]
            else:
                # Route/interpolate between the fixes bracketing T (inline
                # variant of RouteAndInterp with extra failure guards).
                matchedf['ts_dff'] = matchedf['mts'] - T
                adf = matchedf[matchedf['ts_dff'] > 0].min()
                bdf = matchedf[matchedf['ts_dff'] < 0].max()
                # No fix on one side of T -> give up on this taxi.
                # (Complete map-matching failure could instead snap coords
                # and then route; for now nothing is done.)
                if (adf.isnull().any() == True) or (bdf.isnull().any() == True):
                    taxi_pos_estimate = None  # [np.nan] #tuple([np.nan,np.nan])
                else:
                    d = haversine_pc(adf.mpos[0], adf.mpos[1], bdf.mpos[0],
                                     bdf.mpos[1])
                    #if adf.mpos == bdf.mpos:
                    if d <= min_dist:
                        # Barely moved: use the later fix directly.
                        taxi_pos_estimate = adf.mpos
                    else:
                        osrm_route_result = osrm.simple_route(
                            [bdf.mpos[0], bdf.mpos[1]],
                            [adf.mpos[0], adf.mpos[1]],
                            output='full',
                            overview="full",
                            geometry='polyline',
                            steps='True',
                            annotations='true')
                        # A string result again signals routing failure.
                        if type(osrm_route_result) is str:
                            taxi_pos_estimate = None
                        else:
                            link_data, route_nodes = ProcessRouteResults(
                                osrm_route_result, bdf.mts, adf.mts)
                            # Last route node reached no later than T.
                            if any(link_data.dur_cumsum < T):
                                T_index = max(link_data[
                                    link_data['dur_cumsum'] <= T].index.tolist())
                            else:
                                T_index = 0
                            x1 = route_nodes['longitude'][T_index]
                            y1 = route_nodes['latitude'][T_index]
                            if T_index == 0:
                                t1 = link_data['dur_cumsum'][
                                    0] - link_data.duration[T_index]
                            else:
                                t1 = link_data['dur_cumsum'][T_index - 1]
                            x2 = route_nodes['longitude'][T_index + 1]
                            y2 = route_nodes['latitude'][T_index + 1]
                            t2 = link_data['dur_cumsum'][T_index]
                            # Linear interpolation between the two nodes at T.
                            T_longitude, T_latitude = Straight_Line_Interp(
                                x1, y1, t1, x2, y2, t2, T)
                            taxi_pos_estimate = tuple([T_longitude, T_latitude])
        # Keep only finite, non-NaN estimates.
        if taxi_pos_estimate is not None and (bool(
                np.isnan(taxi_pos_estimate[0])) is False) and (bool(
                    np.isinf(taxi_pos_estimate)[0]) is False):
            taxis_Tpos.append(taxi_pos_estimate)
            taxis_Tids.append(taxi_id)
    print('succesfull spatial estimation = %f' %
          (float(len(taxis_Tids) / len(taxi_ids_to_process))))
    reject_taxis_pos = (1 - float(len(taxis_Tids) / len(taxi_ids_to_process)))
    min_los_length = 0
    max_los_length = 100  # same thresholds as the reference paper
    # Build a lower-triangular haversine distance matrix and collect taxi
    # pairs whose separation lies within (min_los_length, max_los_length).
    # input_gps_pos is a list of position tuples: [(long1,lat1),(long2,lat2),...]
    input_gps_pos = taxis_Tpos
    mat_length = len(input_gps_pos)
    Hdist_matrix = np.zeros((mat_length, mat_length), dtype=int)
    taxis_nolos = []
    for row in range(0, mat_length):
        for col in range(0, row):
            Hdist = HaversineDistPC2(input_gps_pos[row], input_gps_pos[col])
            Hdist_matrix[row, col] = Hdist
            if (Hdist > min_los_length) & (Hdist < max_los_length):
                # Output entries: (taxi_id_A, taxi_id_B, pos_A, pos_B, Hdist)
                taxis_nolos.append(
                    (taxis_Tids[row], taxis_Tids[col], input_gps_pos[row],
                     input_gps_pos[col], Hdist))
    # Line of Sight model: count buildings intersecting the straight segment
    # between each candidate pair (PostGIS, longitude/latitude in SRID 4326).
    num_buildings = []
    for i in range(len(taxis_nolos)):
        LoS_execution_str = (
            "SELECT * FROM rome_buildings WHERE ST_Intersects(ST_SetSRID('LINESTRING (%s %s, %s %s)'::geometry,4326), geom);"
            % (str(taxis_nolos[i][2][0]), str(taxis_nolos[i][2][1]),
               str(taxis_nolos[i][3][0]), str(taxis_nolos[i][3][1])))
        LoS_df = pdsql.read_sql_query(LoS_execution_str, connection)
        num_buildings.append(len(LoS_df))
    if len(taxis_nolos) > 0:
        taxiAid, taxiBid, Alonglat, Blonglat, Hdist = zip(*taxis_nolos)
        RESULT_DF = pd.DataFrame({
            'taxiAid': taxiAid,
            'taxiBid': taxiBid,
            'Alonglat': Alonglat,
            'Blonglat': Blonglat,
            'Hdist': Hdist,
            'num_buildings': num_buildings
        })
    return RESULT_DF, reject_ratio, reject_taxis_pos
def compute(df):
    """Greedy ride-sharing matcher ("Minimal Delay" algorithm).

    Buckets rides into MAX_WAITING_TIME-minute windows, asks OSRM for a
    dropoff-to-dropoff duration table per window, then greedily pairs rides
    whose combined pickup/dropoff detours stay within LATENESS_ADJUSTER and
    EARLY_PICKUP_ADJUSTER.  Returns a JSON string with the paired rides and
    summary counters.

    NOTE(review): relies on module-level names (MAX_WAITING_TIME,
    LATENESS_ADJUSTER, EARLY_PICKUP_ADJUSTER, osrm, np, pd, json,
    traceback).  DataFrame.as_matrix() was removed in pandas 1.0 — confirm
    the pinned pandas version.
    """
    group_10m = df.groupby(pd.Grouper(freq=str(MAX_WAITING_TIME) + 'Min'))
    collapsedJobs = []
    sharedJobs = []  # NOTE(review): never populated in this body
    doable = 0
    total = 0
    dfg = 0
    rides = []
    for key, item in group_10m:
        try:
            group = group_10m.get_group(key)
            # Singleton windows cannot be shared...
            if group.shape[0] == 1:
                continue
            dropoffCoords = group.as_matrix(
                columns=['dropoff_longitude', 'dropoff_latitude'])
            # Pairwise dropoff->dropoff durations for this window.
            time_matrix = osrm.table(dropoffCoords,
                                     ids_origin=np.arange(len(dropoffCoords)),
                                     ids_dest=np.arange(len(dropoffCoords)),
                                     output='dataframe',
                                     send_as_polyline=False)
            time_matrix_delay = np.add(time_matrix, +LATENESS_ADJUSTER)
            print("OSRM Table Returned")
            x = 0
            dfg = 0
            marked = []  # NOTE(review): appended to but never read
            # NOTE(review): x/y are maintained manually as row/column
            # indices into time_matrix_delay; `i`/`j` themselves are unused.
            for i in time_matrix:
                # "tolerable" means the difference between pickup and
                # dropoff detour is minimal.
                y = 0
                for j in i:
                    # Skip self-pairs and entries already consumed (zeroed).
                    if x == y or time_matrix_delay[x][
                            y] == 0 or time_matrix_delay[x][
                                y] == LATENESS_ADJUSTER:
                        y = y + 1
                        continue
                    first = group.iloc[[x]]
                    second = group.iloc[[y]]
                    # Is the first pickup before the second?  Consider the
                    # change of duration for the second ride as well.
                    if pd.Timedelta(second["pickup_datetime"].values[0] -
                                    first["pickup_datetime"].values[0]
                                    ).seconds >= 0 and pd.Timedelta(
                                        second.index.values[0] -
                                        first.index.values[0]
                                    ).seconds < time_matrix_delay[x][y]:
                        # Is the pickup time realistic?  Route between the
                        # two pickup points first.
                        result = osrm.simple_route([
                            first["pickup_longitude"].values[0],
                            first["pickup_latitude"].values[0]
                        ], [
                            second["pickup_longitude"].values[0],
                            second["pickup_latitude"].values[0]
                        ],
                                                   output='route',
                                                   geometry='wkt',
                                                   send_as_polyline=True)
                        extended_trip_time = result[0]["duration"] * 2
                        if pd.Timedelta(
                                first["pickup_datetime"].values[0] -
                                second["pickup_datetime"].values[0]
                        ).seconds <= extended_trip_time + EARLY_PICKUP_ADJUSTER:
                            # How long does the combined journey take?
                            # Order: pickup_first, pickup_second,
                            # dropoff_first, dropoff_second.
                            first_result = osrm.simple_route(
                                [
                                    first["pickup_longitude"].values[0],
                                    first["pickup_latitude"].values[0]
                                ], [
                                    first["dropoff_longitude"].values[0],
                                    first["dropoff_latitude"].values[0]
                                ], [[
                                    second["pickup_longitude"].values[0],
                                    second["pickup_latitude"].values[0]
                                ]],
                                output='route',
                                geometry='wkt',
                                send_as_polyline=True)
                            first_trip_time = first[
                                "trip_time_in_secs"].values[0]
                            extended_first_trip_time = first_result[0][
                                "duration"]
                            # First rider's detour must stay tolerable.
                            if extended_first_trip_time < first_trip_time + LATENESS_ADJUSTER:
                                second_result = osrm.simple_route(
                                    [
                                        second["pickup_longitude"].values[0],
                                        second["pickup_latitude"].values[0]
                                    ], [
                                        second["dropoff_longitude"].values[0],
                                        second["dropoff_latitude"].values[0]
                                    ], [[
                                        first["dropoff_longitude"].values[0],
                                        first["dropoff_latitude"].values[0]
                                    ]],
                                    output='route',
                                    geometry='wkt',
                                    send_as_polyline=True)
                                second_trip_time = second[
                                    "trip_time_in_secs"].values[0]
                                extended_second_trip_time = second_result[0][
                                    "duration"]
                                # Second rider's detour must stay tolerable.
                                if extended_second_trip_time < second_trip_time + LATENESS_ADJUSTER:
                                    dfg += 1
                                    # Full shared route geometry for display.
                                    result = osrm.simple_route(
                                        [
                                            first["pickup_longitude"].
                                            values[0],
                                            first["pickup_latitude"].values[0]
                                        ], [
                                            second["dropoff_longitude"].
                                            values[0],
                                            second["dropoff_latitude"].
                                            values[0]
                                        ], [[
                                            second["pickup_longitude"].
                                            values[0],
                                            second["pickup_latitude"].values[0]
                                        ], [
                                            first["dropoff_longitude"].
                                            values[0],
                                            first["dropoff_latitude"].values[0]
                                        ]],
                                        output='route',
                                        geometry='geojson',
                                        send_as_polyline=True)
                                    collapsedJobs.append(group.iloc[[x]])
                                    collapsedJobs.append(group.iloc[[y]])
                                    ride = {}
                                    ride["pickup_first"] = {}
                                    ride["pickup_first"]["type"] = "point"
                                    ride["pickup_first"]["latitude"] = first[
                                        "pickup_latitude"].values[0]
                                    ride["pickup_first"]["longitude"] = first[
                                        "pickup_longitude"].values[0]
                                    ride["dropoff_first"] = {}
                                    ride["dropoff_first"]["type"] = "point"
                                    ride["dropoff_first"]["latitude"] = first[
                                        "dropoff_latitude"].values[0]
                                    ride["dropoff_first"]["longitude"] = first[
                                        "dropoff_longitude"].values[0]
                                    ride["pickup_second"] = {}
                                    ride["pickup_second"]["type"] = "point"
                                    ride["pickup_second"]["latitude"] = second[
                                        "pickup_latitude"].values[0]
                                    ride["pickup_second"][
                                        "longitude"] = second[
                                            "pickup_longitude"].values[0]
                                    ride["dropoff_second"] = {}
                                    ride["dropoff_second"]["type"] = "point"
                                    ride["dropoff_second"][
                                        "latitude"] = second[
                                            "dropoff_latitude"].values[0]
                                    ride["dropoff_second"][
                                        "longitude"] = second[
                                            "dropoff_longitude"].values[0]
                                    ride["geometry"] = result[0]["geometry"]
                                    rides.append(ride)
                                    marked.append(x)
                                    marked.append(y)
                                    # Zero the matched rows/columns so the
                                    # pair cannot be matched again.
                                    g = 0
                                    for k in i:
                                        print(x)
                                        time_matrix_delay[x][g] = 0
                                        time_matrix_delay[y][g] = 0
                                        time_matrix_delay[g][x] = 0
                                        time_matrix_delay[g][y] = 0
                                        g += 1
                                else:
                                    l = 0
                            else:
                                l = 1
                        else:
                            l = 3
                    else:
                        l = 2
                    y = y + 1
                x = x + 1
            total += group.shape[0]
            doable += dfg
            print("Total")
            print(total)
            print("Doable")
            print(doable)
            # Early exit once enough shared pairs were found.
            if (doable > 20):
                break
        except Exception:
            # Best-effort per window: log the traceback and move on.
            print(traceback.format_exc())
    # Flatten the matched jobs into point dicts for the response payload.
    collapsedDropoffArray = []
    collapsedPickupArray = []
    for collapsedJob in collapsedJobs:
        collapsedJobDropoffDict = {}
        collapsedJobDropoffDict["type"] = "point"
        collapsedJobDropoffDict["longitude"] = collapsedJob[
            "dropoff_longitude"].values[0]
        collapsedJobDropoffDict["latitude"] = collapsedJob[
            "dropoff_latitude"].values[0]
        collapsedDropoffArray.append(collapsedJobDropoffDict)
        collapsedJobPickupDict = {}
        collapsedJobPickupDict["type"] = "point"
        collapsedJobPickupDict["longitude"] = collapsedJob[
            "pickup_longitude"].values[0]
        collapsedJobPickupDict["latitude"] = collapsedJob[
            "pickup_latitude"].values[0]
        collapsedPickupArray.append(collapsedJobPickupDict)
    dataDict = {
        'collapsedDropoffArray': collapsedDropoffArray,
        'collapsedPickupArray': collapsedPickupArray,
        'rides': rides,
        'totalRides': total,
        'collapsedRides': doable * 2,
        'sharedRides': doable,
        'algorithm': "Minimal Delay"
    }
    data = json.dumps(dataDict)
    return data
    # (A large commented-out legacy payload builder for the "naiveN"
    # algorithm was removed here.)
    # NOTE(review): dead code — unreachable after the return above.
    return