def polyline_encoded_table(requests, port):
    """Query the OSRM ``table`` service for a distance matrix.

    The coordinate list sent to OSRM is the riders' source points followed
    by their destination points, so the returned matrix covers every
    source/destination pair.

    Args:
        requests: A list of Request objects, one per rider; each exposes
            ``source_long``/``source_lat`` and ``dest_long``/``dest_lat``.
        port: The full link to the localhost port on which the osrm server
            is running (assigned to ``osrm.RequestConfig.host``).

    Returns:
        The ``'distances'`` matrix from the raw OSRM table response.
    """
    osrm.RequestConfig.host = port
    source_points = [(req.source_long, req.source_lat) for req in requests]
    dest_points = [(req.dest_long, req.dest_lat) for req in requests]
    # Sources first, then destinations — a single combined coordinate list.
    coordinates = source_points + dest_points
    response = osrm.table(coordinates, output='raw', annotations='distance')
    return response['distances']
def compute_distance_matrix(csv_nodes_path, titles='', annotations='distance'):
    """Compute and return the OD matrix for a set of nodes of interest.

    Args:
        csv_nodes_path (str): Path to a tab-separated csv containing info
            about nodes, with 'latitude' and 'longitude' columns.
        titles (list, optional): Row ids of the od matrix to be produced;
            ignored (and replaced by integer ids) when empty or when its
            length does not match the number of nodes. Defaults to ''.
        annotations (str, optional): distance or time. Defaults to 'distance'.

    Returns:
        The OD matrix produced by ``osrm.table`` (pandas output).
    """
    supermarkets = pd.read_csv(csv_nodes_path, delimiter='\t')
    # OSRM expects (longitude, latitude) pairs. Build them directly from the
    # dataframe values instead of the original round-trip through
    # "lat,lon" strings that were immediately re-split and re-parsed.
    numerical_coords = [
        (float(lon), float(lat))
        for lat, lon in supermarkets[['latitude', 'longitude']].values
    ]
    # Fall back to integer ids when titles are missing or mismatched.
    if not titles or len(titles) != len(numerical_coords):
        titles = list(range(len(numerical_coords)))
    # compute distance matrix and return it
    dist_od = osrm.table(numerical_coords,
                         ids_origin=titles,
                         output='pandas',
                         annotations=annotations)
    return dist_od
def make_experiment():
    """Time an osrm.table call on a batch of random points and print the result.

    The loop currently covers a single size (2000) but keeps the range form
    so the experiment can be widened easily.
    """
    config = osrm.RequestConfig("localhost:5000/v1/driving")
    for size in range(2000, 2001):
        sample = [random_point() for _ in range(size)]
        started = time()
        table_result = osrm.table(sample, sample, url_config=config)
        print(table_result[0])
        print(size, round(time() - started, 4))
def test_table_only_origins(self, mock_urlopen):
    """Table call with origins only: pandas and numpy outputs must agree.

    The urlopen mock returns a canned 5x5 OSRM /table JSON response
    (durations plus snapped source/destination metadata), so no server
    is contacted.
    """
    mock_urlopen.return_value = MockReadable(
        u'''{"code":"Ok","durations":[[0,1559.9,4192.8,9858.4,7679.7],[1579.3,0,5300.6,8735.2,6507.6],[4214.7,5334,0,5671.5,3972.1],[9496.8,8354.6,5689.7,0,2643.2],[7270.1,6127.9,3971.5,2624.5,0]],"destinations":[{"hint":"-hmZhIIUJo8AAAAAAQAAAAoAAAAAAAAANwUAAP6jLAP9oywDbrcAAGdMQQF37oACaExBAXjugAIAAAEBfDhq3w==","name":"","location":[21.056615,42.004087]},{"hint":"P7yyg-Xnvo-GOH0ALAAAACYAAAAAAAAA-QAAAMut1QNf1AgDbrcAAIZRRgFrA4EChlFGAWsDgQIAAAEBfDhq3w==","name":"\xd0\x91\xd1\x83\xd0\xbb\xd0\xb5\xd0\xb2\xd0\xb0\xd1\x80 \xd0\x98\xd0\xbb\xd0\xb8\xd0\xbd\xd0\xb4\xd0\xb5\xd0\xbd","location":[21.385606,42.009451]},{"hint":"SzHlia8HEorwPf4AAQAAAFYAAAAAAAAA8wAAAIPVMgcv1TIHbrcAABfJPwF4rXkCGMk_AXmteQIAAAEBfDhq3w==","name":"R2231","location":[20.957463,41.528696]},{"hint":"VktjiIkUGY_-bPEACgAAAA8AAAAXAQAAWwQAAGHfwwb1cVUGbrcAAFqwQgFcqnICW7BCAVyqcgIJAAEBfDhq3w==","name":"R2347","location":[21.147738,41.069148]},{"hint":"dku7jXlLu43E7icBcQAAABYAAAAAAAAAAAAAAK8RqA-tEagPbrcAADXWSAEJAHcCNtZIAQkAdwIAAAEBfDhq3w==","name":"\xd0\xa2\xd1\x80\xd0\xb8\xd0\xb7\xd0\xbb\xd0\xb0","location":[21.550645,41.353225]}],"sources":[{"hint":"-hmZhIIUJo8AAAAAAQAAAAoAAAAAAAAANwUAAP6jLAP9oywDbrcAAGdMQQF37oACaExBAXjugAIAAAEBfDhq3w==","name":"","location":[21.056615,42.004087]},{"hint":"P7yyg-Xnvo-GOH0ALAAAACYAAAAAAAAA-QAAAMut1QNf1AgDbrcAAIZRRgFrA4EChlFGAWsDgQIAAAEBfDhq3w==","name":"\xd0\x91\xd1\x83\xd0\xbb\xd0\xb5\xd0\xb2\xd0\xb0\xd1\x80 \xd0\x98\xd0\xbb\xd0\xb8\xd0\xbd\xd0\xb4\xd0\xb5\xd0\xbd","location":[21.385606,42.009451]},{"hint":"SzHlia8HEorwPf4AAQAAAFYAAAAAAAAA8wAAAIPVMgcv1TIHbrcAABfJPwF4rXkCGMk_AXmteQIAAAEBfDhq3w==","name":"R2231","location":[20.957463,41.528696]},{"hint":"VktjiIkUGY_-bPEACgAAAA8AAAAXAQAAWwQAAGHfwwb1cVUGbrcAAFqwQgFcqnICW7BCAVyqcgIJAAEBfDhq3w==","name":"R2347","location":[21.147738,41.069148]},{"hint":"dku7jXlLu43E7icBcQAAABYAAAAAAAAAAAAAAK8RqA-tEagPbrcAADXWSAEJAHcCNtZIAQkAdwIAAAEBfDhq3w==","name":"\xd0\xa2\xd1\x80\xd0\xb8\xd0\xb7\xd0\xbb\xd0\xb0","location":[21.550645,41.353225]}]}'''
    )
    names = ['name1', 'name2', 'name3', 'name4', 'name5']
    coords = [[21.0566163803209, 42.004088575972],
              [21.3856064050746, 42.0094518118189],
              [20.9574645547597, 41.5286973392856],
              [21.1477394809847, 41.0691482795275],
              [21.5506463080973, 41.3532256406286]]
    # pandas output: durations come back as a DataFrame
    durations, new_coords, _ = osrm.table(coords,
                                          ids_origin=names,
                                          output="pandas")
    self.assertIsInstance(durations, DataFrame)
    # numpy output of the same request must carry identical values
    durations2, new_coords2, _ = osrm.table(coords,
                                            ids_origin=names,
                                            output="np")
    self.assertIsInstance(durations2, numpy.ndarray)
    self.assertEqual(durations.values.tolist(), durations2.tolist())
def test_non_existing_host(self):
    """Every wrapper entry point must raise URLError for an unreachable host."""
    Profile = osrm.RequestConfig("localhost/v1/flying")
    self.assertEqual(Profile.host, "localhost")
    # Same calls, same order as before — expressed as a data-driven table.
    failing_calls = [
        (osrm.nearest, ((12.36, 45.36),), {}),
        (osrm.trip,
         ([(13.38886, 52.51703), (10.00, 53.55), (52.374444, 9.738611)],),
         {}),
        (osrm.simple_route, ((13.38886, 52.51703), (10.00, 53.55)), {}),
        (osrm.AccessIsochrone, ((13.38886, 52.51703),), {'points_grid': 100}),
        (osrm.match, ([(10.00, 53.55), (52.374444, 9.738611)],), {}),
        (osrm.table,
         ([(10.00, 53.55), (52.374444, 9.738611)],
          [(10.00, 53.55), (52.374444, 9.738611)]),
         {}),
    ]
    for func, args, kwargs in failing_calls:
        with self.assertRaises(URLError):
            func(*args, url_config=Profile, **kwargs)
def test_non_existing_host(self):
    """Every wrapper entry point must raise URLError for an unreachable host.

    Uses a RequestConfig pointing at a host/profile that does not serve
    OSRM, then exercises each service helper in turn.
    """
    Profile = osrm.RequestConfig("localhost/v1/flying")
    # The host part is parsed off the profile string.
    self.assertEqual(Profile.host, "localhost")
    with self.assertRaises(URLError):
        osrm.nearest((12.36, 45.36), url_config=Profile)
    with self.assertRaises(URLError):
        osrm.trip(
            [(13.38886, 52.51703), (10.00, 53.55), (52.374444, 9.738611)],
            url_config=Profile)
    with self.assertRaises(URLError):
        osrm.simple_route(
            (13.38886, 52.51703), (10.00, 53.55), url_config=Profile)
    with self.assertRaises(URLError):
        osrm.AccessIsochrone(
            (13.38886, 52.51703), points_grid=100, url_config=Profile)
    with self.assertRaises(URLError):
        osrm.match(
            [(10.00, 53.55), (52.374444, 9.738611)], url_config=Profile)
    with self.assertRaises(URLError):
        osrm.table(
            [(10.00, 53.55), (52.374444, 9.738611)],
            [(10.00, 53.55), (52.374444, 9.738611)],
            url_config=Profile)
def _get_travel_durations(self):
    """Build a square travel-time matrix over all demand and station points.

    Queries OSRM's table service with the combined demand + station
    coordinates as both origins and destinations, labelling rows and
    columns with the corresponding location ids.
    """
    progress("Creating matrix of travel times...", verbose=self.verbose)
    # Demand locations first, then stations — same ordering for coords & ids.
    all_coords = [*self.demand_locs.values(), *self.station_locs.values()]
    all_ids = [*self.demand_locs.keys(), *self.station_locs.keys()]
    matrix, _, _ = osrm.table(all_coords,
                              coords_dest=all_coords,
                              ids_origin=all_ids,
                              ids_dest=all_ids,
                              output='dataframe',
                              url_config=self.osrm_config)
    return matrix
def list_matching(list1, list2, metric, maxMat=50000):
    """Compute, for every point in list1, its distances to every point in list2.

    Returns a two-element list [matchListGo, matchListReturn]:
    matchListGo[i] holds the matrix row list1[i] -> all of list2, and
    matchListReturn[i] the column list2 -> list1[i]. When the combined
    point count exceeds maxMat (the OSRM table-size cap), list1 is split
    into chunks and the function recurses on each chunk.

    NOTE(review): points are assumed to be (lat, lon) pairs reversed by
    reverse_list_coords into OSRM order — confirm against that helper.
    """
    numberOfSources = len(list1)
    numberOfTargets = len(list2)
    if numberOfSources + numberOfTargets > maxMat:
        print('The list call needs to be split to fit the maximum allowed of '
              + str(maxMat))
        print('(Calling with ' + str(numberOfSources) + ' sources and '
              + str(numberOfTargets) + ' targets)')
        if numberOfTargets < maxMat - 1:
            # All of list2 fits; split list1 into chunks of nel1 elements.
            nel1 = maxMat - numberOfTargets - 1
            print('Making calls with ' + str(nel1) + ' elements from list1 and '
                  + str(numberOfTargets) + ' from list2')
            nSplits = float(numberOfSources)/float(nel1)
            nSplits = int(np.floor(nSplits))
            print('Expecting ' + str(nSplits) + ' recursive calls.')
            matchListGo = []
            matchListReturn = []
            stored = 0
            nCalls = 0
            while stored < numberOfSources:
                nCalls += 1
                print('\tProcessing call number ' + str(nCalls) + '...')
                miniList1 = list1[stored:int(min(stored+nel1,numberOfSources))]
                print('\t(call with minlist: ' + str(len(miniList1))
                      + ' targets: ' + str(len(list2)) + ')')
                # Recurse on the chunk; deep copies keep results independent.
                miniGo, miniReturn = list_matching(miniList1, list2,
                                                   metric,maxMat=maxMat)
                matchListGo += copy.deepcopy(miniGo)
                matchListReturn += copy.deepcopy(miniReturn)
                stored += nel1
                print('\tDone.')
            print('Performed ' + str(nCalls) + ' calls.')
            print('> :split exit')
            return [matchListGo, matchListReturn]
        else:
            # list2 alone exceeds the cap — splitting it is unimplemented.
            print('ERROR: Nedd to split second / both lists, not yet implemented!')
            exit(-1)
    # Base case: one OSRM table call over the concatenated point list;
    # rows/cols beyond l2idx belong to list2.
    superList = reverse_list_coords(list1 + list2)
    l2idx = len(list1)
    [theMatrix, dummy, dummy] = osrm.table(superList,
                                           output='np',
                                           annotations=metric)
    matchListGo = []
    matchListReturn = []
    for i in range(len(list1)):
        matchListGo.append(theMatrix[i,l2idx:])
        matchListReturn.append(theMatrix[l2idx:,i])
    return [matchListGo, matchListReturn]
def test_table_OD(self, mock_urlopen):
    """Origin/destination table call returns a durations matrix of shape (O, D).

    The urlopen mock returns a canned 5x4 OSRM /table JSON response, so
    no server is contacted; the default output is a numpy array.
    """
    mock_urlopen.return_value = MockReadable(
        u'''{"code":"Ok","durations":[[10785.3,9107,14619.6,5341.2],[9546.8,7934.9,15473,4054.3],[14559.4,12440.7,18315.9,9115.3],[14463.4,10768.4,22202.6,9904],[12236.7,8541.7,19975.9,7677.3]],"destinations":[{"hint":"XAiehPiqpY4_JQUAZQAAAA0BAACzBQAAVwEAALxM2gRcSLAIbrcAABnMXwGE7IAC2NBfASDugAINAAEBfDhq3w==","name":"1061","location":[23.055385,42.003588]},{"hint":"qdjCi1mz-4wAAAAAFgAAAAAAAABzEgAAKQgAABae8wfvnfMHbrcAAHmQVQErD4ECwJNVATgDgQK6AAEBfDhq3w==","name":"","location":[22.384761,42.012459]},{"hint":"lfafieL2n4kAAAAAAAAAADIAAABbAAAAPQYAAEz6EwdiWKIFbrcAADTGPwF-M5gC2Mg_AZgxmAIDAAEBfDhq3w==","name":"","location":[20.956724,43.529086]},{"hint":"8gQxizUOMYsAAAAAAAAAABsAAAAAAAAAQRkAANIcvgd1Hb4HbrcAADQQSQEqrn0CCNZIAdinfQIAAAEBfDhq3w==","name":"","location":[21.565492,41.791018]}],"sources":[{"hint":"-hmZhIIUJo8AAAAAAQAAAAoAAAAAAAAANwUAAP6jLAP9oywDbrcAAGdMQQF37oACaExBAXjugAIAAAEBfDhq3w==","name":"","location":[21.056615,42.004087]},{"hint":"P7yyg-Xnvo-GOH0ALAAAACYAAAAAAAAA-QAAAMut1QNf1AgDbrcAAIZRRgFrA4EChlFGAWsDgQIAAAEBfDhq3w==","name":"\xd0\x91\xd1\x83\xd0\xbb\xd0\xb5\xd0\xb2\xd0\xb0\xd1\x80 \xd0\x98\xd0\xbb\xd0\xb8\xd0\xbd\xd0\xb4\xd0\xb5\xd0\xbd","location":[21.385606,42.009451]},{"hint":"SzHlia8HEorwPf4AAQAAAFYAAAAAAAAA8wAAAIPVMgcv1TIHbrcAABfJPwF4rXkCGMk_AXmteQIAAAEBfDhq3w==","name":"R2231","location":[20.957463,41.528696]},{"hint":"VktjiIkUGY_-bPEACgAAAA8AAAAXAQAAWwQAAGHfwwb1cVUGbrcAAFqwQgFcqnICW7BCAVyqcgIJAAEBfDhq3w==","name":"R2347","location":[21.147738,41.069148]},{"hint":"dku7jXlLu43E7icBcQAAABYAAAAAAAAAAAAAAK8RqA-tEagPbrcAADXWSAEJAHcCNtZIAQkAdwIAAAEBfDhq3w==","name":"\xd0\xa2\xd1\x80\xd0\xb8\xd0\xb7\xd0\xbb\xd0\xb0","location":[21.550645,41.353225]}]}'''
    )
    origins = [[21.0566163803209, 42.004088575972],
               [21.3856064050746, 42.0094518118189],
               [20.9574645547597, 41.5286973392856],
               [21.1477394809847, 41.0691482795275],
               [21.5506463080973, 41.3532256406286]]
    destinations = [[23.0566, 42.004],
                    [22.3856, 42.0094],
                    [20.9574, 43.5286],
                    [21.5506, 41.7894]]
    durations, snapped_origins, snapped_destinations = \
        osrm.table(origins, destinations)
    self.assertIsInstance(durations, numpy.ndarray)
    # One row per origin, one column per destination.
    expected_shape = (len(origins), len(destinations))
    self.assertEqual(durations.shape, expected_shape)
    # Matrix must contain at least one non-zero duration.
    self.assertTrue(durations.any())
def calcODMatrix(self, longName="Long", latName="Lat", idName="id"):
    '''
    Generates the OSRM table based on the origins and destinations.
    The x and y locations need to be extracted from the shapefiles

    longName - column name of longitude
    latName - column name of latitude
    idName - column name of id

    Stores the matrix on self.od_matrix and the snapped coordinates on
    self.snappedOrigins, and returns the matrix.
    '''
    list_coord = [
        tuple(x) for x in self.origins[[longName, latName]].values
    ]
    list_id = self.origins[[idName]].values
    # single-argument print() works identically on Python 2 and 3
    print(list_coord)
    print(list_id)
    time_matrix, snapped_coords = osrm.table(list_coord,
                                             ids_origin=list_id,
                                             output="dataframe")
    self.od_matrix = time_matrix
    self.snappedOrigins = snapped_coords
    # BUG FIX: the original returned self.odMatrix, an attribute that is
    # never assigned (the result is stored as self.od_matrix above), so
    # every call raised AttributeError after doing all the work.
    return self.od_matrix
import pandas as pd
import osrm

# Load boundary coordinates and the subset of boundaries actually used,
# both keyed by BoundaryId, then join to get one (X, Y) row per boundary.
coords = pd.read_csv(r'./inputs/boundaries_coords.csv',
                     index_col='BoundaryId')
boundaries = pd.read_csv(r'./inputs/boundaries_used.csv',
                         index_col='BoundaryId')
df = boundaries.join(coords)[['X', 'Y']]
coords_src = df.apply(list, axis=1).tolist()
ids_origin = df.index.tolist()
# BUG FIX: ids_origin was previously passed as the second POSITIONAL
# argument of osrm.table, which is coords_dest — the ids were silently
# treated as destination coordinates. Pass it by keyword instead.
dists = osrm.table(coords_src,
                   ids_origin=ids_origin,
                   output='df',
                   annotations='duration')
def od_matrix_latlong(pointList, metric='distance'):
    """Return the square OD matrix (numpy array) for a list of lat/long points.

    The points are converted to OSRM coordinate order via
    reverse_list_coords before querying the table service.
    """
    matrix, _, _ = osrm.table(reverse_list_coords(pointList),
                              output='np',
                              annotations=metric)
    return matrix
import osrm

# Route against the public OSRM demo server.
osrm.RequestConfig.host = "router.project-osrm.org"

# Five (longitude, latitude) points in North Macedonia.
list_coord = [
    [21.0566163803209, 42.004088575972],
    [21.3856064050746, 42.0094518118189],
    [20.9574645547597, 41.5286973392856],
    [21.1477394809847, 41.0691482795275],
    [21.5506463080973, 41.3532256406286],
]
list_id = ['name%d' % n for n in range(1, 6)]

# Request the travel-time table, labelling rows with the point ids.
time_matrix, snapped_coords = osrm.table(list_coord,
                                         ids_origin=list_id,
                                         output='dataframe')
print(time_matrix)
def compute(df):
    """Greedily pair taxi rides that could plausibly be shared.

    Rides are grouped into MAX_WAITING_TIME-minute windows by pickup time.
    Within each window, an OSRM dropoff-to-dropoff time table is built and
    every ordered pair (x, y) is tested with progressively stricter checks
    (pickup ordering, pickup reachability, and bounded lateness for both
    riders via osrm.simple_route detours). Accepted pairs are recorded in
    `rides` with a combined route geometry, and their matrix rows/columns
    are zeroed out so neither ride is matched twice.

    Returns a JSON string with the collapsed pickup/dropoff points, the
    shared rides, and summary counts.

    NOTE(review): `sharedJobs`, `marked`, `item`, `j` and the `l = ...`
    else-branches are written but never read; `group.as_matrix` is a
    long-deprecated pandas API — confirm the pandas version in use.
    """
    group_10m = df.groupby(pd.Grouper(freq=str(MAX_WAITING_TIME) + 'Min'))
    collapsedJobs = []
    sharedJobs = []
    doable = 0
    total = 0
    dfg = 0
    rides = []
    for key, item in group_10m:
        try:
            # cluster here!
            group = group_10m.get_group(key)
            # empty groups cannot be shared...
            if group.shape[0] == 1:
                continue
            # list_coord = [[21.0566163803209, 42.004088575972],
            # [21.3856064050746, 42.0094518118189],
            # [20.9574645547597, 41.5286973392856],
            # [21.1477394809847, 41.0691482795275],
            # [21.5506463080973, 41.3532256406286]]
            # list_id = ['name1', 'name2', 'name3', 'name4', 'name5']
            # time_matrix, snapped_coords = osrm.table(list_coord,
            # ids_origin=list_id,
            # output='dataframe')
            dropoffCoords = group.as_matrix(
                columns=['dropoff_longitude', 'dropoff_latitude'])
            # print(len(dropoffCoords))
            time_matrix = osrm.table(dropoffCoords,
                                     ids_origin=np.arange(len(dropoffCoords)),
                                     ids_dest=np.arange(len(dropoffCoords)),
                                     output='dataframe',
                                     send_as_polyline=False)
            # print(time_matrix)
            # Dropoff-to-dropoff times padded by the allowed lateness.
            time_matrix_delay = np.add(time_matrix, +LATENESS_ADJUSTER)
            print("OSRM Table Returned")
            x = 0
            dfg = 0
            marked = []
            for i in time_matrix:
                # tollerable means different between pickup and dropoff is minimal
                y = 0
                for j in i:
                    # Skip self-pairs and entries already zeroed out below.
                    if x == y or time_matrix_delay[x][y] == 0 or time_matrix_delay[x][y] == LATENESS_ADJUSTER:
                        y = y + 1
                        # if x != y:
                        # print(str(x) + " " + str(y))
                        continue
                    first = group.iloc[[x]]
                    second = group.iloc[[y]]
                    # is first pickup before second
                    # consider change of duration for second ride aswell!
                    if pd.Timedelta(second["pickup_datetime"].values[0] -
                                    first["pickup_datetime"].values[0]
                                    ).seconds >= 0 and pd.Timedelta(
                                        second.index.values[0] -
                                        first.index.values[0]
                                    ).seconds < time_matrix_delay[x][y]:
                        # is pickup time realistic?
                        result = osrm.simple_route([
                            first["pickup_longitude"].values[0],
                            first["pickup_latitude"].values[0]
                        ], [
                            second["pickup_longitude"].values[0],
                            second["pickup_latitude"].values[0]
                        ],
                                                   output='route',
                                                   geometry='wkt',
                                                   send_as_polyline=True)
                        # Round-trip time between the two pickup points.
                        extended_trip_time = result[0]["duration"] * 2
                        if pd.Timedelta(
                                first["pickup_datetime"].values[0] -
                                second["pickup_datetime"].values[0]
                        ).seconds <= extended_trip_time + EARLY_PICKUP_ADJUSTER:
                            # if True:
                            # print("Tollerable")
                            # how long does journey take?
                            # pickup first, pickup_second, dropoff_first, dropoff_second,
                            first_result = osrm.simple_route(
                                [
                                    first["pickup_longitude"].values[0],
                                    first["pickup_latitude"].values[0]
                                ], [
                                    first["dropoff_longitude"].values[0],
                                    first["dropoff_latitude"].values[0]
                                ], [[
                                    second["pickup_longitude"].values[0],
                                    second["pickup_latitude"].values[0]
                                ]],
                                output='route',
                                geometry='wkt',
                                send_as_polyline=True)
                            first_trip_time = first[
                                "trip_time_in_secs"].values[0]
                            extended_first_trip_time = first_result[0][
                                "duration"]
                            # First rider's detour must stay within bounds.
                            if extended_first_trip_time < first_trip_time + LATENESS_ADJUSTER:
                                # print("no friggin way")
                                second_result = osrm.simple_route(
                                    [
                                        second["pickup_longitude"].values[0],
                                        second["pickup_latitude"].values[0]
                                    ], [
                                        second["dropoff_longitude"].values[0],
                                        second["dropoff_latitude"].values[0]
                                    ], [[
                                        first["dropoff_longitude"].values[0],
                                        first["dropoff_latitude"].values[0]
                                    ]],
                                    output='route',
                                    geometry='wkt',
                                    send_as_polyline=True)
                                second_trip_time = second[
                                    "trip_time_in_secs"].values[0]
                                extended_second_trip_time = second_result[0][
                                    "duration"]
                                # Second rider's detour must stay within bounds.
                                if extended_second_trip_time < second_trip_time + LATENESS_ADJUSTER:
                                    # print("no friggin way")
                                    dfg += 1
                                    # Full shared route for display (geojson).
                                    result = osrm.simple_route(
                                        [
                                            first["pickup_longitude"].
                                            values[0],
                                            first["pickup_latitude"].values[0]
                                        ], [
                                            second["dropoff_longitude"].
                                            values[0],
                                            second["dropoff_latitude"].
                                            values[0]
                                        ], [[
                                            second["pickup_longitude"].
                                            values[0],
                                            second["pickup_latitude"].values[0]
                                        ], [
                                            first["dropoff_longitude"].
                                            values[0],
                                            first["dropoff_latitude"].
                                            values[0]
                                        ]],
                                        output='route',
                                        geometry='geojson',
                                        send_as_polyline=True)
                                    collapsedJobs.append(group.iloc[[x]])
                                    collapsedJobs.append(group.iloc[[y]])
                                    ride = {}
                                    ride["pickup_first"] = {}
                                    ride["pickup_first"]["type"] = "point"
                                    ride["pickup_first"]["latitude"] = first[
                                        "pickup_latitude"].values[0]
                                    ride["pickup_first"]["longitude"] = first[
                                        "pickup_longitude"].values[0]
                                    ride["dropoff_first"] = {}
                                    ride["dropoff_first"]["type"] = "point"
                                    ride["dropoff_first"]["latitude"] = first[
                                        "dropoff_latitude"].values[0]
                                    ride["dropoff_first"]["longitude"] = first[
                                        "dropoff_longitude"].values[0]
                                    ride["pickup_second"] = {}
                                    ride["pickup_second"]["type"] = "point"
                                    ride["pickup_second"]["latitude"] = second[
                                        "pickup_latitude"].values[0]
                                    ride["pickup_second"][
                                        "longitude"] = second[
                                            "pickup_longitude"].values[0]
                                    ride["dropoff_second"] = {}
                                    ride["dropoff_second"]["type"] = "point"
                                    ride["dropoff_second"][
                                        "latitude"] = second[
                                            "dropoff_latitude"].values[0]
                                    ride["dropoff_second"][
                                        "longitude"] = second[
                                            "dropoff_longitude"].values[0]
                                    # print(second_result)
                                    ride["geometry"] = result[0]["geometry"]
                                    rides.append(ride)
                                    marked.append(x)
                                    marked.append(y)
                                    # Zero out both rides' rows and columns so
                                    # neither can be matched again.
                                    g = 0
                                    for k in i:
                                        print(x)
                                        time_matrix_delay[x][g] = 0
                                        time_matrix_delay[y][g] = 0
                                        time_matrix_delay[g][x] = 0
                                        time_matrix_delay[g][y] = 0
                                        g += 1
                                        # print(g)
                                else:
                                    l = 0
                            else:
                                l = 1
                        else:
                            l = 3
                    else:
                        l = 2
                    y = y + 1
                x = x + 1
            total += group.shape[0]
            doable += dfg
            print("Total")
            print(total)
            print("Doable")
            print(doable)
            # Early exit once enough shared pairs have been found.
            if (doable > 20):
                break
        except Exception:
            print(traceback.format_exc())
            # continue
        # break
    # break
    # print(group_10m.get_group(key)["dropoff_latitude"])
    #
    # break
    # Flatten the paired rides into point dicts for the response payload.
    collapsedDropoffArray = []
    collapsedPickupArray = []
    for collapsedJob in collapsedJobs:
        collapsedJobDropoffDict = {}
        collapsedJobDropoffDict["type"] = "point"
        collapsedJobDropoffDict["longitude"] = collapsedJob[
            "dropoff_longitude"].values[0]
        collapsedJobDropoffDict["latitude"] = collapsedJob[
            "dropoff_latitude"].values[0]
        collapsedDropoffArray.append(collapsedJobDropoffDict)
        collapsedJobPickupDict = {}
        collapsedJobPickupDict["type"] = "point"
        collapsedJobPickupDict["longitude"] = collapsedJob[
            "pickup_longitude"].values[0]
        collapsedJobPickupDict["latitude"] = collapsedJob[
            "pickup_latitude"].values[0]
        collapsedPickupArray.append(collapsedJobPickupDict)
    dataDict = {
        'collapsedDropoffArray': collapsedDropoffArray,
        'collapsedPickupArray': collapsedPickupArray,
        'rides': rides,
        'totalRides': total,
        'collapsedRides': doable * 2,
        'sharedRides': doable,
        'algorithm': "Minimal Delay"
    }
    # print(json.dumps(dataDict))
    data = json.dumps(dataDict)
    return data
    # Dead code below (kept from an earlier "naiveN" variant).
    # print(len(collapsedJobs))
    # print(len(sharedJobs))
    # collapsedDropoffArray = []
    # collapsedPickupArray = []
    # sharedDropoffArray = []
    # sharedPickupArray = []
    # for collapsedJob in collapsedJobs:
    # collapsedJobDropoffDict = {}
    # collapsedJobDropoffDict["type"] = "point"
    # collapsedJobDropoffDict["longitude"] = collapsedJob["dropoff_longitude"].values[0]
    # collapsedJobDropoffDict["latitude"] = collapsedJob["dropoff_latitude"].values[0]
    # collapsedDropoffArray.append(collapsedJobDropoffDict)
    # collapsedJobPickupDict = {}
    # collapsedJobPickupDict["type"] = "point"
    # collapsedJobPickupDict["longitude"] = collapsedJob["pickup_longitude"].values[0]
    # collapsedJobPickupDict["latitude"] = collapsedJob["pickup_latitude"].values[0]
    # collapsedPickupArray.append(collapsedJobPickupDict)
    # for sharedJob in sharedJobs:
    # sharedJobPickupDict = {}
    # sharedJobPickupDict["type"] = "point"
    # sharedJobPickupDict["longitude"] = sharedJob["pickup_longitude"].values[0]
    # sharedJobPickupDict["latitude"] = sharedJob["pickup_latitude"].values[0]
    # sharedPickupArray.append(sharedJobPickupDict)
    # sharedJobDropoffDict = {}
    # sharedJobDropoffDict["type"] = "point"
    # sharedJobDropoffDict["longitude"] = sharedJob["dropoff_longitude"].values[0]
    # sharedJobDropoffDict["latitude"] = sharedJob["dropoff_latitude"].values[0]
    # sharedDropoffArray.append(sharedJobDropoffDict)
    #
    # dataDict = {
    # 'collapsedDropoffArray' : collapsedDropoffArray,
    # 'collapsedPickupArray' : collapsedPickupArray,
    # 'sharedDropoffArray' : sharedDropoffArray,
    # 'sharedPickupArray' : sharedPickupArray,
    # 'totalRides' : df.shape[0],
    # 'collapsedRides' : len(collapsedJobs),
    # 'sharedRides' : len(sharedJobs),
    # 'algorithm' : "naiveN"
    # }
    # data = json.dumps(dataDict)
    # return data
    return