def set_drivers(self, ratio, distr):
    # Drivers
    self._params['ratio'] = ratio
    self._params['distr'] = distr
    num_drivers = int(round(self._params['num_customers'] / ratio))
    if self._z is None:
        osm = OsmManager()
        self._z = osm.zipf_sample_bbox(self._bbox, self._free, num_drivers, hotspots=False, pois=False,
                                       seed=self._seed)
        if self._z is None:
            return None
        self._u = self._rnd.choice(a=list(self._free.difference(self._z)), size=num_drivers, replace=False)
    if distr == 'U-U':
        d_starts_ends = self._rnd.choice(a=list(self._free), size=num_drivers * 2, replace=False)
        self._ds = [((d_starts_ends[i], 1, 300), (d_starts_ends[i + num_drivers], 1, 300))
                    for i in range(num_drivers)]
    elif distr == 'Z-U':
        self._ds = [((self._z[i], 1, 300), (self._u[i], 1, 300)) for i in range(num_drivers)]
    else:
        self._ds = [((self._u[i], 1, 300), (self._z[i], 1, 300)) for i in range(num_drivers)]
    return self._ds
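# Hedged illustration (not part of the class above): how the three 'distr' modes pair
# driver start/end nodes. The helper name, the node IDs and the RandomState seed below
# are made up for the example; only the U-U / Z-U / U-Z convention comes from
# set_drivers() itself.
import numpy as np


def pair_driver_endpoints(free_nodes, zipf_nodes, distr, num_drivers, rnd):
    """Return [(start, end), ...] following the U-U / Z-U / U-Z convention."""
    if distr == 'U-U':
        both = rnd.choice(a=list(free_nodes), size=num_drivers * 2, replace=False)
        return [(both[i], both[i + num_drivers]) for i in range(num_drivers)]
    uniform = rnd.choice(a=list(set(free_nodes).difference(zipf_nodes)), size=num_drivers, replace=False)
    if distr == 'Z-U':  # Zipf-sampled starts, uniformly sampled ends
        return [(zipf_nodes[i], uniform[i]) for i in range(num_drivers)]
    return [(uniform[i], zipf_nodes[i]) for i in range(num_drivers)]  # 'U-Z'


if __name__ == '__main__':
    rnd = np.random.RandomState(0)
    free = range(100)            # made-up node IDs
    zipf = [3, 7, 12, 1, 44]     # pretend output of zipf_sample_bbox
    print pair_driver_endpoints(free, zipf, 'Z-U', 5, rnd)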
class TestOsmManager(TestCase):

    def setUp(self):
        self.osmmgr = OsmManager()
        self.bbox = (144.942043, -37.822496, 145.053342, -37.734496)
        generator = SuitableNodeWeightGenerator()
        self.graph = self.osmmgr.generate_graph_for_bbox(self.bbox[0], self.bbox[1], self.bbox[2], self.bbox[3],
                                                         generator, hotspots=False, pois=False)

    def test_zonify_bbox(self):
        zones = self.osmmgr.zipf_sample_bbox(self.bbox, self.graph.keys(), 100, hotspots=False, pois=False, seed=0)
        self.assertListEqual(zones, [
            127362667, 251452389, 214131749, 1423021859, 247961910, 1645179845, 310742008, 2172834570, 3947168757,
            32997295, 34177595, 1272832422, 2905756447, 2207661041, 871096413, 570734694, 1283671455, 581831423,
            53857533, 387152903, 225183957, 1299225014, 370230599, 1287222143, 3218293527, 2180785588, 702450201,
            2180785582, 777710193, 775942584, 1833115722, 1449431554, 387153044, 598154601, 3313134385, 269335261,
            1833121231, 2189497233, 2279153652, 778827314, 1449431342, 233314923, 297178812, 1900213537, 233314999,
            319862071, 3146808113, 266081969, 99498965, 493053630, 227775933, 33239382, 309346901, 333901615,
            244870022, 250480669, 227316012, 2491032728, 147606698, 559301782, 256715870, 370750302, 3810929386,
            3126924753, 2481930758, 230397690, 151452152, 283058857, 332797041, 127544544, 248746969, 1463620935,
            2090721543, 384354011, 2688454941, 1463620394, 301548223, 224750279, 246939633, 248969195, 127715644,
            361771408, 356083538, 361770537, 248746950, 256715871, 313859384, 244859445, 3146808060, 2567155983,
            1095506178, 277363029, 356474894, 250933657, 224738431, 373008467, 4061053456, 356098372, 233255340,
            246998674
        ])
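# zipf_sample_bbox() itself is not shown in this excerpt. As a rough, hedged illustration
# of the idea being tested (Zipf-weighted sampling over a list of nodes), here is a
# generic sketch; the function name, exponent and node IDs are made up, and this is NOT
# the project's implementation.
import numpy as np


def zipf_weighted_sample(nodes, k, s=1.0, seed=0):
    """Sample k nodes with probability proportional to 1 / rank ** s."""
    rnd = np.random.RandomState(seed)
    nodes = list(nodes)
    weights = 1.0 / np.arange(1, len(nodes) + 1, dtype=float) ** s
    probs = weights / weights.sum()
    return list(rnd.choice(a=nodes, size=k, replace=True, p=probs))


if __name__ == '__main__':
    print zipf_weighted_sample(range(1000), 10, s=1.2, seed=0)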
def main():
    # Outer bbox.
    # bounds = [-78.51114567859952, -0.22156158994849384, -78.46239384754483, -0.12980902510699335]  # (small) Quito
    bounds = [-78.57160966654635, -0.4180073651030667, -78.36973588724948, -0.06610523586538203]  # (big) Quito
    # bounds = [144.58265438867193, -38.19424168942873, 145.36955014062505, -37.55250095415727]  # Melbourne
    # bounds = [-74.0326191484375, 40.69502239217181, -73.93236890429688, 40.845827729757275]  # Manhattan
    zone = "Quito"
    delta_meters = 3000.0
    delta = delta_meters / 111111
    num_samples = 100
    nuq = 5

    osm = OsmManager()
    generator = SuitableNodeWeightGenerator()

    results = []
    sample = 0
    initial_seed = 500
    while sample < num_samples:
        # np.random.seed(initial_seed)
        initial_seed += 1
        # Compute bbox coords (inner sample bbox of delta_meters x delta_meters).
        min_lon = np.random.uniform(bounds[0], bounds[2] - delta)
        min_lat = np.random.uniform(bounds[1], bounds[3] - delta)
        max_lon = min_lon + delta
        max_lat = min_lat + delta
        # Generate network sample.
        graph, _, pois, _, _ = osm.generate_graph_for_bbox(min_lon, min_lat, max_lon, max_lat, generator,
                                                           hotspots=False, cost_type="travel_time")
        N = len(graph.keys())
        num_pois = len(pois)
        if num_pois == 0:
            continue
        # Group POIs by subtype (activity).
        ps_subtype = dict()
        for p in pois:
            ps_subtype.setdefault(graph[p][2]['subtype'], []).append(p)
        # Available nodes for users.
        nq = len(ps_subtype.keys())
        free_nodes = set(graph.keys()).difference(pois)
        if len(free_nodes) < nq * nuq:
            continue
        # Create queries.
        queries = []
        occupied = set()
        for _, pois_ in ps_subtype.iteritems():
            where = set(free_nodes).difference(occupied)
            terminals = np.random.choice(a=list(where), size=nuq, replace=False)
            queries.append((terminals, pois_))
            occupied.update(terminals)
        # Compute capacity for every road segment.
        graph.capacitated = True
        capacity = int(math.ceil((nuq / 4.0 * nq) / 4.0))
        graph.set_capacities({e: capacity for e in graph.get_edges()})
        #
        merge_users = False
        max_iter = 20
        alpha = 1.0
        beta = 4.0
        # VST-NCA ******************************************************************************************************
        vst_rs = VST_RS(graph)
        st = time.clock()
        try:
            _, c, warl, mwrl, mrl1, mrl2, entropy = vst_rs.non_congestion_aware(queries, 4, 8, bpr,
                                                                                merge_users=merge_users, alpha=alpha,
                                                                                beta=beta, verbose=False)
        except:
            # Skip this sample if the solver fails.
            continue
        et = time.clock() - st
        line = ["VST-NCA", "N/A", zone, N, capacity, merge_users, sample, nq, nuq, "N/A", num_pois, c, warl, mwrl,
                mrl1, mrl2, 0, et, alpha, beta, entropy]
        print line
        results.append(line)
        # VST-CA MIXED ************************************************************************************************
        vst_rs = VST_RS(graph)
        st = time.clock()
        try:
            _, c, warl, mwrl, mrl1, mrl2, entropy, ni = vst_rs.congestion_aware(queries, 4, 8, bpr,
                                                                                merge_users=merge_users,
                                                                                max_iter=max_iter, alpha=alpha,
                                                                                beta=beta, verbose=False,
                                                                                randomize=True)
        except:
            # Skip this sample if the solver fails.
            continue
        et = time.clock() - st
        ni_ = str(ni)
        if ni == max_iter:
            ni_ += "(*)"
        line = ["VST-CA", "mixed", zone, N, capacity, merge_users, sample, nq, nuq, "N/A", num_pois, c, warl, mwrl,
                mrl1, mrl2, ni_, et, alpha, beta, entropy]
        print line
        results.append(line)
        # VST-CA PURE *************************************************************************************************
        vst_rs = VST_RS(graph)
        st = time.clock()
        try:
            _, c, warl, mwrl, mrl1, mrl2, entropy, ni = vst_rs.congestion_aware(queries, 4, 8, bpr,
                                                                                merge_users=merge_users,
                                                                                max_iter=max_iter, alpha=alpha,
                                                                                beta=beta, verbose=False,
                                                                                randomize=False)
        except:
            # Skip this sample if the solver fails.
            continue
        et = time.clock() - st
        ni_ = str(ni)
        if ni == max_iter:
            ni_ += "(*)"
        line = ["VST-CA", "pure", zone, N, capacity, merge_users, sample, nq, nuq, "N/A", num_pois, c, warl, mwrl,
                mrl1, mrl2, ni_, et, alpha, beta, entropy]
        print line
        results.append(line)

        sample += 1

    result_file = open("files/vstca_vstnca_osm_1_" + time.strftime("%d%b%Y_%H%M%S") + ".csv", 'wb')
    wr = csv.writer(result_file)
    wr.writerows(results)
    result_file.close()
import operator
import math
import csv
import time

from osmmanager import OsmManager
from suitability import SuitableNodeWeightGenerator
from numpy.random import RandomState

if __name__ == '__main__':

    osm = OsmManager()
    generator = SuitableNodeWeightGenerator()
    #
    regions = {
        'MEL': (
            [144.58265438867193, -38.19424168942873, 145.36955014062505, -37.55250095415727],
            ['COLES', 'WOOLWORTHS', 'ALDI']
        ),
        # 'UIO': (
        #     [-78.57160966654635, -0.4180073651030667, -78.36973588724948, -0.06610523586538203],
        #     ['LA FAVORITA', 'SANTA MARIA', 'MI COMISARIATO']
        # ),
        # 'MHT': (
        #     [-74.0326191484375, 40.69502239217181, -73.93236890429688, 40.845827729757275],
        #     ['WALMART', 'TARGET', 'COSTCO']
        # ),
    }

    delta_meters = 5000.0
    delta = delta_meters / 111111
    num_samples = 5
import time

from osmmanager import OsmManager
from osmdbmanager import OsmDBManager
from suitability import SuitableNodeWeightGenerator
from vst_rs import VST_RS
from hotspot_based import HotspotBased

if __name__ == '__main__':

    osm = OsmManager()
    osmdbmngr = OsmDBManager("postgres", "naya0105", "osm", "localhost")
    generator = SuitableNodeWeightGenerator()

    files = {21303: 'maribyrnong'}
    samples = range(5)

    for sa3_code11, file_ in files.iteritems():
        dep_hours = osm.get_departure_hours(file_)
        for dh in dep_hours:
            dest_acts = osm.get_dest_activities(file_, dh)
            for act in dest_acts:
                if act[0] == 805:
                    continue
                for sample in samples:
                    print sample
                    graph, hotspots, pois, nodes_by_sa1_code, _ = osm.generate_graph_for_file(file_, act[0], generator)
                    terminals = osm.choose_terminals_according_to_vista(file_, dh, act[0], nodes_by_sa1_code)
                    temp = list(hotspots)
                    temp.extend(pois)
                    temp.extend(terminals)
                    graph.compute_dist_paths(origins=temp, destinations=temp, compute_paths=False)
                    # print graph.issues_dist_paths
                    #
def get_routes_map(routes_, graph_):
    routes_map_ = list()
    for route in routes_:
        lats_ = list()
        lons_ = list()
        for v in route:
            lats_.append(graph_[v][2]['lat'])
            lons_.append(graph_[v][2]['lon'])
        routes_map_.append((lats_, lons_))
    return routes_map_


if __name__ == '__main__':
    #
    osm = OsmManager()
    generator = SuitableNodeWeightGenerator()
    #
    regions = {
        'MEL': (
            [144.58265438867193, -38.19424168942873, 145.36955014062505, -37.55250095415727],
            {'COLES': 0.326, 'WOOLWORTHS': 0.39, 'ALDI': 0.164, 'IGA': 0.12}
        ),
        # 'UIO': (
        #     [-78.57160966654635, -0.4180073651030667, -78.36973588724948, -0.06610523586538203],
        #     ['LA FAVORITA', 'SANTA MARIA', 'MI COMISARIATO']
import numpy as np
import time
import csv

from osmmanager import OsmManager
from suitability import SuitableNodeWeightGenerator
from hotspot_based import HotspotBased

if __name__ == '__main__':
    #
    results = []
    osm = OsmManager()
    generator = SuitableNodeWeightGenerator()
    #
    files = {21303: 'maribyrnong'}
    samples = range(3)
    #
    for sa3_code11, file_ in files.iteritems():
        #
        act_dh = {}
        dep_hours = osm.get_departure_hours(file_)
        for dh in dep_hours:
            dest_acts = osm.get_dest_activities(file_, dh)
            for act_desc in dest_acts:
                try:
                    act_dh[act_desc[0]].append(dh)
                except KeyError:
                    act_dh[act_desc[0]] = [dh]
        #
        for act, dhs in act_dh.iteritems():
            graph, hotspots, pois, nodes_by_sa1_code, nodes_by_sa2_code = osm.generate_graph_for_file(
def index(request):
    generator = SuitableNodeWeightGenerator()
    # Long integers seem not to be JSON serializable. Thus, str() is used whenever the integer does not come
    # from the session or from the DB. (Not entirely sure!)
    if 'op' in request.GET:
        # Bbox corners arrive as strings; convert them to floats before comparing/using them as coordinates.
        top = float(request.GET.get('top'))
        left = float(request.GET.get('left'))
        bottom = float(request.GET.get('bottom'))
        right = float(request.GET.get('right'))
        print top, left, bottom, right
        #
        min_lon = min(left, right)
        min_lat = min(top, bottom)
        max_lon = max(left, right)
        max_lat = max(top, bottom)
        #
        osm = OsmManager()
        # CREATE NETWORK SAMPLE ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        if request.GET['op'] == 'show_pois':
            graph, _, pois, _, _ = \
                osm.generate_graph_for_bbox(min_lon, min_lat, max_lon, max_lat, generator, hotspots=False,
                                            cost_type="travel_time")
            #
            request.session['graph'] = graph
            # request.session['graph'] = {(str(e[0]), str(e[1])): v for e, v in graph.edges.iteritems()}
            request.session['pois'] = pois
            #
            geo_pois = [(graph[p][2]['lat'], graph[p][2]['lon'], p, graph[p][2]['subtype']) for p in pois]
            return HttpResponse(
                json.dumps(
                    dict(
                        isOk=1,
                        content=render_to_string('congestion/index.html', {}),
                        pois=geo_pois,
                    )))  # , default=decimal_default))
        # SLICE POIS +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        elif request.GET['op'] == 'slice_pois':
            graph = get_suitability_graph_from_session(request)
            pois = request.session['pois']
            #
            s_pois = osm.get_nodes_for_bbox(min_lon, min_lat, max_lon, max_lat, hotspots=False)
            s_pois = set(pois).intersection(s_pois)
            #
            request.session['pois'] = list(s_pois)
            #
            geo_pois = [(graph[p][2]['lat'], graph[p][2]['lon'], p, graph[p][2]['subtype']) for p in s_pois]
            return HttpResponse(
                json.dumps(
                    dict(
                        isOk=1,
                        content=render_to_string('congestion/index.html', {}),
                        pois=geo_pois,
                    )))  # , default=decimal_default))
        # CREATE QUERIES +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        elif request.GET['op'] == "create_queries":
            nuq = int(request.GET.get('nusers'))
            seed = request.GET.get('seed')
            #
            graph = get_suitability_graph_from_session(request)
            pois = request.session['pois']
            # How many different activities were sliced?
            ps_subtype = dict()
            for p in pois:
                ps_subtype.setdefault(graph[p][2]['subtype'], []).append(p)
            #
            s_nodes = osm.get_nodes_for_bbox(min_lon, min_lat, max_lon, max_lat, hotspots=False)
            s_nodes = set(graph.keys()).intersection(s_nodes).difference(pois)
            #
            queries = []
            ts_subtype = dict()
            occupied = set()
            np.random.seed(int(seed))
            for subtype, pois_ in ps_subtype.iteritems():
                where = set(s_nodes).difference(occupied)
                terminals = np.random.choice(a=list(where), size=nuq, replace=False)
                queries.append(([str(t) for t in terminals], pois_, subtype))
                occupied.update(terminals)
                ts_subtype[subtype] = list(terminals)
            #
            request.session['queries'] = queries
            #
            geo_pois = [(graph[p][2]['lat'], graph[p][2]['lon'], p, graph[p][2]['subtype']) for p in pois]
            geo_terminals = []
            for subtype, ts in ts_subtype.iteritems():
                for t in ts:
                    geo_terminals.append((graph[t][2]['lat'], graph[t][2]['lon'], str(t), subtype))
            return HttpResponse(
                json.dumps(
                    dict(
                        isOk=1,
                        content=render_to_string('congestion/index.html', {}),
                        pois=geo_pois,
                        terminals=geo_terminals,
                    )))  # , default=decimal_default))
    elif 'alg' in request.GET:
        alg = request.GET.get('alg')
        print alg
        # Set up the graph.
        graph = get_suitability_graph_from_session(request)
        graph.capacitated = True
        graph.set_capacities({e: 2 for e in graph.get_edges()})
        # FIX THIS +++++++++++++++++++++++++++++++++++++++++++
        #
        queries = get_queries_from_session(request)
        queries_ = [(ts, pois) for ts, pois, _ in queries]
        #
        ni = 0
        #
        # with open('file_tt.txt', 'w') as file_:
        #     file_.write(json.dumps(graph))
        #
        merge_users = False
        max_iter = 20
        alpha = 1.0
        beta = 4.0
        vst_rs = VST_RS(graph)
        st = time.clock()
        if alg == 'vst-nca':
            plans, cost, warl, mwrl, mrl1, mrl2, entropy = \
                vst_rs.non_congestion_aware(queries_, 4, 8, bpr, merge_users=merge_users, alpha=alpha, beta=beta,
                                            verbose=True)
        elif alg == "vst-ca-mixed":
            plans, cost, warl, mwrl, mrl1, mrl2, entropy, ni = \
                vst_rs.congestion_aware(queries_, 4, 8, bpr, merge_users=merge_users, max_iter=max_iter, alpha=alpha,
                                        beta=beta, verbose=True, randomize=True)
        else:
            plans, cost, warl, mwrl, mrl1, mrl2, entropy, ni = \
                vst_rs.congestion_aware(queries_, 4, 8, bpr, merge_users=merge_users, max_iter=max_iter, alpha=alpha,
                                        beta=beta, verbose=True, randomize=False)
        elapsed_time = time.clock() - st
        #
        geo_edges = []
        for ord_, plan, _ in plans:
            geo_edges.extend(get_geo_forest_edges(queries[ord_][2], plan, graph))
        return HttpResponse(
            json.dumps(
                dict(content=render_to_string('congestion/index.html', {}),
                     route=geo_edges,
                     cost=cost,
                     elapsed_time=elapsed_time,
                     warl=warl,
                     mwrl=mwrl,
                     mrl1=mrl1,
                     mrl2=mrl2,
                     ent=entropy,
                     ni=ni)))
    else:
        return render(request, 'congestion/index.html', {})
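# Hypothetical client-side walk-through of the view above. The mount point
# '/congestion/' and the host are assumptions; the parameter names ('op', 'alg',
# 'nusers', 'seed' and the bbox corners) come from the code. A requests.Session is
# used because the view caches the graph, POIs and queries in request.session
# between calls.
import requests

s = requests.Session()
base = 'http://localhost:8000/congestion/'
bbox = {'left': 144.95, 'bottom': -37.82, 'right': 144.99, 'top': -37.79}
s.get(base, params=dict(op='show_pois', **bbox))                         # build the network sample
s.get(base, params=dict(op='create_queries', nusers=5, seed=0, **bbox))  # group users per activity
r = s.get(base, params={'alg': 'vst-ca-mixed'})                          # solve congestion-aware
print r.json()['cost'], r.json()['ni']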
def index(request):
    #
    generator = SuitableNodeWeightGenerator()
    if 'file_to_retrieve_dhs' in request.GET:
        #
        file_ = request.GET.get('file_to_retrieve_dhs')
        osm = OsmManager()
        dep_hours = osm.get_departure_hours(file_)
        return HttpResponse(json.dumps(dict(dh=dep_hours)))
    elif 'file_to_retrieve_acts' in request.GET and 'dh_to_retrieve_acts' in request.GET:
        #
        file_ = request.GET.get('file_to_retrieve_acts')
        dh = request.GET.get('dh_to_retrieve_acts')
        osm = OsmManager()
        dest_acts = osm.get_dest_activities(file_, dh)
        return HttpResponse(json.dumps(dict(acts=dest_acts)))
    elif 'file' in request.GET and 'dh' in request.GET and 'act' in request.GET:
        #
        file_ = request.GET.get('file')
        dh = request.GET.get('dh')
        act = request.GET.get('act')
        print file_, dh, act
        #
        osm = OsmManager()
        graph, hotspots, pois, nodes_by_sa1_code, nodes_by_sa2_code = osm.generate_graph_for_file(file_, act,
                                                                                                  generator)
        terminals = osm.choose_terminals_according_to_vista(file_, dh, act, nodes_by_sa1_code)
        reset_hotspots_weights = {h: generator.weights["WARNING"][0] for h in hotspots}
        graph.update_node_weights(reset_hotspots_weights)
        excluded = list(pois)
        excluded.extend(terminals)
        # rest_nodes = list(set(graph.keys()).difference(excluded))
        # # Option A: Hot-spots are the rest of the nodes, i.e., users can meet anywhere.
        # hotspots = list(rest_nodes)
        # # Option B: Hot-spots chosen randomly from the rest of the nodes, i.e., nodes that aren't terminals nor POIs.
        # ind = np.random.choice(a=len(rest_nodes), size=len(hotspots), replace=False)
        # hotspots = [rest_nodes[i] for i in ind]
        # Option C: Hot-spots chosen based on population distribution.
        # TODO: Dynamic sa3 code
        hotspots = osm.choose_hotspots_according_to_population(21303, len(hotspots), nodes_by_sa2_code, excluded)
        weights = {h: generator.weights["VERY_SUITABLE"][0] for h in hotspots}
        graph.update_node_weights(weights)
        temp = list(hotspots)
        temp.extend(pois)
        temp.extend(terminals)
        graph.compute_dist_paths(origins=temp, destinations=temp, compute_paths=False)
        #
        request.session['graph'] = graph
        request.session['dist'] = {str(k[0]) + "," + str(k[1]): v for k, v in graph.dist.iteritems()}
        request.session['pairs_dist_paths'] = [str(v) + "," + str(w) for v, w in graph.pairs_dist_paths]
        request.session['hotspots'] = hotspots
        request.session['pois'] = pois
        request.session['terminals'] = terminals
        #
        geo_hotspots = [(graph[h][2]['lat'], graph[h][2]['lon'], h) for h in hotspots]
        geo_pois = [(graph[p][2]['lat'], graph[p][2]['lon'], p) for p in pois]
        geo_terminals = [(graph[t][2]['lat'], graph[t][2]['lon'], t) for t in terminals]
        return HttpResponse(json.dumps(dict(
            isOk=1,
            content=render_to_string('hotspots/index.html', {}),
            hotspots=geo_hotspots,
            pois=geo_pois,
            terminals=geo_terminals,
        )))  # , default=decimal_default))
    elif 'alg' in request.GET:
        alg = request.GET.get('alg')
        print alg
        #
        graph = get_suitability_graph_from_session(request)
        hotspots = request.session['hotspots']
        pois = request.session['pois']
        terminals = request.session['terminals']
        # pdb.set_trace()
        #
        if alg == 'rahman':
            cap = int(request.GET.get('cap_r'))
            vst_rs = VST_RS(graph, nodes=hotspots)
            start_time = time.clock()
            forest, cost, gr, avg_dr, num_trees, avg_or, _, _ = vst_rs.steiner_forest(terminals, pois, cap, 8)
            elapsed_time = time.clock() - start_time
        else:
            cap = int(request.GET.get('cap_c'))
            mdr = request.GET.get('mdr')
            mwd = request.GET.get('mwd')
            if mdr is not None and mdr != '':
                mdr = float(request.GET.get('mdr'))
            else:
                mdr = sys.maxint
            if mwd is not None and mwd != '':
                mwd = float(request.GET.get('mwd'))
            else:
                mwd = sys.maxint
            #
            # print mdr, mwd
            hb = HotspotBased(graph, terminals, pois)
            start_time = time.clock()
            forest, cost, gr, avg_dr, num_trees, avg_or, _ = \
                hb.steiner_forest(k=cap, max_dr=mdr, max_wd=mwd, get_lsv=False)
            elapsed_time = time.clock() - start_time
        #
        geo_steiner_tree_edges = get_geo_steiner_tree_edges(forest, graph)
        return HttpResponse(json.dumps(dict(
            content=render_to_string('hotspots/index.html', {}),
            route=geo_steiner_tree_edges,
            distance=cost,
            elapsed_time=elapsed_time,
            gr=gr,
            avg_dr=avg_dr,
            num_cars=num_trees,
            avg_or=avg_or
        )))
    else:
        return render(request, 'hotspots/index.html', {})
from osmmanager import OsmManager
from suitability import SuitableNodeWeightGenerator

if __name__ == "__main__":
    osm = OsmManager()
    # osm.generate_samples(21303, "maribyrnong")
    generator = SuitableNodeWeightGenerator()
    _, _, _, _, nodes_by_sa2_code = osm.generate_graph_for_file("maribyrnong", 602, generator, with_hotspots=False)
    osm.choose_hotspots_according_to_population(21303, 127, nodes_by_sa2_code)
from osmmanager import OsmManager
from suitability import SuitableNodeWeightGenerator

if __name__ == "__main__":
    osm = OsmManager()
    res = osm.get_session_users(65)
    print res
bounds = [144.58265438867193, -38.19424168942873, 145.36955014062505, -37.55250095415727]  # Melbourne
delta_meters = 10000.0
delta = delta_meters / 111111
found = False
#
iter = 0
while not found and iter < 1000:
    iter += 1
    min_lon = np.random.uniform(bounds[0], bounds[2] - delta)
    min_lat = np.random.uniform(bounds[1], bounds[3] - delta)
    max_lon = min_lon + delta
    max_lat = min_lat + delta
    #
    osm = OsmManager()
    generator = SuitableNodeWeightGenerator()
    graph, _, pois, _, _ = osm.generate_graph_for_bbox(min_lon, min_lat, max_lon, max_lat, generator, hotspots=False,
                                                       poi_names=['COLES', 'WOOLWORTHS', 'ALDI'])
    if len(pois) < 10:
        continue
    print min_lon, min_lat, max_lon, max_lat
    center_lat = (min_lat + max_lat) / 2
    center_lon = (min_lon + max_lon) / 2
    gmap = gmplot.GoogleMapPlotter(center_lat, center_lon, 13, apikey='')
    #
    stores_customers = dict()
    # STORES
    for poi in pois:
        lat = graph[poi][2]['lat']
        return stats_

    @staticmethod
    def compute_route_cost(graph, route):
        cost = 0
        for i_ in range(len(route) - 1):
            v = route[i_]
            w = route[i_ + 1]
            if v != w:
                cost += graph.get_edges()[tuple(sorted([v, w]))]
        return cost


if __name__ == '__main__':
    #
    o = OsmManager()
    generator = SuitableNodeWeightGenerator()
    #
    regions = {
        'MEL': (
            [144.58265438867193, -38.19424168942873, 145.36955014062505, -37.55250095415727],
            {'COLES': 0.326, 'WOOLWORTHS': 0.39, 'ALDI': 0.164, 'IGA': 0.12}
        ),
        # 'UIO': (
        #     [-78.57160966654635, -0.4180073651030667, -78.36973588724948, -0.06610523586538203],
        #     ['LA FAVORITA', 'SANTA MARIA', 'MI COMISARIATO']
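# Toy, self-contained check of the compute_route_cost() logic above. ToyGraph is a
# made-up stand-in exposing the same get_edges() contract (a dict keyed by sorted
# (v, w) tuples); since the owning class is not named in this fragment, the loop is
# repeated inline instead of calling the static method.
class ToyGraph(object):

    def __init__(self, edges):
        self._edges = edges

    def get_edges(self):
        return self._edges


if __name__ == '__main__':
    toy = ToyGraph({(1, 2): 4.0, (2, 3): 2.5, (3, 4): 1.5})
    route = [1, 2, 3, 3, 4]  # the repeated node contributes nothing (v == w is skipped)
    cost = 0
    for i_ in range(len(route) - 1):
        v, w = route[i_], route[i_ + 1]
        if v != w:
            cost += toy.get_edges()[tuple(sorted([v, w]))]
    assert cost == 8.0  # 4.0 + 2.5 + 1.5
    print cost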