def __init__(self, graph, root, terminals):
    """Set up the Dreyfus-with-IMRs state: suitable nodes plus all-pairs
    distances/paths among terminals, root and suitable nodes.

    :param graph: node-weighted suitability graph.
    :param root: root node of the Steiner tree (added to the node set if absent).
    :param terminals: list of terminal nodes.
    :raises ValueError: if the graph is not node-weighted.
    """
    # Check whether graph is node-weighted.
    if not graph.is_node_weighted():
        # BUG FIX: `raise (ValueError, "msg")` raised a tuple, not the intended
        # exception (and is a TypeError in Python 3); instantiate it properly.
        raise ValueError("Dreyfus with IMRs algorithm only works with node-weighted graphs.")
    #
    self.__graph = graph
    self.__root = root
    self.__terminals = terminals
    self.__root_term = list(terminals)
    self.__root_term.append(root)
    #
    generator = SuitableNodeWeightGenerator()
    self.__nodes = self.__graph.get_suitable_nodes(generator, excluded_nodes=terminals)
    if root not in self.__nodes:
        self.__nodes.append(root)
    #
    self.__dist = {}
    self.__paths = {}
    temp = list(terminals)  # root is already included (via self.__nodes)
    temp.extend(self.__nodes)
    self.__dist, self.__paths = self.__graph.get_dist_paths(origins=temp, destinations=temp)
def __init__(self, graph, terminals):
    """Initialize algorithm state for an edge-weighted graph.

    The graph is a dict: node -> adjacent_nodes, where adjacent_nodes is a
    dict: adjacent_node -> edge_weight.
    """
    self.__graph = graph
    self.__terminals = terminals
    weight_gen = SuitableNodeWeightGenerator()
    self.__suitable_nodes = self.__graph.get_suitable_nodes(weight_gen, excluded_nodes=terminals)
    # node -> (distances, paths): `distances` maps target -> shortest distance,
    # `paths` maps target -> list of nodes ordered from node to target.
    self.__dist_paths_node_node = {}
    for node in self.__suitable_nodes:
        dists, routes = self.__graph.compute_shortest(node)
        self.__dist_paths_node_node[node] = (dists, routes)
    # One single-node digraph subtree per terminal.
    self.__subtrees = {}
    for terminal in terminals:
        single = Graph()
        single[terminal] = {}
        self.__subtrees[terminal] = single
    self.__calculate_distances_paths_to_subtrees()
def __init__(self, graph, terminals, pois):
    """Store inputs, pick hotspot candidates, precompute pairwise distances
    and the P-Voronoi cells/medoids for the POIs."""
    self.__graph = graph
    self.__terminals = terminals
    self.__pois = pois
    # Terminals and POIs may not themselves be hotspots.
    excluded = list(terminals)
    excluded.extend(pois)
    weight_gen = SuitableNodeWeightGenerator()
    self.__hotspots = self.__graph.get_suitable_nodes(weight_gen, excluded_nodes=excluded)
    excluded.extend(self.__hotspots)
    # Pairwise distances within terminals + POIs + hotspots (paths skipped),
    # computed only if the graph has none cached yet.
    if len(self.__graph.nodes_dist_paths) == 0:
        self.__graph.compute_dist_paths(origins=excluded, destinations=excluded, compute_paths=False)
    # P-Voronoi cells and their medoids, seeded by the POIs.
    self.__p_cells, self.__medoids = self.__graph.get_voronoi_medoids_cells(self.__pois, excluded)
def __init__(self, graph, terminals, contract_graph=True, contracted_graph=None, within_convex_hull=False,
             dist_paths=None, nodes=None, use_medoid=False):
    """Set up the Dreyfus-with-IMRs algorithm over an (optionally contracted)
    node-weighted graph.

    :param graph: node-weighted suitability graph.
    :param terminals: terminal nodes; terminals[0] is taken as the POI.
    :param contract_graph: work on a suitable-region-contracted copy.
    :param contracted_graph: pre-contracted graph to reuse (copied).
    :param within_convex_hull: restrict suitable nodes to the terminals' convex set.
    :param dist_paths: optional precomputed (dist, paths) pair to reuse.
    :param nodes: optional explicit list of candidate nodes.
    :param use_medoid: pass-through flag for region contraction.
    :raises ValueError: if the graph is not node-weighted.
    """
    # Check whether graph is node-weighted.
    if not graph.is_node_weighted():
        # BUG FIX: `raise (ValueError, "msg")` raised a tuple, not the exception.
        raise ValueError("Dreyfus with IMRs algorithm only works with node-weighted graphs.")
    # Extract POI from the terminals list.
    if len(terminals) > 0:
        self.__poi = terminals[0]
    else:
        return
    # Set object variables.
    generator = SuitableNodeWeightGenerator()
    self.__original_graph = graph
    self.__terminals = terminals
    self.__contract_graph = contract_graph
    self.__use_medoid = use_medoid
    # Contracted graph: reuse the caller's copy or build a fresh contraction.
    if contract_graph:
        if contracted_graph is not None:
            self.__graph = contracted_graph.copy()
        else:
            self.__graph = SuitabilityGraph()
            self.__graph.append_graph(graph)
            self.__graph.contract_suitable_regions(generator, excluded_nodes=terminals,
                                                   get_centroid_medoid=use_medoid)
    else:
        self.__graph = SuitabilityGraph()
        self.__graph.append_graph(graph)
    #
    if nodes is not None:
        self.__nodes = list(nodes)
    else:
        if within_convex_hull:
            # NOTE(review): this branch is unimplemented — self.__nodes stays
            # unset and the append loop below will raise AttributeError.
            # Confirm whether the commented call should be restored.
            pass
            # self.__nodes = self.__graph.get_suitable_nodes_within_convex_set(terminals, generator, dist_paths)
        else:
            self.__nodes = self.__graph.get_suitable_nodes(generator, excluded_nodes=terminals)
    # Terminals take part in the candidate node set as well.
    for t in terminals:
        self.__nodes.append(t)
    #
    self.__dist = {}
    self.__paths = {}
    if dist_paths is not None:
        self.__dist = dict(dist_paths[0])
        self.__paths = dict(dist_paths[1])
    else:
        self.__dist, self.__paths = self.__graph.get_dist_paths(origins=self.__nodes, destinations=self.__nodes)
    #
    self.__s_d = {}
def __init__(self, graph, terminals):
    """Set up the cluster-based algorithm: collect the border nodes of every
    suitable region plus the terminals, and precompute their pairwise
    distances and paths.

    :param graph: node-weighted suitability graph.
    :param terminals: terminal nodes; terminals[0] is taken as the POI.
    :raises ValueError: if the graph is not node-weighted.
    """
    # Check whether graph is node-weighted.
    if not graph.is_node_weighted():
        # BUG FIX: `raise (ValueError, "msg")` raised a tuple, not the exception.
        raise ValueError("Cluster-based algorithm only works with node-weighted graphs.")
    # Extract POI from the terminals list.
    if len(terminals) > 0:
        self.__poi = terminals[0]
    else:
        return
    #
    generator = SuitableNodeWeightGenerator()
    # Set object variables.
    self.__graph = SuitabilityGraph()
    self.__graph.append_graph(graph)
    self.__terminals = terminals
    #
    self.__regions = self.__graph.get_suitable_regions(generator, excluded_nodes=terminals,
                                                       get_border_internal_nodes=True, get_centroid_medoid=True)
    # Candidate nodes: the border nodes of each region (tuple index 1), plus
    # the terminals themselves.
    self.__nodes = []
    for id_r in self.__regions:
        border_nodes = self.__regions[id_r][1]
        self.__nodes.extend(border_nodes)
    for t in terminals:
        self.__nodes.append(t)
    #
    self.__dist, self.__paths = self.__graph.get_dist_paths(origins=self.__nodes, destinations=self.__nodes)
def __init__(self, graph, poi, terminals, max_stops, dist_paths=None, nodes=None):
    """Set up the algorithm state: candidate nodes and all-pairs
    distances/paths among POI, terminals and suitable nodes.

    :param graph: node-weighted suitability graph.
    :param poi: point of interest (added to the node set if absent).
    :param terminals: list of terminal nodes.
    :param max_stops: maximum number of stops allowed.
    :param dist_paths: optional precomputed (dist, paths) pair to reuse.
    :param nodes: optional explicit list of candidate nodes.
    :raises ValueError: if the graph is not node-weighted.
    """
    # Check whether graph is node-weighted.
    if not graph.is_node_weighted():
        # BUG FIX: `raise (ValueError, "msg")` raised a tuple, not the exception.
        raise ValueError("Dreyfus with IMRs algorithm only works with node-weighted graphs.")
    #
    self.__graph = graph
    self.__poi = poi
    self.__terminals = terminals
    self.__max_stops = max_stops
    #
    generator = SuitableNodeWeightGenerator()
    if nodes is not None:
        self.__nodes = list(nodes)
    else:
        self.__nodes = self.__graph.get_suitable_nodes(generator, excluded_nodes=terminals)
    if poi not in self.__nodes:
        self.__nodes.append(poi)
    #
    self.__dist = {}
    self.__paths = {}
    if dist_paths is not None:
        self.__dist = dict(dist_paths[0])
        self.__paths = dict(dist_paths[1])
    else:
        temp = list(terminals)  # POI is already included (via self.__nodes)
        temp.extend(self.__nodes)
        self.__dist, self.__paths = self.__graph.get_dist_paths(origins=temp, destinations=temp)
    #
    self.__s_d = {}
if __name__ == '__main__':
    # Small hand-built node-weighted sample graph, kept for reference:
    # graph = {'a': (1.2, {'b': 4, 'd': 3}),
    #          'b': (1.4, {'a': 4, 'd': 5, 'e': 3, 'f': 3, 'g': 4, 'c': 2}),
    #          'c': (1.1, {'b': 2, 'g': 1}),
    #          'd': (1.7, {'a': 3, 'b': 5, 'e': 1, 'f': 2, 'i': 1, 'h': 1}),
    #          'e': (1.2, {'d': 1, 'b': 3, 'f': 1}),
    #          'f': (1.8, {'e': 1, 'd': 2, 'b': 3, 'g': 2, 'i': 2}),
    #          'g': (1.1, {'f': 2, 'b': 4, 'c': 1, 'i': 3}),
    #          'h': (1.1, {'d': 1, 'i': 2}),
    #          'i': (1.4, {'f': 2, 'g': 3, 'd': 1, 'h': 2})}
    # terminals = ['b', 'c', 'e', 'h', 'i']
    seed = 6
    gh = GridDigraphGenerator()
    generator = SuitableNodeWeightGenerator()
    # Deterministically build a 30x30 node-weighted grid digraph (fixed seed).
    node_weighted = gh.generate(30, 30, node_weighted=True, node_weight_generator=generator, seed=seed)
    terminals = [123, 230, 310, 464, 588, 625, 700]
    # Run the KleinRavi Steiner-tree heuristic and total its node weights.
    kr = KleinRavi(node_weighted, terminals)
    kr_st = kr.steiner_tree()
    kr_cost, node_cost = kr_st.compute_total_weights(terminals)
    ngh = NetworkXGraphHelper(node_weighted)
    # NOTE(review): the draw_graph(...) call is truncated in this chunk; its
    # argument list continues beyond the visible source.
    ngh.draw_graph(
def main(): # Outer bbox. # bounds = [-78.51114567859952, -0.22156158994849384, -78.46239384754483, -0.12980902510699335] # (small) Quito bounds = [-78.57160966654635, -0.4180073651030667, -78.36973588724948, -0.06610523586538203] # (big) Quito # bounds = [144.58265438867193, -38.19424168942873, 145.36955014062505, -37.55250095415727] # Melbourne # bounds = [-74.0326191484375, 40.69502239217181, -73.93236890429688, 40.845827729757275] # Manhattan zone = "Quito" delta_meters = 3000.0 delta = delta_meters / 111111 num_samples = 100 nuq = 5 osm = OsmManager() generator = SuitableNodeWeightGenerator() results = [] sample = 0 initial_seed = 500 while sample < num_samples: # np.random.seed(initial_seed) initial_seed += 1 # Compute bbox coords (inner sample bbox of 25 km^2) min_lon = np.random.uniform(bounds[0], bounds[2] - delta) min_lat = np.random.uniform(bounds[1], bounds[3] - delta) max_lon = min_lon + delta max_lat = min_lat + delta # Generate network sample. graph, _, pois, _, _ = osm.generate_graph_for_bbox(min_lon, min_lat, max_lon, max_lat, generator, hotspots=False, cost_type="travel_time") N = len(graph.keys()) num_pois = len(pois) if num_pois == 0: continue # Group POIs by subtype (activity). ps_subtype = dict() for p in pois: ps_subtype.setdefault(graph[p][2]['subtype'], []).append(p) # Available nodes for users. nq = len(ps_subtype.keys()) free_nodes = set(graph.keys()).difference(pois) if len(free_nodes) < nq * nuq: continue # Create queries. queries = [] occupied = set() for _, pois_ in ps_subtype.iteritems(): where = set(free_nodes).difference(occupied) terminals = np.random.choice(a=list(where), size=nuq, replace=False) queries.append((terminals, pois_)) occupied.update(terminals) # Compute capacity for every road segment. 
graph.capacitated = True capacity = int(math.ceil((nuq / 4.0 * nq) / 4.0)) graph.set_capacities({e: capacity for e in graph.get_edges()}) # merge_users = False max_iter = 20 alpha = 1.0 beta = 4.0 # VST-NCA ****************************************************************************************************** vst_rs = VST_RS(graph) st = time.clock() try: _, c, warl, mwrl, mrl1, mrl2, entropy = vst_rs.non_congestion_aware(queries, 4, 8, bpr, merge_users=merge_users, alpha=alpha, beta=beta, verbose=False) except: continue et = time.clock() - st line = ["VST-NCA", "N/A", zone, N, capacity, merge_users, sample, nq, nuq, "N/A", num_pois, c, warl, mwrl, mrl1, mrl2, 0, et, alpha, beta, entropy] print line results.append(line) # VST-CA MIXED ************************************************************************************************ vst_rs = VST_RS(graph) st = time.clock() try: _, c, warl, mwrl, mrl1, mrl2, entropy, ni = vst_rs.congestion_aware(queries, 4, 8, bpr, merge_users=merge_users, max_iter=max_iter, alpha=alpha, beta=beta, verbose=False, randomize=True) except: continue et = time.clock() - st ni_ = str(ni) if ni == max_iter: ni_ += "(*)" line = ["VST-CA", "mixed", zone, N, capacity, merge_users, sample, nq, nuq, "N/A", num_pois, c, warl, mwrl, mrl1, mrl2, ni_, et, alpha, beta, entropy] print line results.append(line) # VST-CA PURE ************************************************************************************************* vst_rs = VST_RS(graph) st = time.clock() try: _, c, warl, mwrl, mrl1, mrl2, entropy, ni = vst_rs.congestion_aware(queries, 4, 8, bpr, merge_users=merge_users, max_iter=max_iter, alpha=alpha, beta=beta, verbose=False, randomize=False) except: continue et = time.clock() - st ni_ = str(ni) if ni == max_iter: ni_ += "(*)" line = ["VST-CA", "pure", zone, N, capacity, merge_users, sample, nq, nuq, "N/A", num_pois, c, warl, mwrl, mrl1, mrl2, ni_, et, alpha, beta, entropy] print line results.append(line) sample += 1 result_file = 
open("files/vstca_vstnca_osm_1_" + time.strftime("%d%b%Y_%H%M%S") + ".csv", 'wb') wr = csv.writer(result_file) wr.writerows(results)
def __init__(self, graph, terminals, poi, max_level_attraction=2, contract_graph=True, contracted_graph=None,
             within_convex_hull=False, dist_paths_suitable_nodes=None):
    """Set up the gravitation algorithm and run the attraction phase: each
    suitable node, processed nearest-to-POI first, attracts its neighbourhood
    up to `max_level_attraction` levels.

    :param graph: node-weighted suitability graph.
    :param terminals: terminal nodes.
    :param poi: point of interest.
    :param max_level_attraction: attraction depth per suitable node.
    :param contract_graph: work on a suitable-region-contracted copy.
    :param contracted_graph: pre-contracted graph to reuse (copied).
    :param within_convex_hull: restrict suitable nodes to the convex set.
    :param dist_paths_suitable_nodes: optional dict node -> (dist, paths) to reuse.
    :raises ValueError: if the graph is not node-weighted.
    """
    if not graph.is_node_weighted():
        # BUG FIX: `raise (ValueError, "msg")` raised a tuple, not the exception.
        raise ValueError("Gravitation algorithm only works with node-weighted graphs.")
    # Store class variables for future references.
    self.__original_graph = graph
    self.__terminals = terminals
    self.__poi = poi
    self.__contract_graph = contract_graph
    #
    terminals_poi = list(terminals)
    terminals_poi.append(poi)
    generator = SuitableNodeWeightGenerator()
    # Contracted graph: reuse the caller's copy or build a fresh contraction.
    if contract_graph:
        if contracted_graph is not None:
            self.__graph = contracted_graph.copy()
        else:
            self.__graph = SuitabilityGraph()
            self.__graph.append_graph(graph)
            self.__graph.contract_suitable_regions(generator, excluded_nodes=terminals_poi)
    else:
        self.__graph = SuitabilityGraph()
        self.__graph.append_graph(graph)
    # Copy distances and paths dictionary since it will be changed.
    dist_paths = None
    if dist_paths_suitable_nodes is not None:
        dist_paths = dict(dist_paths_suitable_nodes)
        for e in terminals_poi:
            dist_paths[e] = dijkstra(self.__graph, e)
    # Get the suitable nodes.
    if within_convex_hull:
        self.__suitable_nodes = self.__graph.get_suitable_nodes_within_convex_set(terminals_poi, generator,
                                                                                  dist_paths)
    else:
        self.__suitable_nodes = self.__graph.get_suitable_nodes(generator, excluded_nodes=terminals_poi)
    # Distances/paths per suitable node (reused when provided), and make sure
    # every terminal and the POI have an entry too.
    self.__dist_paths_node_node = {}
    if dist_paths is not None:
        self.__dist_paths_node_node = {n: dist_paths[n] for n in self.__suitable_nodes}
    else:
        self.__dist_paths_node_node = {n: dijkstra(self.__graph, n) for n in self.__suitable_nodes}
    for e in terminals_poi:
        if e not in self.__dist_paths_node_node:
            self.__dist_paths_node_node[e] = dijkstra(self.__graph, e)
    # Largest shortest-path distance seen from any suitable node (0 if none).
    max_distances = [max(self.__dist_paths_node_node[n][0].values()) for n in self.__suitable_nodes
                     if len(self.__dist_paths_node_node[n][0].values()) > 0]
    if len(max_distances) > 0:
        self.__max_dist = max(max_distances)
    else:
        self.__max_dist = 0
    # Distance from each suitable node to the POI; unreachable nodes get
    # sys.maxint so they sort last.
    dist_to_poi = {}
    for n in self.__suitable_nodes:
        try:
            dist_to_poi[n] = self.__dist_paths_node_node[n][0][poi]
        except KeyError:
            dist_to_poi[n] = sys.maxint
    # Attract neighbourhoods, closest-to-POI nodes first.
    ord_suit_nodes = sorted(dist_to_poi.iteritems(), key=operator.itemgetter(1))
    for n, _ in ord_suit_nodes:
        mass = self.__calculate_mass_suitable_node(n)
        self.__attract_nodes_to(n, mass, n, max_level_attraction, 0, [])
def __init__(self, graph, terminals, hot_spots=None, generator=None, distances=None):
    """Set up the Lazy Steiner Tree state: hot spots, candidate nodes, and a
    lazily-refined distance table (exact 'N' entries for terminal pairs,
    haversine estimates 'E' between hot spots).

    :param graph: node-weighted suitability graph.
    :param terminals: terminal nodes; terminals[0] is taken as the POI.
    :param hot_spots: optional explicit hot-spot list (else suitable nodes).
    :param generator: optional weight generator (defaults to a fresh one).
    :param distances: optional precomputed distance table to reuse (copied).
    :raises ValueError: if the graph is not node-weighted.
    """
    # Check whether graph is node-weighted.
    if not graph.is_node_weighted():
        # BUG FIX: `raise (ValueError, "msg")` raised a tuple, not the exception.
        raise ValueError("Lazy Steiner Tree only works with node-weighted graphs.")
    # Extract POI from the terminals list.
    if len(terminals) > 0:
        self.__poi = terminals[0]
    else:
        return
    # Set object variables.
    self.__graph = SuitabilityGraph()
    self.__graph.append_graph(graph)
    self.__terminals = terminals
    self.__hot_spots = None
    self.__nodes = None
    self.__s_d = {}
    self.__paths = {}
    self.__refs = {}
    # Set hot spots.
    if hot_spots is None:
        if generator is None:
            generator = SuitableNodeWeightGenerator()
        self.__hot_spots = self.__graph.get_suitable_nodes(generator, excluded_nodes=terminals)
    else:
        self.__hot_spots = list(hot_spots)
    # Set nodes = hot spots + terminals.
    self.__nodes = list(self.__hot_spots)
    for t in terminals:
        self.__nodes.append(t)
    # Set distances.
    if distances is None:
        len_hot_spots = len(self.__hot_spots)
        self.__distances = {}
        # Exact ('N') Dijkstra distances/paths from every terminal; sys.maxint
        # and an empty path for unreachable nodes.
        for t in self.__terminals:
            dist, paths = dijkstra(self.__graph, t, self.__nodes)
            for n in self.__nodes:
                try:
                    self.__distances[tuple(sorted([t, n]))] = (dist[n], 'N')
                    self.__paths[tuple(sorted([t, n]))] = paths[n]
                except KeyError:
                    self.__distances[tuple(sorted([t, n]))] = (sys.maxint, 'N')
                    self.__paths[tuple(sorted([t, n]))] = []
        # Haversine estimates ('E') between distinct hot-spot pairs; 0/'N' on
        # the diagonal. PERF FIX: enumerate() replaces the O(n) list.index()
        # lookup per outer iteration (same resulting table).
        for idx, h1 in enumerate(self.__hot_spots):
            for i in range(idx, len_hot_spots):
                h2 = self.__hot_spots[i]
                distance = 0
                d_type = 'E'
                if h1 == h2:
                    d_type = 'N'
                else:
                    distance = haversine(self.__graph[h1][2]['lat'], self.__graph[h1][2]['lon'],
                                         self.__graph[h2][2]['lat'], self.__graph[h2][2]['lon'])
                self.__distances[tuple(sorted([h1, h2]))] = (distance, d_type)
    else:
        self.__distances = dict(distances)
def index(request):
    """Django view for the congestion app.

    Dispatches on GET parameters:
      * op=show_pois      -- build a network sample for the given bbox and
                             return its POIs as geo-coordinates.
      * op=slice_pois     -- restrict the session POIs to a smaller bbox.
      * op=create_queries -- pick random terminals per POI subtype and store
                             the queries in the session.
      * alg=...           -- run the chosen VST algorithm over the session
                             queries and return the resulting routes.
    Otherwise renders the plain index page.
    """
    generator = SuitableNodeWeightGenerator()
    # Long integers seem not to be JSON serializable. Thus, str() function is used whenever the integer does not come
    # from session or from the DB. (Not pretty sure!)
    if 'op' in request.GET:
        # Bbox corners as sent by the client.
        top = request.GET.get('top')
        left = request.GET.get('left')
        bottom = request.GET.get('bottom')
        right = request.GET.get('right')
        print top, left, bottom, right
        # NOTE(review): GET values are strings, so min()/max() compare
        # lexicographically here -- confirm float conversion is not needed.
        min_lon = min(left, right)
        min_lat = min(top, bottom)
        max_lon = max(left, right)
        max_lat = max(top, bottom)
        #
        osm = OsmManager()
        # CREATE NETWORK SAMPLE ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        if request.GET['op'] == 'show_pois':
            graph, _, pois, _, _ = \
                osm.generate_graph_for_bbox(min_lon, min_lat, max_lon, max_lat, generator, hotspots=False,
                                            cost_type="travel_time")
            #
            request.session['graph'] = graph
            # request.session['graph'] = {(str(e[0]), str(e[1])): v for e, v in graph.edges.iteritems()}
            request.session['pois'] = pois
            # POIs as (lat, lon, id, subtype) tuples for the client map.
            geo_pois = [(graph[p][2]['lat'], graph[p][2]['lon'], p, graph[p][2]['subtype']) for p in pois]
            return HttpResponse(json.dumps(dict(
                isOk=1,
                content=render_to_string('congestion/index.html', {}),
                pois=geo_pois,
            )))  # , default=decimal_default))
        # SLICE POIS +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        elif request.GET['op'] == 'slice_pois':
            graph = get_suitability_graph_from_session(request)
            pois = request.session['pois']
            # Keep only the session POIs that fall inside the new bbox.
            s_pois = osm.get_nodes_for_bbox(min_lon, min_lat, max_lon, max_lat, hotspots=False)
            s_pois = set(pois).intersection(s_pois)
            #
            request.session['pois'] = list(s_pois)
            #
            geo_pois = [(graph[p][2]['lat'], graph[p][2]['lon'], p, graph[p][2]['subtype']) for p in s_pois]
            return HttpResponse(json.dumps(dict(
                isOk=1,
                content=render_to_string('congestion/index.html', {}),
                pois=geo_pois,
            )))  # , default=decimal_default))
        # CREATE QUERIES +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        elif request.GET['op'] == "create_queries":
            nuq = int(request.GET.get('nusers'))
            seed = request.GET.get('seed')
            #
            graph = get_suitability_graph_from_session(request)
            pois = request.session['pois']
            # How many different activities were sliced?
            ps_subtype = dict()
            for p in pois:
                ps_subtype.setdefault(graph[p][2]['subtype'], []).append(p)
            # Candidate terminal nodes: in-bbox graph nodes that are not POIs.
            s_nodes = osm.get_nodes_for_bbox(min_lon, min_lat, max_lon, max_lat, hotspots=False)
            s_nodes = set(graph.keys()).intersection(s_nodes).difference(pois)
            # One query per subtype: nuq random distinct (never reused) terminals.
            queries = []
            ts_subtype = dict()
            occupied = set()
            np.random.seed(int(seed))
            for subtype, pois_ in ps_subtype.iteritems():
                where = set(s_nodes).difference(occupied)
                terminals = np.random.choice(a=list(where), size=nuq, replace=False)
                queries.append(([str(t) for t in terminals], pois_, subtype))
                occupied.update(terminals)
                ts_subtype[subtype] = list(terminals)
            #
            request.session['queries'] = queries
            #
            geo_pois = [(graph[p][2]['lat'], graph[p][2]['lon'], p, graph[p][2]['subtype']) for p in pois]
            geo_terminals = []
            for subtype, ts in ts_subtype.iteritems():
                for t in ts:
                    geo_terminals.append((graph[t][2]['lat'], graph[t][2]['lon'], str(t), subtype))
            return HttpResponse(json.dumps(dict(
                isOk=1,
                content=render_to_string('congestion/index.html', {}),
                pois=geo_pois,
                terminals=geo_terminals,
            )))  # , default=decimal_default))
    elif 'alg' in request.GET:
        alg = request.GET.get('alg')
        print alg
        # Set up the graph.
        graph = get_suitability_graph_from_session(request)
        graph.capacitated = True
        graph.set_capacities({e: 2 for e in graph.get_edges()})  # FIX THIS +++++++++++++++++++++++++++++++++++++++++++
        #
        queries = get_queries_from_session(request)
        queries_ = [(ts, pois) for ts, pois, _ in queries]
        #
        ni = 0
        # # with open('file_tt.txt', 'w') as file_:
        # #     file_.write(json.dumps(graph))
        #
        merge_users = False
        max_iter = 20
        alpha = 1.0
        beta = 4.0
        vst_rs = VST_RS(graph)
        st = time.clock()
        if alg == 'vst-nca':
            plans, cost, warl, mwrl, mrl1, mrl2, entropy = \
                vst_rs.non_congestion_aware(queries_, 4, 8, bpr, merge_users=merge_users, alpha=alpha, beta=beta,
                                            verbose=True)
        elif alg == "vst-ca-mixed":
            plans, cost, warl, mwrl, mrl1, mrl2, entropy, ni = \
                vst_rs.congestion_aware(queries_, 4, 8, bpr, merge_users=merge_users, max_iter=max_iter, alpha=alpha,
                                        beta=beta, verbose=True, randomize=True)
        else:
            plans, cost, warl, mwrl, mrl1, mrl2, entropy, ni = \
                vst_rs.congestion_aware(queries_, 4, 8, bpr, merge_users=merge_users, max_iter=max_iter, alpha=alpha,
                                        beta=beta, verbose=True, randomize=False)
        elapsed_time = time.clock() - st
        # Flatten every plan's forest into geo edges for the client map.
        geo_edges = []
        for ord_, plan, _ in plans:
            geo_edges.extend(get_geo_forest_edges(queries[ord_][2], plan, graph))
        return HttpResponse(json.dumps(dict(
            content=render_to_string('congestion/index.html', {}),
            route=geo_edges,
            cost=cost,
            elapsed_time=elapsed_time,
            warl=warl,
            mwrl=mwrl,
            mrl1=mrl1,
            mrl2=mrl2,
            ent=entropy,
            ni=ni)))
    else:
        return render(request, 'congestion/index.html', {})
def setUp(self):
    """Build the test fixture: an OSM-derived graph for a fixed bbox,
    without hotspots or POIs."""
    self.osmmgr = OsmManager()
    self.bbox = (144.942043, -37.822496, 145.053342, -37.734496)
    weight_gen = SuitableNodeWeightGenerator()
    west, south, east, north = self.bbox
    self.graph = self.osmmgr.generate_graph_for_bbox(west, south, east, north, weight_gen,
                                                     hotspots=False, pois=False)
def index(request):
    """Django view for the hotspots app.

    Dispatches on GET parameters:
      * file_to_retrieve_dhs                     -- list departure hours for a file.
      * file_to_retrieve_acts + dh_to_retrieve_acts -- list destination activities.
      * file + dh + act                          -- build the network, choose
                                                    terminals/hot-spots, store in session.
      * alg=...                                  -- run the chosen Steiner-forest algorithm.
    Otherwise renders the plain index page.
    """
    #
    generator = SuitableNodeWeightGenerator()
    if 'file_to_retrieve_dhs' in request.GET:
        #
        file_ = request.GET.get('file_to_retrieve_dhs')
        osm = OsmManager()
        dep_hours = osm.get_departure_hours(file_)
        return HttpResponse(json.dumps(dict(dh=dep_hours)))
    elif 'file_to_retrieve_acts' in request.GET and 'dh_to_retrieve_acts' in request.GET:
        #
        file_ = request.GET.get('file_to_retrieve_acts')
        dh = request.GET.get('dh_to_retrieve_acts')
        osm = OsmManager()
        dest_acts = osm.get_dest_activities(file_, dh)
        return HttpResponse(json.dumps(dict(acts=dest_acts)))
    elif 'file' in request.GET and 'dh' in request.GET and 'act' in request.GET:
        #
        file_ = request.GET.get('file')
        dh = request.GET.get('dh')
        act = request.GET.get('act')
        print file_, dh, act
        #
        osm = OsmManager()
        graph, hotspots, pois, nodes_by_sa1_code, nodes_by_sa2_code = \
            osm.generate_graph_for_file(file_, act, generator)
        terminals = osm.choose_terminals_according_to_vista(file_, dh, act, nodes_by_sa1_code)
        # Reset the detected hot-spots' weights before re-choosing them below.
        reset_hotspots_weights = {h: generator.weights["WARNING"][0] for h in hotspots}
        graph.update_node_weights(reset_hotspots_weights)
        excluded = list(pois)
        excluded.extend(terminals)
        #
        rest_nodes = list(set(graph.keys()).difference(excluded))
        # # Option A: Hot-spots are the rest of the nodes, i.e., users can meet anywhere.
        # hotspots = list(rest_nodes)
        # # Option B: Hot-spots chosen randomly from the rest of the nodes, i.e., nodes that aren't terminals nor POIs.
        # ind = np.random.choice(a=len(rest_nodes), size=len(hotspots), replace=False)
        # hotspots = [rest_nodes[i] for i in ind]
        # Option C: Hot-spots chosen based on population distribution.
        # TODO: Dynamic sa3 code
        hotspots = osm.choose_hotspots_according_to_population(21303, len(hotspots), nodes_by_sa2_code, excluded)
        weights = {h: generator.weights["VERY_SUITABLE"][0] for h in hotspots}
        graph.update_node_weights(weights)
        # Pairwise distances (no paths) among hot-spots + POIs + terminals.
        temp = list(hotspots)
        temp.extend(pois)
        temp.extend(terminals)
        graph.compute_dist_paths(origins=temp, destinations=temp, compute_paths=False)
        #
        request.session['graph'] = graph
        request.session['dist'] = {str(k[0]) + "," + str(k[1]): v for k, v in graph.dist.iteritems()}
        request.session['pairs_dist_paths'] = [str(v) + "," + str(w) for v, w in graph.pairs_dist_paths]
        request.session['hotspots'] = hotspots
        request.session['pois'] = pois
        request.session['terminals'] = terminals
        # (lat, lon, id) tuples for the client map.
        geo_hotspots = [(graph[h][2]['lat'], graph[h][2]['lon'], h) for h in hotspots]
        geo_pois = [(graph[p][2]['lat'], graph[p][2]['lon'], p) for p in pois]
        geo_terminals = [(graph[t][2]['lat'], graph[t][2]['lon'], t) for t in terminals]
        return HttpResponse(json.dumps(dict(
            isOk=1,
            content=render_to_string('hotspots/index.html', {}),
            hotspots=geo_hotspots,
            pois=geo_pois,
            terminals=geo_terminals,
        )))  # , default=decimal_default))
    elif 'alg' in request.GET:
        alg = request.GET.get('alg')
        print alg
        #
        graph = get_suitability_graph_from_session(request)
        hotspots = request.session['hotspots']
        pois = request.session['pois']
        terminals = request.session['terminals']
        # pdb.set_trace()
        #
        if alg == 'rahman':
            cap = int(request.GET.get('cap_r'))
            vst_rs = VST_RS(graph, nodes=hotspots)
            start_time = time.clock()
            forest, cost, gr, avg_dr, num_trees, avg_or, _, _ = vst_rs.steiner_forest(terminals, pois, cap, 8)
            elapsed_time = time.clock() - start_time
        else:
            cap = int(request.GET.get('cap_c'))
            mdr = request.GET.get('mdr')
            mwd = request.GET.get('mwd')
            # Empty/missing detour and walking-distance bounds mean
            # "unbounded" (sys.maxint sentinel).
            if mdr is not None and mdr != '':
                mdr = float(request.GET.get('mdr'))
            else:
                mdr = sys.maxint
            if mwd is not None and mwd != '':
                mwd = float(request.GET.get('mwd'))
            else:
                mwd = sys.maxint
            #
            print mdr, mwd
            hb = HotspotBased(graph, terminals, pois)
            start_time = time.clock()
            forest, cost, gr, avg_dr, num_trees, avg_or, _ = \
                hb.steiner_forest(k=cap, max_dr=mdr, max_wd=mwd, get_lsv=False)
            elapsed_time = time.clock() - start_time
        #
        geo_steiner_tree_edges = get_geo_steiner_tree_edges(forest, graph)
        return HttpResponse(json.dumps(dict(
            content=render_to_string('hotspots/index.html', {}),
            route=geo_steiner_tree_edges,
            distance=cost,
            elapsed_time=elapsed_time,
            gr=gr,
            avg_dr=avg_dr,
            num_cars=num_trees,
            avg_or=avg_or
        )))
    else:
        return render(request, 'hotspots/index.html', {})
def __init__(self, graph, terminals, poi, contract_graph=True, contracted_graph=None, within_convex_hull=False,
             dist_paths_suitable_nodes=None):
    """Set up the Spiders algorithm: optionally contract the graph, collect
    suitable nodes (plus the POI), compute their distances/paths, and seed one
    single-node subtree per terminal.

    :param graph: node-weighted suitability graph.
    :param terminals: terminal nodes.
    :param poi: point of interest (appended to the suitable-node list).
    :param contract_graph: work on a suitable-region-contracted copy.
    :param contracted_graph: pre-contracted graph to reuse (copied).
    :param within_convex_hull: restrict suitable nodes to the convex set.
    :param dist_paths_suitable_nodes: optional dict node -> (dist, paths) to reuse.
    :raises ValueError: if the graph is not node-weighted.
    """
    # Check whether graph is node-weighted.
    if not graph.is_node_weighted():
        # BUG FIX: `raise (ValueError, "msg")` raised a tuple, not the exception.
        raise ValueError("Spiders algorithm only works with node-weighted graphs.")
    # Store class variables for future references.
    self.__original_graph = graph
    self.__terminals = terminals
    self.__poi = poi
    self.__contract_graph = contract_graph
    terminals_poi = list(terminals)
    terminals_poi.append(poi)
    generator = SuitableNodeWeightGenerator()
    # Contracted graph: reuse the caller's copy or build a fresh contraction.
    if contract_graph:
        if contracted_graph is not None:
            self.__graph = contracted_graph.copy()
        else:
            self.__graph = SuitabilityGraph()
            self.__graph.append_graph(graph)
            self.__graph.contract_suitable_regions(generator, excluded_nodes=terminals_poi)
    else:
        self.__graph = SuitabilityGraph()
        self.__graph.append_graph(graph)
    # Copy distances and paths dictionary since it will be changed.
    dist_paths = None
    if dist_paths_suitable_nodes is not None:
        dist_paths = dict(dist_paths_suitable_nodes)
        for e in terminals_poi:
            dist_paths[e] = dijkstra(self.__graph, e)
    # Get the suitable nodes.
    if within_convex_hull:
        self.__suitable_nodes = self.__graph.get_suitable_nodes_within_convex_set(terminals_poi, generator,
                                                                                  dist_paths)
    else:
        self.__suitable_nodes = self.__graph.get_suitable_nodes(generator, excluded_nodes=terminals_poi)
    # POI will be included in this list.
    self.__suitable_nodes.append(poi)
    # Calculate distances and paths between nodes. IMPORTANT: Only suitable
    # nodes are regarded as start nodes; terminals/POI are added afterwards.
    self.__dist_paths_node_node = {}
    if dist_paths is not None:
        self.__dist_paths_node_node = {n: dist_paths[n] for n in self.__suitable_nodes}
    else:
        self.__dist_paths_node_node = {n: dijkstra(self.__graph, n) for n in self.__suitable_nodes}
    for e in terminals_poi:
        if e not in self.__dist_paths_node_node:
            self.__dist_paths_node_node[e] = dijkstra(self.__graph, e)
    # For every terminal create a subtree which has such terminal as the only
    # node. Each subtree is a digraph carrying the node's weight.
    self.__subtrees = {}
    for s in terminals:
        subtree = SuitabilityGraph()
        subtree[s] = (self.__graph[s][0], {})
        self.__subtrees[s] = subtree
    # IMPORTANT: This method calculates the distances and paths from suitable nodes only.
    self.__calculate_distances_paths_to_subtrees()
def __init__(self, graph, terminals, pois, hot_spots=None):
    """Initialize the hotspot-based algorithm: hot spots, pairwise distances,
    POI medoids, and the per-terminal bookkeeping tables."""
    self.__graph = graph
    self.__terminals = terminals
    self.__pois = pois
    # Terminals and POIs may not themselves be hot spots.
    excluded = list(terminals) + list(pois)
    weight_gen = SuitableNodeWeightGenerator()
    self.__hot_spots = self.__graph.get_suitable_nodes(weight_gen, excluded_nodes=excluded)
    self.__actual_hs = set(self.__hot_spots)
    if hot_spots is not None:
        self.__hot_spots = hot_spots
    excluded.extend(self.__hot_spots)
    # Pairwise distances (no paths) among terminals + POIs + hot spots.
    self.__graph.compute_dist_paths(origins=excluded, destinations=excluded, compute_paths=False)
    # Compute P-Voronoi cells; only the medoids are kept.
    _, self.__medoids = self.__graph.get_voronoi_medoids_cells(self.__pois, excluded)
    # Subtree-SVs
    self.__leaves_subtree_sv = {}
    self.__inv_subtree_sv = {}
    # Per-terminal / pseudo-terminal bookkeeping.
    self.__ind_cost = {t: self.__graph.dist[tuple(sorted([t, self.__medoids[t]]))] for t in self.__terminals}
    self.__cum_loss = {t: 0 for t in self.__terminals}
    self.__cum_num_terms = {t: 1 for t in self.__terminals}
    self.__vert_tree = {t: {t} for t in self.__terminals}
    self.__term_tree = {t: {t} for t in self.__terminals}
    self.__detour = {t: 0 for t in self.__terminals}
    # Terminals only, i.e. not pseudo-terminals.
    self.__confirmed = {t: None for t in self.__terminals}
    # Gain ratios.
    self.__gain_ratios = {}
    # Loss ratios.
    self.__loss_ratios = {}
    # Aware terminals.
    self.__aware = []