Example #1
 def steiner_forest(self, k, S):
     forests = {}
     mz = VST_RS(self.__graph)
     membership = self.__compute_membership()
     branches = self.__compute_branches(0, {}, membership)
     print len(branches)
     for b in branches:
         groups = {}
         for t, m in b.iteritems():
             if m in groups:
                 groups[m].append(t)
             else:
                 groups[m] = [t]
         forest, cost = mz.steiner_forest(
             [ts for _, ts in groups.iteritems()], self.__terminals,
             self.__pois, k, S)
         forests[cost] = forest
     return forests[min(forests)], min(forests)
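
Note: the inner loop above just groups terminals that share a membership label. The following is a minimal, standalone sketch of that step; the sample branch dict is hypothetical and only illustrates the shape of the data.

def group_terminals_by_membership(branch):
    # branch maps each terminal to the membership label of its branch.
    groups = {}
    for terminal, label in branch.items():
        groups.setdefault(label, []).append(terminal)
    return groups

# Hypothetical input: terminals 1 and 3 share label 'a'; terminal 2 has label 'b'.
print(group_terminals_by_membership({1: 'a', 2: 'b', 3: 'a'}))
# -> {'a': [1, 3], 'b': [2]}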
Example #2
 def steiner_forest(self, requests, k=4):
     lambda_ = 1
     MSTs = {}
     n = len(self.__graph)
     for i, (terminals, pois) in enumerate(requests):
         self.__graph.update_edge_weights({
             e: self.__weights[e] * n**(self.congestion[e] / lambda_ - 1)
             for e in self.__edges
         })
         mz = VST_RS(self.__graph)
         Ti, l, _, _, _, _, _ = mz.steiner_forest(terminals, pois, k,
                                                  sys.maxint)
         MSTs[i] = (Ti, l)
         for e in Ti.get_edges():
             self.congestion[e] += 1
             lambda_ = max(lambda_, self.congestion[e])
     iteration = 1
     while iteration <= 100:
         for i, (terminals, pois) in enumerate(requests):
             cmax = max(self.congestion.values())
             E_Ti = MSTs[i][0].get_edges()
             A = len(E_Ti)
             self.__graph.update_edge_weights({
                 e: self.__weights[e] * A**(self.congestion[e] - cmax)
                 for e in self.__edges
             })
             self.__graph.update_edge_weights(
                 {e: le / A
                  for e, le in E_Ti.iteritems()})
             mz = VST_RS(self.__graph)
             Ti_, l, _, _, _, _, _ = mz.steiner_forest(
                 terminals, pois, k, sys.maxint)
             self.__graph.update_edge_weights(
                 {e: le * A
                  for e, le in E_Ti.iteritems()})
             if MSTs[i][1] > l:
                 for e in MSTs[i][0].get_edges():
                     self.congestion[e] -= 1
                 for e in Ti_.get_edges():
                     self.congestion[e] += 1
                 MSTs[i] = (Ti_, l)
         iteration += 1
     return MSTs
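
Note: the first pass above re-weights every edge as weight * n ** (congestion / lambda_ - 1), so an edge at the current maximum congestion keeps its base weight while an uncongested edge is discounted by a factor of n. A minimal sketch of that re-weighting, with made-up weights and congestion counts, is:

def congestion_weights(weights, congestion, n, lambda_):
    # An edge at the current maximum congestion (lambda_) keeps its base weight;
    # an uncongested edge is discounted by a factor of n.
    return {e: w * n ** (float(congestion[e]) / lambda_ - 1)
            for e, w in weights.items()}

weights = {(0, 1): 1.0, (1, 2): 1.0, (2, 3): 1.0}
congestion = {(0, 1): 2, (1, 2): 1, (2, 3): 0}
print(congestion_weights(weights, congestion, n=4, lambda_=2))
# -> {(0, 1): 1.0, (1, 2): 0.5, (2, 3): 0.25}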
Example #3
    def steiner_forest(self, requests, k=4):
        MSTs = {}
        for i, (terminals, pois) in enumerate(requests):
            mz = VST_RS(self.__graph)
            Ti, l, _, _, _, _, _ = mz.steiner_forest(terminals, pois, k,
                                                     sys.maxint)
            MSTs[i] = (Ti, l)
            weights = dict()
            for e in Ti.get_edges():
                self.congestion[e] += 1
                weights[e] = self.compute_weight(e)
            self.__graph.update_edge_weights(weights)

        iteration = 1
        while iteration <= 100:
            for i, (terminals, pois) in enumerate(requests):
                #         cmax = max(self.congestion.values())
                #         E_Ti = MSTs[i][0].get_edges()
                #         A = len(E_Ti)
                #         self.__graph.update_edge_weights(
                #             {e: self.__weights[e] * A ** (self.congestion[e] - cmax) for e in self.__edges})
                #         self.__graph.update_edge_weights({e: le / A for e, le in E_Ti.iteritems()})
                mz = VST_RS(self.__graph)
                Ti_, l, _, _, _, _, _ = mz.steiner_forest(
                    terminals, pois, k, sys.maxint)
                weights = dict()
                for e in Ti_.get_edges():
                    weights[e] = self.compute_weight(e)
                self.__graph.update_edge_weights(weights)
                #         self.__graph.update_edge_weights({e: le * A for e, le in E_Ti.iteritems()})
                if MSTs[i][1] > l:
                    for e in MSTs[i][0].get_edges():
                        self.congestion[e] -= 1
                    for e in Ti_.get_edges():
                        self.congestion[e] += 1
                    MSTs[i] = (Ti_, l)
            iteration += 1
        return MSTs
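
Note: each refinement iteration above keeps the new tree only when its cost l improves on the stored one, and moves the per-edge congestion counters from the old tree to the new one. A standalone sketch of that acceptance step, assuming hypothetical tree objects that expose get_edges(), is:

def accept_if_cheaper(current, candidate, congestion):
    # current and candidate are (tree, cost) pairs; congestion maps edge -> count.
    old_tree, old_cost = current
    new_tree, new_cost = candidate
    if new_cost >= old_cost:
        return current
    for e in old_tree.get_edges():
        congestion[e] -= 1
    for e in new_tree.get_edges():
        congestion[e] += 1
    return candidate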
Example #4
 graph, hotspots, pois, nodes_by_sa1_code, _ = osm.generate_graph_for_file(file_, act[0], generator)
 terminals = osm.choose_terminals_according_to_vista(file_, dh, act[0], nodes_by_sa1_code)
 temp = list(hotspots)
 temp.extend(pois)
 temp.extend(terminals)
 graph.compute_dist_paths(origins=temp, destinations=temp, compute_paths=False)
 # print graph.issues_dist_paths
 #
 h = len(hotspots)
 t = len(terminals)
 p = len(pois)
 n = len(graph.keys())
 print dh, act, h, t, p, n
 experiment = {}
 #
 mz = VST_RS(graph, terminals, pois, 5, 8)
 try:
     start_time = time.clock()
     forest, cost, gr, avg_dr, num_trees, avg_or = mz.steiner_forest()
     elapsed_time = time.clock() - start_time
 except KeyError as err:
     print err
     continue
 hs_r = set(forest.keys()).intersection(hotspots)
 experiment['mVST-RS'] = ([sa3_code11, dh, act[0], cost, gr, avg_dr, num_trees, avg_or, elapsed_time, h,
                          t, p, n], hs_r)
 print 'mVST-RS', sa3_code11, dh, act[0], cost, gr, avg_dr, num_trees, avg_or, elapsed_time, h, t, p, n
 #
 hb = HotspotBased(graph, terminals, pois)
 start_time = time.clock()
 forest, cost, gr, avg_dr, num_trees, avg_or, lsv = hb.steiner_forest(k=5)
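
Note: time.clock(), used for the timings throughout these examples, was deprecated in Python 3.3 and removed in Python 3.8. If the code is ever ported to Python 3, the equivalent wall-clock pattern would be:

import time

start_time = time.perf_counter()  # replaces time.clock()
# ... run the algorithm being timed ...
elapsed_time = time.perf_counter() - start_time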
Example #5
 for num_pois in nums_pois:
     ind_pois = np.random.choice(a=num_terminals +
                                 max(nums_pois),
                                 size=num_pois,
                                 replace=False)
     pois = [terminals[ind] for ind in ind_pois]
     terminals_ = list(set(terminals).difference(pois))
     # pois = np.random.choice(a=size * size, size=num_pois, replace=False)
     # while len(set(hotspots).intersection(pois)) != 0 \
     #         or len(set(terminals).intersection(pois)) != 0:
     #     pois = np.random.choice(a=size * size, size=num_pois, replace=False)
     # print(pois, ";", terminals_)
     # ----------
     # Mustafiz
     # ----------
     mz = VST_RS(graph_temp, terminals_, pois,
                 num_seats, 10)
     try:
         start_time = time.clock()
         forest, cost, gr, avg_dr, num_cars, avg_or = mz.steiner_forest(
         )
         elapsed_time = time.clock() - start_time
         # cost2, _ = forest.calculate_costs()
         line = [
             "mVST-RS", seed, ms[msns] * ns[msns],
             num_hotspots, num_terminals, num_pois,
             num_seats, sample + 1, elapsed_time, cost,
             gr, avg_dr, num_cars, avg_or
         ]
     except ValueError:
         line = [
             "mVST-RS", seed, ms[msns] * ns[msns],
Example #6
def index(request):
    generator = SuitableNodeWeightGenerator()

    # Long integers do not seem to be JSON serializable, so str() is applied whenever an integer does not come
    # from the session or from the DB. (Not entirely sure.)

    if 'op' in request.GET:
        #
        top = request.GET.get('top')
        left = request.GET.get('left')
        bottom = request.GET.get('bottom')
        right = request.GET.get('right')
        print top, left, bottom, right
        #
        # GET parameters arrive as strings; cast to float so the min/max comparisons are numeric.
        left, right = float(left), float(right)
        top, bottom = float(top), float(bottom)
        min_lon = min(left, right)
        min_lat = min(top, bottom)
        max_lon = max(left, right)
        max_lat = max(top, bottom)
        #
        osm = OsmManager()

        # CREATE NETWORK SAMPLE ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        if request.GET['op'] == 'show_pois':
            graph, _, pois, _, _ = \
                osm.generate_graph_for_bbox(min_lon, min_lat, max_lon, max_lat, generator, hotspots=False,
                                            cost_type="travel_time")
            #
            request.session['graph'] = graph
            # request.session['graph'] = {(str(e[0]), str(e[1])): v for e, v in graph.edges.iteritems()}
            request.session['pois'] = pois
            #
            geo_pois = [(graph[p][2]['lat'], graph[p][2]['lon'], p,
                         graph[p][2]['subtype']) for p in pois]

            return HttpResponse(
                json.dumps(
                    dict(
                        isOk=1,
                        content=render_to_string('congestion/index.html', {}),
                        pois=geo_pois,
                    )))  # , default=decimal_default))

        # SLICE POIS +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        elif request.GET['op'] == 'slice_pois':
            graph = get_suitability_graph_from_session(request)
            pois = request.session['pois']
            #
            s_pois = osm.get_nodes_for_bbox(min_lon,
                                            min_lat,
                                            max_lon,
                                            max_lat,
                                            hotspots=False)
            s_pois = set(pois).intersection(s_pois)
            #
            request.session['pois'] = list(s_pois)
            #
            geo_pois = [(graph[p][2]['lat'], graph[p][2]['lon'], p,
                         graph[p][2]['subtype']) for p in s_pois]

            return HttpResponse(
                json.dumps(
                    dict(
                        isOk=1,
                        content=render_to_string('congestion/index.html', {}),
                        pois=geo_pois,
                    )))  # , default=decimal_default))

        # CREATE QUERIES +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        elif request.GET['op'] == "create_queries":
            nuq = int(request.GET.get('nusers'))
            seed = request.GET.get('seed')
            #
            graph = get_suitability_graph_from_session(request)
            pois = request.session['pois']
            # How many different activities were sliced?
            ps_subtype = dict()
            for p in pois:
                ps_subtype.setdefault(graph[p][2]['subtype'], []).append(p)
            #
            s_nodes = osm.get_nodes_for_bbox(min_lon,
                                             min_lat,
                                             max_lon,
                                             max_lat,
                                             hotspots=False)
            s_nodes = set(graph.keys()).intersection(s_nodes).difference(pois)
            #
            queries = []
            ts_subtype = dict()
            occupied = set()
            np.random.seed(int(seed))
            for subtype, pois_ in ps_subtype.iteritems():
                where = set(s_nodes).difference(occupied)
                terminals = np.random.choice(a=list(where),
                                             size=nuq,
                                             replace=False)
                queries.append(([str(t) for t in terminals], pois_, subtype))
                occupied.update(terminals)
                ts_subtype[subtype] = list(terminals)
            #
            request.session['queries'] = queries
            #
            geo_pois = [(graph[p][2]['lat'], graph[p][2]['lon'], p,
                         graph[p][2]['subtype']) for p in pois]
            geo_terminals = []
            for subtype, ts in ts_subtype.iteritems():
                for t in ts:
                    geo_terminals.append((graph[t][2]['lat'],
                                          graph[t][2]['lon'], str(t), subtype))

            return HttpResponse(
                json.dumps(
                    dict(
                        isOk=1,
                        content=render_to_string('congestion/index.html', {}),
                        pois=geo_pois,
                        terminals=geo_terminals,
                    )))  # , default=decimal_default))

    elif 'alg' in request.GET:
        alg = request.GET.get('alg')
        print alg
        # Set up the graph.
        graph = get_suitability_graph_from_session(request)
        graph.capacitated = True
        graph.set_capacities({
            e: 2
            for e in graph.get_edges()
        })  # FIX THIS +++++++++++++++++++++++++++++++++++++++++++
        #
        queries = get_queries_from_session(request)
        queries_ = [(ts, pois) for ts, pois, _ in queries]
        #
        ni = 0
        #
        # with open('file_tt.txt', 'w') as file_:
        #     file_.write(json.dumps(graph))
        #
        merge_users = False
        max_iter = 20
        alpha = 1.0
        beta = 4.0
        vst_rs = VST_RS(graph)

        st = time.clock()
        if alg == 'vst-nca':
            plans, cost, warl, mwrl, mrl1, mrl2, entropy = \
                vst_rs.non_congestion_aware(queries_, 4, 8, bpr, merge_users=merge_users, alpha=alpha, beta=beta,
                                            verbose=True)
        elif alg == "vst-ca-mixed":
            plans, cost, warl, mwrl, mrl1, mrl2, entropy, ni = \
                vst_rs.congestion_aware(queries_, 4, 8, bpr, merge_users=merge_users, max_iter=max_iter, alpha=alpha,
                                        beta=beta, verbose=True, randomize=True)
        else:
            plans, cost, warl, mwrl, mrl1, mrl2, entropy, ni = \
                vst_rs.congestion_aware(queries_, 4, 8, bpr, merge_users=merge_users, max_iter=max_iter, alpha=alpha,
                                        beta=beta, verbose=True, randomize=False)
        elapsed_time = time.clock() - st
        #
        geo_edges = []
        for ord_, plan, _ in plans:
            geo_edges.extend(
                get_geo_forest_edges(queries[ord_][2], plan, graph))

        return HttpResponse(
            json.dumps(
                dict(content=render_to_string('congestion/index.html', {}),
                     route=geo_edges,
                     cost=cost,
                     elapsed_time=elapsed_time,
                     warl=warl,
                     mwrl=mwrl,
                     mrl1=mrl1,
                     mrl2=mrl2,
                     ent=entropy,
                     ni=ni)))

    else:
        return render(request, 'congestion/index.html', {})
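
Note: the create_queries branch samples nuq terminals for each POI subtype from nodes that no earlier query has used, so the user sets of different queries stay disjoint. A condensed sketch of that sampling, with a hypothetical candidate node set and ps_subtype dict as inputs, is:

import numpy as np

def create_queries(candidate_nodes, ps_subtype, nuq, seed=0):
    # One query per POI subtype: nuq terminals drawn without replacement,
    # never reusing a node that an earlier query already occupies.
    np.random.seed(seed)
    queries, occupied = [], set()
    for subtype, pois_ in ps_subtype.items():
        where = set(candidate_nodes).difference(occupied)
        terminals = np.random.choice(a=list(where), size=nuq, replace=False)
        queries.append(([str(t) for t in terminals], pois_, subtype))
        occupied.update(terminals)
    return queries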
Example #7
    no_pois = np.random.choice(a=range(1, 5), size=no_queries, replace=True)

    queries = []
    occupied = []
    for i in range(no_queries):
        np.random.seed(i)
        where = set(range(m * n)).difference(occupied)
        nodes = np.random.choice(a=list(where),
                                 size=no_users[i] + no_pois[i],
                                 replace=False)
        pois = nodes[:no_pois[i]]
        terminals = nodes[no_pois[i]:]
        queries.append((terminals, pois))
        occupied.extend(nodes)

    vst_rs = VST_RS(graph)

    start_time = time.clock()
    plans, cost, warl, mwrl, mrl1, mrl2, entropy = vst_rs.non_congestion_aware(
        queries, 4, 8, bpr)
    elapsed_time = time.clock() - start_time

    print "Non-congestion-aware:", cost, warl, mwrl, elapsed_time

    vst_rs = VST_RS(graph)

    start_time = time.clock()
    plans, cost, warl, mwrl, mrl1, mrl2, entropy, ni = \
        vst_rs.congestion_aware(queries, 4, 8, bpr, randomize=False, log_history=True)
    elapsed_time = time.clock() - start_time
Example #8
    terminals = np.random.choice(a=list(where), size=150, replace=False)
    pois_3 = terminals[:15]
    terminals_3 = terminals[15:]
    queries.append((terminals_3, pois_3))

    occupied.extend(terminals_3)
    occupied.extend(pois_3)

    np.random.seed(3)
    where = set(range(m * n)).difference(occupied)
    terminals = np.random.choice(a=list(where), size=150, replace=False)
    pois_4 = terminals[:15]
    terminals_4 = terminals[15:]
    queries.append((terminals_4, pois_4))

    vst_rs = VST_RS(graph)

    start_time = time.clock()
    plans, cost, warl, mwrl, mrl1, mrl2, entropy = vst_rs.non_congestion_aware(
        queries, 4, 8, bpr, parallelise=True)
    elapsed_time = time.clock() - start_time

    print cost, warl, mwrl, elapsed_time

    # ngh = NetworkXGraphHelper(graph)
    # # labels = graph.get_capacities()
    # # labels = {e: round(float(vst_rs.load[e]) / graph.get_capacities()[e], 2) for e in graph.get_edges()}
    # labels = {e: vst_rs.load[e] for e in graph.get_edges()}
    #
    # ngh.draw_graph(
    #     special_subgraphs=[(plan, None) for plan in plans],
Example #9
    # suitability_graph.extend_suitable_regions(seed, generator)

    hotspots = suitability_graph.get_suitable_nodes(generator)

    terminals = np.random.choice(a=m * n, size=30, replace=False)
    while set(suitability_graph.keys()).intersection(terminals) != set(
            terminals) or set(hotspots).intersection(terminals) != set():
        terminals = np.random.choice(a=m * n, size=30, replace=False)

    pois = terminals[:3]
    terminals = terminals[3:]

    regions = suitability_graph.get_suitable_regions(generator)

    mz = VST_RS(suitability_graph)
    start_time = time.clock()
    forest, cost, gr, _, _, _, _ = mz.steiner_forest(terminals,
                                                     pois,
                                                     5,
                                                     8,
                                                     merge_users=False)
    elapsed_time = time.clock() - start_time

    # cost2, _ = forest.calculate_costs()

    special_nodes = [(terminals, '#000000', 35), (pois, '#0000FF', 65)]

    ngh = NetworkXGraphHelper(suitability_graph)
    ngh.draw_graph(special_nodes=special_nodes,
                   special_subgraphs=[(forest, None)],
Example #10
import time

import numpy as np

# Module path for the grid generator assumed from the naming convention of the other imports.
from grid_digraph_generator import GridDigraphGenerator
from networkx_graph_helper import NetworkXGraphHelper
from vst_rs import VST_RS

if __name__ == '__main__':

    m = n = 30

    gh = GridDigraphGenerator()
    graph = gh.generate(m, n, edge_weighted=False)

    np.random.seed(0)
    terminals = np.random.choice(a=m * n, size=35, replace=False)
    pois = terminals[:5]
    terminals = terminals[5:]

    vst_rs = VST_RS(graph)
    start_time = time.clock()
    forest, cost, _, _, _, _, _ = vst_rs.steiner_forest(terminals,
                                                        pois,
                                                        4,
                                                        8,
                                                        merge_users=False)
    elapsed_time = time.clock() - start_time

    ngh = NetworkXGraphHelper(graph)
    ngh.draw_graph(special_nodes=[(pois, None, None), (terminals, None, None)],
                   special_subgraphs=[(forest, "#FF0000")],
                   title_1="VST-RS",
                   title_2="Cost: " + str(cost) + ", Elapsed, time: " +
                   str(elapsed_time),
                   edge_labels=vst_rs.load,
Example #11
    gh = GridDigraphGenerator()
    graph = gh.generate(m,
                        n,
                        capacitated=True,
                        capacities_range=(1, 100),
                        edge_weighted=False,
                        node_weighted=True,
                        node_weight_generator=generator,
                        seed=seed
                        )

    # terminals = np.random.choice(a=m * n, size=8, replace=False)
    terminals = [64, 75, 56, 7, 35]
    # pois = terminals[:3]
    pois = [20, 49]
    # terminals = terminals[3:]

    mz = VST_RS(graph)
    start_time = time.clock()
    forest, cost, _, _, _, _, sts, _ = mz.steiner_forest(terminals, pois, 5, 8)
    elapsed_time = time.clock() - start_time

    special_nodes = [(terminals, '#000000', 35), (pois, '#0000FF', 65)]

    ngh = NetworkXGraphHelper(graph)
    ngh.draw_graph(special_nodes=special_nodes,
                   # special_subgraphs=[(forest, "#FF0000")],
                   special_subgraphs=[(st[1], None) for st in sts],
                   title_1="Mustafiz's algorithm, seed = " + str(seed),
                   title_2="Cost: " + str(cost) + ", elapsed time: " + str(elapsed_time))
Example #12
    ngh = NetworkXGraphHelper(graph)
    ngh.draw_graph(special_nodes=[(req_1[0], '#000000', 50),
                                  (req_1[1], '#000000', 100),
                                  (req_2[0], '#0000FF', 50),
                                  (req_2[1], '#0000FF', 100),
                                  (req_3[0], '#13E853', 50),
                                  (req_3[1], '#13E853', 100),
                                  (req_4[0], '#FF0000', 50),
                                  (req_4[1], '#FF0000', 100),
                                  (req_5[0], '#E67E22', 50),
                                  (req_5[1], '#E67E22', 100)],
                   special_subgraphs=special_subgraphs,
                   print_edge_labels=True,
                   edge_labels=b.congestion)

    mz = VST_RS(graph)
    T1, l, _, _, _, _, _ = mz.steiner_forest(req_1[0], req_1[1], 4, sys.maxint)
    ngh = NetworkXGraphHelper(graph)
    ngh.draw_graph(special_nodes=[(req_1[0], '#000000', 50),
                                  (req_1[1], '#000000', 100)],
                   special_subgraphs=[(T1, '#000000')])

    mz = VST_RS(graph)
    T2, l, _, _, _, _, _ = mz.steiner_forest(req_2[0], req_2[1], 4, sys.maxint)
    ngh = NetworkXGraphHelper(graph)
    ngh.draw_graph(special_nodes=[(req_2[0], '#0000FF', 50),
                                  (req_2[1], '#0000FF', 100)],
                   special_subgraphs=[(T2, '#0000FF')])

    mz = VST_RS(graph)
    T3, l, _, _, _, _, _ = mz.steiner_forest(req_3[0], req_3[1], 4, sys.maxint)
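
Note: Example #12 repeats the same build-and-draw block once per request. The repetition could be folded into a loop; the sketch below assumes exactly the VST_RS and NetworkXGraphHelper calls used above and a list pairing each request with its colour.

requests = [(req_1, '#000000'), (req_2, '#0000FF'), (req_3, '#13E853'),
            (req_4, '#FF0000'), (req_5, '#E67E22')]

for req, colour in requests:
    terminals, pois = req[0], req[1]
    mz = VST_RS(graph)
    T, l, _, _, _, _, _ = mz.steiner_forest(terminals, pois, 4, sys.maxint)
    ngh = NetworkXGraphHelper(graph)
    ngh.draw_graph(special_nodes=[(terminals, colour, 50), (pois, colour, 100)],
                   special_subgraphs=[(T, colour)])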
Example #13
    terminals = np.random.choice(a=list(where), size=15, replace=False)
    pois_3 = terminals[:2]
    terminals_3 = terminals[2:]
    queries.append((terminals_3, pois_3))

    occupied.extend(terminals_3)
    occupied.extend(pois_3)

    np.random.seed(3)
    where = set(range(m * n)).difference(occupied)
    terminals = np.random.choice(a=list(where), size=15, replace=False)
    pois_4 = terminals[:2]
    terminals_4 = terminals[2:]
    queries.append((terminals_4, pois_4))

    vst_rs = VST_RS(graph)

    start_time = time.clock()
    plans, cost, warl, mwrl, mrl1, mrl2, entropy, ni = \
        vst_rs.congestion_aware(queries, 4, 8, bpr, randomize=False, max_iter=20)
    elapsed_time = time.clock() - start_time

    print cost, warl, mwrl, elapsed_time

    ngh = NetworkXGraphHelper(graph)
    # labels = graph.get_capacities()
    # labels = {e: round(float(vst_rs.load[e]) / graph.get_capacities()[e], 2) for e in graph.get_edges()}
    labels = {e: vst_rs.load[e] for e in graph.get_edges()}

    ngh.draw_graph(
        special_subgraphs=[(plan, None) for _, plan in plans],
Example #14
        m,
        n,
        edge_weighted=False,
        node_weighted=True,
        node_weight_generator=generator,
        # node_weights=node_weights,
        seed=seed)

    suitability_graph = SuitabilityGraph()
    suitability_graph.append_graph(node_weighted)
    # suitability_graph.compute_dist_paths()

    terminals = [88, 66, 77, 5, 33, 53, 71]
    pois = [65, 12]

    mz = VST_RS(suitability_graph)
    start_time = time.clock()
    forest, cost, gr, avg_dr, num_trees, avg_or, _ = mz.steiner_forest(
        terminals, pois, 5, 8)
    elapsed_time = time.clock() - start_time

    regions = suitability_graph.get_suitable_regions(generator)

    ngh = NetworkXGraphHelper(suitability_graph)
    ngh.draw_graph(special_nodes=[(pois, None, None), (terminals, None, None)],
                   special_subgraphs=[(forest, "#FF0000")],
                   title_1="Mustafiz's algorithm, seed = " + str(seed),
                   title_2="Cost: " + str(cost) + ", Gain ratio: " + str(gr) +
                   ", Avg. detour ratio: " + str(avg_dr) + ", Num. trees: " +
                   str(num_trees) + ", Avg. occ. rate: " + str(avg_or) +
                   ", elapsed time: " + str(elapsed_time),
Example #15
        special.append((pois_z, None, 65))
        #
        # where_p = set(range(m * n)).difference(occupied_p)
        # pois_u = np.random.choice(a=list(where_p), size=1, replace=False)
        # queries_u.append((terminals, pois_u))
        # occupied_p.update(pois_u)

    # np.random.shuffle(queries_z)

    merge_users = False
    max_iter = 50
    alpha = 1.0
    beta = 4.0

    # NCA ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    vst_rs = VST_RS(graph)
    st = time.clock()
    plans, c, warl, mwrl, mrl1, mrl2, entropy = \
        vst_rs.non_congestion_aware(queries_z, 4, 8, bpr, merge_users=merge_users, alpha=alpha, beta=beta, verbose=True)
    et = time.clock() - st

    print c, warl, mwrl, mrl1, mrl2, entropy, et

    ngh = NetworkXGraphHelper(graph)
    labels = {
        e: vst_rs.load[e]
        for e in graph.get_edges() if vst_rs.load[e] > 1
    }
    ngh.draw_graph(special_subgraphs=[(plan, None) for _, plan, _ in plans],
                   special_nodes=special,
                   edge_labels=labels,
Example #16
def main(argv):

    p_method = "pp"

    try:
        opts, args = getopt.getopt(argv, "hm:")
    except getopt.GetoptError as error:
        print(error)
        print_usage()
        sys.exit(2)
    for opt, arg in opts:
        if opt == "-h":
            print_usage()
            sys.exit(0)
        elif opt == "-m":
            p_method = arg
            break

    comm = None
    rank = MASTER_RANK
    if p_method == "mpi":
        comm = MPI.COMM_WORLD
        rank = comm.Get_rank()

    if rank != MASTER_RANK:
        while True:
            res = comm.recv(source=MASTER_RANK)
            print res

    num_samples = 5
    num_queries = [16, 32]
    num_users_query = [16]
    prop_pois_users = 0.1

    m = n = 30
    N = m * n
    graph = GridDigraphGenerator().generate(m, n, edge_weighted=True)

    merge_users = False
    max_iter = 50
    alpha = 1.0
    beta = 4.0
    results = []
    for nq in num_queries:
        for nu in num_users_query:
            num_pois = max(int(prop_pois_users * nu), 1)
            graph.capacitated = True
            capacity = int(math.ceil((nu / 4.0 * nq) / 12.0))
            graph.set_capacities({e: capacity for e in graph.get_edges()})
            print "(nq, nu, np, cap):", (nq, nu, num_pois, capacity)
            for sample in range(num_samples):
                print "\tsample:", sample
                ppq = distribute_pois_in_queries((m, n), nq, num_pois, seed=0)
                queries_u = []
                queries_z = []
                #
                all_pois = []
                for ps in ppq.values():
                    all_pois.extend(ps)
                free_nodes = set(range(m * n)).difference(all_pois)
                #
                occupied_t = set()
                occupied_p = set()
                for i, pois_z in ppq.iteritems():
                    np.random.seed(sample * i)
                    #
                    where_t = set(free_nodes).difference(occupied_t)
                    terminals = np.random.choice(a=list(where_t), size=nu, replace=False)
                    queries_z.append((terminals, pois_z))
                    occupied_t.update(terminals)
                    occupied_p.update(terminals)
                    #
                    where_p = set(range(m * n)).difference(occupied_p)
                    pois_u = np.random.choice(a=list(where_p), size=num_pois, replace=False)
                    queries_u.append((terminals, pois_u))
                    occupied_p.update(pois_u)
                #
                # VST-NCA **********************************************************************************************
                # POIs Zipfian distributed.
                vst_rs = VST_RS(graph)
                st = time.clock()
                _, c, warl, mwrl, mrl1, mrl2, entropy = \
                    vst_rs.non_congestion_aware(queries_z, 4, 8, bpr, merge_users=merge_users, alpha=alpha, beta=beta,
                                                p_method=p_method, verbose=False)
                et = time.clock() - st

                line = ["VST-NCA", "N/A", "zipfian", N, capacity, merge_users, sample, nq, nu,
                        prop_pois_users, num_pois, c, warl, mwrl, mrl1, mrl2, 0, et, alpha, beta, entropy]
                print line
                results.append(line)

                # POIs Uniformly distributed.
                vst_rs = VST_RS(graph)
                st = time.clock()
                _, c, warl, mwrl, mrl1, mrl2, entropy = \
                    vst_rs.non_congestion_aware(queries_u, 4, 8, bpr, merge_users=merge_users, alpha=alpha, beta=beta,
                                                p_method=p_method, verbose=False)
                et = time.clock() - st

                line = ["VST-NCA", "N/A", "uniform", N, capacity, merge_users, sample, nq, nu,
                        prop_pois_users, num_pois, c, warl, mwrl, mrl1, mrl2, 0, et, alpha, beta, entropy]
                print line
                results.append(line)
                # VST-NCA **********************************************************************************************

                # VST-CA ***********************************************************************************************
                # MIXED
                # POIs Zipfian distributed.
                vst_rs = VST_RS(graph)
                st = time.clock()
                _, c, warl, mwrl, mrl1, mrl2, entropy, ni = \
                    vst_rs.congestion_aware(queries_z, 4, 8, bpr, merge_users=merge_users, max_iter=max_iter,
                                            alpha=alpha, beta=beta, verbose=False, randomize=True, p_method=p_method)
                et = time.clock() - st
                ni_ = str(ni)
                if ni == max_iter:
                    ni_ += "(*)"
                line = ["VST-CA", "mixed", "zipfian", N, capacity, merge_users, sample, nq, nu,
                        prop_pois_users, num_pois, c, warl, mwrl, mrl1, mrl2, ni_, et, alpha, beta, entropy]
                print line
                results.append(line)

                # POIs Uniformly distributed.
                vst_rs = VST_RS(graph)
                st = time.clock()
                _, c, warl, mwrl, mrl1, mrl2, entropy, ni = \
                    vst_rs.congestion_aware(queries_u, 4, 8, bpr, merge_users=merge_users, max_iter=max_iter,
                                            alpha=alpha, beta=beta, verbose=False, randomize=True, p_method=p_method)
                et = time.clock() - st
                ni_ = str(ni)
                if ni == max_iter:
                    ni_ += "(*)"
                line = ["VST-CA", "mixed", "uniform", N, capacity, merge_users, sample, nq, nu,
                        prop_pois_users, num_pois, c, warl, mwrl, mrl1, mrl2, ni_, et, alpha, beta, entropy]
                print line
                results.append(line)

                # PURE
                # POIs Zipfian distributed.
                vst_rs = VST_RS(graph)
                st = time.clock()
                _, c, warl, mwrl, mrl1, mrl2, entropy, ni = \
                    vst_rs.congestion_aware(queries_z, 4, 8, bpr, merge_users=merge_users, max_iter=max_iter,
                                            alpha=alpha, beta=beta, verbose=False, randomize=False, p_method=p_method)
                et = time.clock() - st
                ni_ = str(ni)
                if ni == max_iter:
                    ni_ += "(*)"
                line = ["VST-CA", "pure", "zipfian", N, capacity, merge_users, sample, nq, nu,
                        prop_pois_users, num_pois, c, warl, mwrl, mrl1, mrl2, ni_, et, alpha, beta, entropy]
                print line
                results.append(line)

                # POIs Uniformly distributed.
                vst_rs = VST_RS(graph)
                st = time.clock()
                _, c, warl, mwrl, mrl1, mrl2, entropy, ni = \
                    vst_rs.congestion_aware(queries_u, 4, 8, bpr, merge_users=merge_users, max_iter=max_iter,
                                            alpha=alpha, beta=beta, verbose=False, randomize=False, p_method=p_method)
                et = time.clock() - st
                ni_ = str(ni)
                if ni == max_iter:
                    ni_ += "(*)"
                line = ["VST-CA", "pure", "uniform", N, capacity, merge_users, sample, nq, nu,
                        prop_pois_users, num_pois, c, warl, mwrl, mrl1, mrl2, ni_, et, alpha, beta, entropy]
                print line
                results.append(line)

                # VST-CA ***********************************************************************************************

    result_file = open("files/vstca_vstnca_2_" + time.strftime("%d%b%Y_%H%M%S") + ".csv", 'wb')
    wr = csv.writer(result_file)
    wr.writerows(results)
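
Note: the four VST-CA blocks above differ only in the query set, the randomize flag, and the labels written into the result row. A sketch of how they could share one helper is shown below; VST_RS, bpr and time.clock are taken from the example's own scope, and the returned row is condensed (the full experiment also records N, capacity, merge_users, sample, nq, nu, and so on).

def run_vst_ca(graph, queries, mode, dist_label, randomize, params):
    # params carries the shared settings: merge_users, max_iter, alpha, beta, p_method.
    vst_rs = VST_RS(graph)
    st = time.clock()
    _, c, warl, mwrl, mrl1, mrl2, entropy, ni = \
        vst_rs.congestion_aware(queries, 4, 8, bpr,
                                merge_users=params['merge_users'],
                                max_iter=params['max_iter'],
                                alpha=params['alpha'], beta=params['beta'],
                                verbose=False, randomize=randomize,
                                p_method=params['p_method'])
    et = time.clock() - st
    ni_ = str(ni) + ("(*)" if ni == params['max_iter'] else "")
    return ["VST-CA", mode, dist_label, c, warl, mwrl, mrl1, mrl2, ni_, et, entropy]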
Example #17
    graph.append_edge_2((14, 20), weight=1, capacity=1)
    graph.append_edge_2((15, 16), weight=0.5, capacity=1)
    graph.append_edge_2((15, 21), weight=1, capacity=1)
    graph.append_edge_2((16, 17), weight=1, capacity=1)
    graph.append_edge_2((16, 22), weight=1, capacity=1)
    graph.append_edge_2((17, 23), weight=1, capacity=1)
    graph.append_edge_2((18, 19), weight=1, capacity=1)
    graph.append_edge_2((19, 20), weight=1, capacity=1)
    graph.append_edge_2((20, 21), weight=0.5, capacity=1)
    graph.append_edge_2((21, 22), weight=0.5, capacity=1)
    graph.append_edge_2((22, 23), weight=1, capacity=1)

    queries = [([3, 8, 14, 15], [6, 12, 10]), ([20, 21], [1, 9]),
               ([0, 5, 23], [11])]

    vst_rs = VST_RS(graph)
    # plans, cost, weighted_avg_relative_load, max_relative_load = vst_rs.non_congestion_aware(queries, 4, 8, bpr_log)
    plans, cost, warl, mwrl, mrl1, mrl2, entropy, ni = \
        vst_rs.congestion_aware(queries, 4, 8, bpr, log_history=True, randomize=False)

    ngh = NetworkXGraphHelper(graph)
    ngh.draw_graph(special_nodes=[([3, 8, 14, 15], '#000000', 35),
                                  ([6, 12, 10], '#000000', 65),
                                  ([20, 21], '#0000FF', 35),
                                  ([1, 9], '#0000FF', 65),
                                  ([0, 5, 23], '#13E853', 35),
                                  ([11], '#13E853', 65)],
                   special_subgraphs=[(plan, None) for _, plan in plans],
                   title_2="Cost (w. congestion): " + str(round(cost, 2)),
                   edge_labels=vst_rs.load,
                   print_node_labels=True,
Example #18
def main():

    # Outer bbox.
    # bounds = [-78.51114567859952, -0.22156158994849384, -78.46239384754483, -0.12980902510699335]  # (small) Quito
    bounds = [-78.57160966654635, -0.4180073651030667, -78.36973588724948, -0.06610523586538203]  # (big) Quito
    # bounds = [144.58265438867193, -38.19424168942873, 145.36955014062505, -37.55250095415727]  # Melbourne
    # bounds = [-74.0326191484375, 40.69502239217181, -73.93236890429688, 40.845827729757275]  # Manhattan
    zone = "Quito"
    delta_meters = 3000.0

    delta = delta_meters / 111111
    num_samples = 100
    nuq = 5

    osm = OsmManager()
    generator = SuitableNodeWeightGenerator()

    results = []
    sample = 0
    initial_seed = 500
    while sample < num_samples:
        #
        np.random.seed(initial_seed)
        initial_seed += 1
        # Compute the coords of the inner sample bbox (a square of delta_meters per side).
        min_lon = np.random.uniform(bounds[0], bounds[2] - delta)
        min_lat = np.random.uniform(bounds[1], bounds[3] - delta)
        max_lon = min_lon + delta
        max_lat = min_lat + delta
        # Generate network sample.
        graph, _, pois, _, _ = osm.generate_graph_for_bbox(min_lon, min_lat, max_lon, max_lat, generator,
                                                           hotspots=False, cost_type="travel_time")
        N = len(graph.keys())
        num_pois = len(pois)

        if num_pois == 0:
            continue

        # Group POIs by subtype (activity).
        ps_subtype = dict()
        for p in pois:
            ps_subtype.setdefault(graph[p][2]['subtype'], []).append(p)
        # Available nodes for users.
        nq = len(ps_subtype.keys())
        free_nodes = set(graph.keys()).difference(pois)
        if len(free_nodes) < nq * nuq:
            continue
        # Create queries.
        queries = []
        occupied = set()
        for _, pois_ in ps_subtype.iteritems():
            where = set(free_nodes).difference(occupied)
            terminals = np.random.choice(a=list(where), size=nuq, replace=False)
            queries.append((terminals, pois_))
            occupied.update(terminals)
        # Compute capacity for every road segment.
        graph.capacitated = True
        capacity = int(math.ceil((nuq / 4.0 * nq) / 4.0))
        graph.set_capacities({e: capacity for e in graph.get_edges()})
        #
        merge_users = False
        max_iter = 20
        alpha = 1.0
        beta = 4.0

        # VST-NCA ******************************************************************************************************
        vst_rs = VST_RS(graph)
        st = time.clock()
        try:
            _, c, warl, mwrl, mrl1, mrl2, entropy = vst_rs.non_congestion_aware(queries, 4, 8, bpr,
                                                                                merge_users=merge_users, alpha=alpha,
                                                                                beta=beta, verbose=False)
        except:
            continue
        et = time.clock() - st

        line = ["VST-NCA", "N/A", zone, N, capacity, merge_users, sample, nq, nuq, "N/A", num_pois, c, warl, mwrl, mrl1,
                mrl2, 0, et, alpha, beta, entropy]
        print line
        results.append(line)
        # VST-CA  MIXED ************************************************************************************************
        vst_rs = VST_RS(graph)
        st = time.clock()
        try:
            _, c, warl, mwrl, mrl1, mrl2, entropy, ni = vst_rs.congestion_aware(queries, 4, 8, bpr,
                                                                                merge_users=merge_users,
                                                                                max_iter=max_iter, alpha=alpha,
                                                                                beta=beta, verbose=False,
                                                                                randomize=True)
        except:
            continue
        et = time.clock() - st
        ni_ = str(ni)
        if ni == max_iter:
            ni_ += "(*)"
        line = ["VST-CA", "mixed", zone, N, capacity, merge_users, sample, nq, nuq, "N/A", num_pois, c, warl, mwrl,
                mrl1, mrl2, ni_, et, alpha, beta, entropy]
        print line
        results.append(line)
        # VST-CA  PURE *************************************************************************************************
        vst_rs = VST_RS(graph)
        st = time.clock()
        try:
            _, c, warl, mwrl, mrl1, mrl2, entropy, ni = vst_rs.congestion_aware(queries, 4, 8, bpr,
                                                                                merge_users=merge_users,
                                                                                max_iter=max_iter, alpha=alpha,
                                                                                beta=beta, verbose=False,
                                                                                randomize=False)
        except:
            continue
        et = time.clock() - st
        ni_ = str(ni)
        if ni == max_iter:
            ni_ += "(*)"
        line = ["VST-CA", "pure", zone, N, capacity, merge_users, sample, nq, nuq, "N/A", num_pois, c, warl, mwrl, mrl1,
                mrl2, ni_, et, alpha, beta, entropy]
        print line
        results.append(line)

        sample += 1

    result_file = open("files/vstca_vstnca_osm_1_" + time.strftime("%d%b%Y_%H%M%S") + ".csv", 'wb')
    wr = csv.writer(result_file)
    wr.writerows(results)
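
Note: each sample above draws a square inner bbox of side delta (in degrees) uniformly inside the outer bounds. A minimal sketch of that step, using the same [min_lon, min_lat, max_lon, max_lat] bounds convention, is:

import numpy as np

def sample_inner_bbox(bounds, delta):
    # bounds = [min_lon, min_lat, max_lon, max_lat]; delta is the side length in degrees.
    min_lon = np.random.uniform(bounds[0], bounds[2] - delta)
    min_lat = np.random.uniform(bounds[1], bounds[3] - delta)
    return min_lon, min_lat, min_lon + delta, min_lat + delta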
Example #19
def index(request):
    #
    generator = SuitableNodeWeightGenerator()
    if 'file_to_retrieve_dhs' in request.GET:
        #
        file_ = request.GET.get('file_to_retrieve_dhs')
        osm = OsmManager()
        dep_hours = osm.get_departure_hours(file_)
        return HttpResponse(json.dumps(dict(dh=dep_hours)))

    elif 'file_to_retrieve_acts' in request.GET and 'dh_to_retrieve_acts' in request.GET:
        #
        file_ = request.GET.get('file_to_retrieve_acts')
        dh = request.GET.get('dh_to_retrieve_acts')
        osm = OsmManager()
        dest_acts = osm.get_dest_activities(file_, dh)
        return HttpResponse(json.dumps(dict(acts=dest_acts)))

    elif 'file' in request.GET and 'dh' in request.GET and 'act' in request.GET:
        #
        file_ = request.GET.get('file')
        dh = request.GET.get('dh')
        act = request.GET.get('act')
        print file_, dh, act
        #
        osm = OsmManager()
        graph, hotspots, pois, nodes_by_sa1_code, nodes_by_sa2_code = osm.generate_graph_for_file(file_, act, generator)
        terminals = osm.choose_terminals_according_to_vista(file_, dh, act, nodes_by_sa1_code)

        reset_hotspots_weights = {h: generator.weights["WARNING"][0] for h in hotspots}
        graph.update_node_weights(reset_hotspots_weights)

        excluded = list(pois)
        excluded.extend(terminals)
        # rest_nodes = list(set(graph.keys()).difference(excluded))

        # # Option A: Hot-spots are the rest of the nodes, i.e., users can meet anywhere.
        # hotspots = list(rest_nodes)

        # # Option B: Hot-spots chosen randomly from the rest of the nodes, i.e., nodes that aren't terminals nor POIs.
        # ind = np.random.choice(a=len(rest_nodes), size=len(hotspots), replace=False)
        # hotspots = [rest_nodes[i] for i in ind]

        # Option C: Hot-spots chosen based on population distribution.
        # TODO: Dynamic sa3 code
        hotspots = osm.choose_hotspots_according_to_population(21303, len(hotspots), nodes_by_sa2_code, excluded)

        weights = {h: generator.weights["VERY_SUITABLE"][0] for h in hotspots}
        graph.update_node_weights(weights)

        temp = list(hotspots)
        temp.extend(pois)
        temp.extend(terminals)
        graph.compute_dist_paths(origins=temp, destinations=temp, compute_paths=False)
        #
        request.session['graph'] = graph
        request.session['dist'] = {str(k[0]) + "," + str(k[1]): v for k, v in graph.dist.iteritems()}
        request.session['pairs_dist_paths'] = [str(v) + "," + str(w) for v, w in graph.pairs_dist_paths]
        request.session['hotspots'] = hotspots
        request.session['pois'] = pois
        request.session['terminals'] = terminals
        #
        geo_hotspots = [(graph[h][2]['lat'], graph[h][2]['lon'], h) for h in hotspots]
        geo_pois = [(graph[p][2]['lat'], graph[p][2]['lon'], p) for p in pois]
        geo_terminals = [(graph[t][2]['lat'], graph[t][2]['lon'], t) for t in terminals]

        return HttpResponse(json.dumps(dict(
            isOk=1,
            content=render_to_string('hotspots/index.html', {}),
            hotspots=geo_hotspots,
            pois=geo_pois,
            terminals=geo_terminals,
        )))  # , default=decimal_default))

    elif 'alg' in request.GET:
        alg = request.GET.get('alg')
        print alg
        #
        graph = get_suitability_graph_from_session(request)
        hotspots = request.session['hotspots']
        pois = request.session['pois']
        terminals = request.session['terminals']
        # pdb.set_trace()
        #
        if alg == 'rahman':
            cap = int(request.GET.get('cap_r'))
            vst_rs = VST_RS(graph, nodes=hotspots)
            start_time = time.clock()
            forest, cost, gr, avg_dr, num_trees, avg_or, _, _ = vst_rs.steiner_forest(terminals, pois, cap, 8)
            elapsed_time = time.clock() - start_time
        else:
            cap = int(request.GET.get('cap_c'))
            mdr = request.GET.get('mdr')
            mwd = request.GET.get('mwd')
            if mdr is not None and mdr != '':
                mdr = float(request.GET.get('mdr'))
            else:
                mdr = sys.maxint
            if mwd is not None and mwd != '':
                mwd = float(request.GET.get('mwd'))
            else:
                mwd = sys.maxint
            # print mdr, mwd
            hb = HotspotBased(graph, terminals, pois)
            start_time = time.clock()
            forest, cost, gr, avg_dr, num_trees, avg_or, _ = \
                hb.steiner_forest(k=cap, max_dr=mdr, max_wd=mwd, get_lsv=False)
            elapsed_time = time.clock() - start_time
        #
        geo_steiner_tree_edges = get_geo_steiner_tree_edges(forest, graph)

        return HttpResponse(json.dumps(dict(
            content=render_to_string('hotspots/index.html', {}),
            route=geo_steiner_tree_edges,
            distance=cost,
            elapsed_time=elapsed_time,
            gr=gr,
            avg_dr=avg_dr,
            num_cars=num_trees,
            avg_or=avg_or
        )))

    else:
        return render(request, 'hotspots/index.html', {})
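
Note: the mdr/mwd handling above (read an optional GET parameter, fall back to sys.maxint when it is absent or empty) repeats the same pattern twice. A small helper in the same spirit, with a hypothetical name, might look like this:

import sys

def optional_float(request, name, default=sys.maxint):
    # Return the GET parameter as a float, or the default when it is missing or empty.
    value = request.GET.get(name)
    if value is not None and value != '':
        return float(value)
    return default

# mdr = optional_float(request, 'mdr')
# mwd = optional_float(request, 'mwd')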