示例#1
0
def route_cnx(file_name,
              source_id=None,
              target_id=None,
              out_file=None,
              visualize=False,
              benchmark=None):
    """Route on a condensed NX graph loaded from a pickle.

    Three modes: random benchmark (benchmark + out_file) collecting routes,
    paired benchmark (benchmark only) over pickled (source, target) pairs,
    or a single source/target query whose path is saved to out_file.
    """
    from limic.util import start, end, load_pickled, save_path, save_pickled
    start("Loading from", file_name)
    graph = load_pickled(file_name)
    end()
    start("Routing using condensed NX")
    random_benchmark = benchmark and out_file
    if random_benchmark:
        routes = []
        for _ in range(int(benchmark)):
            path = astar_cnx(graph, None, None, routes)
    elif benchmark:
        for src, tgt in load_pickled(benchmark):
            path = astar_cnx(graph, (src, ), (tgt, ))
    else:
        path = astar_cnx(graph, source_id, target_id)
    end()
    if random_benchmark:
        start("Saving routes to", out_file)
        save_pickled(out_file, routes)
        end()
    elif not benchmark:
        start("Saving path to", out_file)
        save_path(path, out_file, visualize)
        end()
示例#2
0
def length_route_nx(file_name, source_id=None, target_id=None, benchmark=None):
    """Compute a shortest-path length on a pickled NX graph and report it."""
    from limic.util import start, end, load_pickled, status
    start("Loading from", file_name)
    graph = load_pickled(file_name)
    end()
    start("Routing using NX")
    if not benchmark:
        length = shortest_length_nx(graph, source_id, target_id)
    else:
        # Benchmark mode: run every pickled (source, target) pair;
        # only the last length is reported.
        for src, tgt in load_pickled(benchmark):
            length = shortest_length_nx(graph, (src, ), (tgt, ))
    end('')
    status(length)
示例#3
0
def route_direct(file_name,
                 source_id=None,
                 target_id=None,
                 out_file=None,
                 overpass_url=None,
                 disk_cache=False,
                 visualize=False):
    """Route between two nodes with the direct algorithm, backed by an
    overpass query cache (on disk when disk_cache is set, otherwise an
    in-memory cache loaded from / saved back to the pickle file_name)."""
    from limic.util import start, end, status, file_size, load_pickled, save_pickled, save_path, options, replace
    if disk_cache:
        start("Using disk cache", file_name)
        # NOTE(review): set_option is not imported in this function (the
        # util import above brings in `options`, not `set_option`) -- as
        # written this looks like it raises NameError when disk_cache is
        # set; confirm where set_option is meant to come from.
        set_option('disk_cache', file_name)
    from limic.overpass import region, set_server
    if disk_cache:
        status("OK")
    from os.path import exists
    if not disk_cache and exists(file_name):
        # In-memory mode: pre-load the pickled query cache.
        start("Loading", file_name)
        region.backend._cache = load_pickled(file_name)
        end('')
        file_size(file_name)
    # Remember the initial cache size to detect whether routing added entries.
    len_cache = len(region.backend._cache)
    start("Routing using direct algorithm")
    set_server(overpass_url)
    path = astar_direct(source_id, target_id)
    end()
    start("Saving path to", out_file)
    save_path(path, out_file, visualize)
    end()
    if not disk_cache and len_cache != len(region.backend._cache):
        # Cache grew: write to a temp file, then replace the original.
        file_name_tmp = file_name + ".tmp"
        start("Saving to", file_name, "via", file_name_tmp)
        save_pickled(file_name_tmp, region.backend._cache)
        replace(file_name_tmp, file_name)
        end('')
        file_size(file_name)
示例#4
0
def convert_cache_dbm(file_name_in, file_name_out):
    """Convert a pickled cache dict into a GNU dbm database.

    Entries already present in the database are skipped; only entries new
    to the database are written.
    """
    from limic.util import start, end, file_size, status, load_pickled, check_overwrite
    from dbm.gnu import open as dopen
    from os.path import exists
    from pickle import dumps
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading from", file_name_in)
    d = load_pickled(file_name_in)
    end('')
    file_size(file_name_in)
    start("Opening database", file_name_out)
    if not exists(file_name_out):
        # "c" creates the database file if it does not exist yet.
        db = dopen(file_name_out, "c")
        db.close()
    db = dopen(file_name_out, "c")
    end('')
    file_size(file_name_out)
    start("Computing set of entries to save")
    for key in db.keys():
        # Drop entries already stored in the database. pop() with a default
        # tolerates keys present in the db but missing from the pickle
        # (the previous `del d[...]` raised KeyError in that case).
        d.pop(key.decode("utf-8"), None)
    status(len(d))
    start("Saving entries to", file_name_out)
    for key, val in d.items():
        db[key.encode("utf-8")] = dumps(val)
    db.close()
    end('')
    file_size(file_name_out)
示例#5
0
def convert_nx_npz(file_name_in,
                   file_name_out,
                   indirect=False,
                   rescale=False,
                   penalize=20):
    """Convert a pickled NX graph to NPZ format, directly or via GT."""
    from limic.util import start, end, file_size, load_pickled, save_npz, check_overwrite
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading graph from", file_name_in)
    graph = load_pickled(file_name_in)
    end('')
    file_size(file_name_in)
    if not indirect:
        start("Transforming to NPZ format")
        result = transform_nx_npz(graph, rescale)
        end()
    else:
        # Indirect route: NX -> GT -> NPZ, applying the penalty factor.
        start("Transforming to GT format")
        intermediate = transform_nx_gt(graph, rescale)
        end()
        start("Transforming to NPZ format")
        result = transform_gt_npz(intermediate, penalize)
        end()
    start("Saving to", file_name_out)
    save_npz(file_name_out, result)
    end('')
    file_size(file_name_out)
示例#6
0
def prune_nx(file_name_in, file_name_out, polygon, overpass_url):
    """Prune a pickled NX graph to the nodes inside a polygon.

    Uses a local kd-tree query when no overpass URL is given, otherwise
    asks the overpass server for the nodes inside the polygon.
    """
    from limic.util import start, end, file_size, status, save_pickled, load_pickled, check_overwrite
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading from", file_name_in)
    graph = load_pickled(file_name_in)
    end('')
    file_size(file_name_in)
    # Flat [lat, lon, lat, lon, ...] list -> [(lat, lon), ...] pairs.
    coords = list(map(float, polygon))
    polygon = list(zip(coords[::2], coords[1::2]))
    if overpass_url:
        from limic.overpass import nodes_in_geometry, set_server
        start("Query server for nodes in polygon")
        set_server(overpass_url)
        nodes = nodes_in_geometry(polygon)
    else:
        from limic.util import kdtree, nodes_in_geometry
        start("Building kd-tree from nodes")
        tree = kdtree(graph.nodes(), get_latlon=lambda node: (node[1], node[2]))
        end()
        start("Querying tree for nodes in polygon")
        nodes = nodes_in_geometry(tree, polygon)
    end('')
    status(len(nodes))
    start("Pruning graph")
    pruned = prune_ids_nx(graph, nodes)
    end()
    start("Saving to", file_name_out)
    save_pickled(file_name_out, pruned)
    end('')
    file_size(file_name_out)
示例#7
0
def route_nx(file_name,
             source_id=None,
             target_id=None,
             out_file=None,
             visualize=False,
             benchmark=None):
    """Route on a pickled NX graph: one query, or a benchmark over pairs."""
    from limic.util import start, end, load_pickled, save_path
    start("Loading from", file_name)
    graph = load_pickled(file_name)
    end()
    start("Routing using NX")
    if not benchmark:
        path = astar_nx(graph, source_id, target_id)
    else:
        for src, tgt in load_pickled(benchmark):
            path = astar_nx(graph, (src, ), (tgt, ))
    end()
    if not benchmark:
        # Only the single-query mode produces a path worth saving.
        start("Saving path to", out_file)
        save_path(path, out_file, visualize)
        end()
示例#8
0
def length_graph_nx(file_name):
    """Report the total length of a pickled NX graph via rescaled GT form."""
    from limic.util import start, end, status, load_pickled
    from limic.convert import transform_nx_gt
    start("Loading graph from", file_name)
    nx_graph = load_pickled(file_name)
    end()
    start("Transforming graph to rescaled GT format")
    gt_graph = transform_nx_gt(nx_graph, rescale=True)
    end()
    start("Computing length using rescaled GT")
    total = compute_length_gt(gt_graph)
    end('')
    status(total)
示例#9
0
def convert_nx_gt(file_name_in, file_name_out, rescale=False):
    """Convert a pickled NX graph into GT format and save it.

    rescale is forwarded to transform_nx_gt.
    """
    from limic.util import start, end, file_size, load_pickled, save_gt, check_overwrite
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading graph from", file_name_in)
    g = load_pickled(file_name_in)
    end('')
    file_size(file_name_in)
    # Fixed typo in the status message ("Initialzing" -> "Initializing").
    start("Initializing id mapping and neighbours map")
    h = transform_nx_gt(g, rescale)
    end()
    start("Saving to", file_name_out)
    save_gt(file_name_out, h)
    end('')
    file_size(file_name_out)
示例#10
0
def render_nx(file_name_in,
              file_name_out,
              markers=False,
              lines=False,
              host="localhost",
              port=5000,
              prefix=""):
    """Load a pickled NX graph and render it via render()."""
    from limic.util import start, end, load_pickled, check_overwrite
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading NX graph", file_name_in)
    graph = load_pickled(file_name_in)
    node_list = list(graph.nodes())
    edge_list = list(graph.edges.data('weight'))
    end()
    return render(graph, node_list, edge_list, file_name_out, markers, lines,
                  host, port, prefix)
示例#11
0
def merge_cache(file_names, file_name_out):
    """Merge several pickled cache dicts into one output pickle.

    Later files win on key collisions (dict.update semantics).
    """
    from limic.util import start, end, file_size, status, load_pickled, save_pickled, check_overwrites
    if not check_overwrites(file_names, file_name_out):
        return
    merged = {}
    for name in file_names:
        start("Loading cache from", name)
        cache = load_pickled(name)
        end('')
        file_size(name)
        start("Adding", len(cache), "entries")
        merged.update(cache)
        end('')
        status(len(merged))
    start("Saving merged cache to", file_name_out)
    save_pickled(file_name_out, merged)
    end('')
    file_size(file_name_out)
示例#12
0
def extract_osm_post(file_name_in,
                     file_name_out,
                     around=1000,
                     eps=0.01,
                     safe_dist=100,
                     penalize=20):
    """Post-process filtered OSM data from a pickle via osm_post().

    BUG FIX: the keyword arguments passed to osm_post were hard-coded
    literals (1000, 0.01, 100, 20) that silently ignored the caller's
    values; they now forward the actual parameters. Defaults unchanged,
    so existing callers behave identically.
    """
    from limic.util import start, end, file_size, load_pickled, check_overwrite
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading filtered OSM data from", file_name_in)
    lim = load_pickled(file_name_in)
    end('')
    file_size(file_name_in)
    osm_post(lim,
             file_name_out,
             around=around,
             eps=eps,
             safe_dist=safe_dist,
             penalize=penalize)
示例#13
0
def render_cnx(file_name_in,
               file_name_out,
               components=None,
               markers=False,
               lines=False,
               host="localhost",
               port=5000,
               prefix=""):
    """Render components of a pickled CNX graph to per-component HTML files.

    components: iterable of component indices to render; falsy (None or
    empty) renders every component. The mutable default [] was replaced
    by None -- behavior is identical because the body only tests
    truthiness, and an empty list is falsy.
    """
    from limic.util import start, end, load_pickled, check_overwrite
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading CNX graph", file_name_in)
    cs, _, _ = load_pickled(file_name_in)
    end()
    for i in components if components else range(len(cs)):
        nodes = list(cs[i].nodes())
        edges = list(cs[i].edges.data('weight'))
        # Derive "<base>.<i>.html" from the requested output name.
        file_out = file_name_out.split(".html")[0] + "." + str(i) + ".html"
        render(cs[i], nodes, edges, file_out, markers, lines, host, port,
               prefix)
示例#14
0
def merge_nx(file_names, file_name_out):
    """Merge several pickled NX graphs into a single graph pickle.

    Edges are added one by one; attribute dicts from later files win
    on duplicate edges.
    """
    from limic.util import start, end, file_size, status, load_pickled, save_pickled, check_overwrites
    from networkx import Graph
    if not check_overwrites(file_names, file_name_out):
        return
    merged = Graph()
    for name in file_names:
        start("Loading graph from", name)
        part = load_pickled(name)
        end('')
        file_size(name)
        start("Adding", part.number_of_edges(), "edges")
        for u, v, attrs in part.edges(data=True):
            merged.add_edge(u, v, **attrs)
        end('')
        status(merged.number_of_edges())
    start("Saving merged graph to", file_name_out)
    save_pickled(file_name_out, merged)
    end('')
    file_size(file_name_out)
示例#15
0
def select_nx(file_name_in, file_name_out, polygon, overpass_url):
    """Select the subgraph of a pickled NX graph inside a polygon,
    querying the overpass server for the contained nodes."""
    from limic.util import start, end, file_size, status, save_pickled, load_pickled, check_overwrite
    from limic.overpass import nodes_in_geometry, set_server
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading from", file_name_in)
    graph = load_pickled(file_name_in)
    end('')
    file_size(file_name_in)
    start("Query server for nodes in polygon")
    set_server(overpass_url)
    # Flat [lat, lon, lat, lon, ...] -> (lat, lon) pairs.
    # NOTE(review): unlike prune_nx, the coordinates are not converted to
    # float here -- presumably the overpass backend accepts them as-is;
    # confirm against callers.
    lat_lon_pairs = zip(polygon[::2], polygon[1::2])
    nodes = nodes_in_geometry(lat_lon_pairs)
    end('')
    status(len(nodes))
    start("Selecting area from graph")
    selected = select_ids_nx(graph, nodes)
    end()
    start("Saving to", file_name_out)
    save_pickled(file_name_out, selected)
    end('')
    file_size(file_name_out)
示例#16
0
def condense(file_name_in, file_name_out, lengths=False, paths=False):
    """Condense a pickled graph's edges, optionally precomputing
    lengths and/or paths, and save the result."""
    from limic.util import start, end, file_size, status, save_pickled, load_pickled, check_overwrite
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading from", file_name_in)
    graph = load_pickled(file_name_in)
    end('')
    file_size(file_name_in)
    start("Condensing edges")
    condensed = cnx(graph)
    end()
    # Optional precomputation passes, driven by the boolean flags.
    optional_passes = ((lengths, "Computing lengths", condensed.compute_lengths),
                       (paths, "Computing paths", condensed.compute_paths))
    for wanted, label, compute in optional_passes:
        if wanted:
            start(label)
            compute()
            end()
    start("Saving to", file_name_out)
    save_pickled(file_name_out, condensed)
    end('')
    file_size(file_name_out)
示例#17
0
def fill(overpass_url,
         file_name=None,
         area=None,
         around=1000,
         eps=0.01,
         safe_dist=100,
         penalize=20,
         max_workers=None):
    """Fill the overpass query cache for an area using a thread pool.

    Repeatedly submits cache_tower() jobs until a full pass completes
    without any worker flagging a failure via options.failed. SIGINT
    triggers an emergency save of the cache. At least one of `area` or
    `file_name` (cache path) must be given; the missing one is derived
    from the other.
    """
    from limic.overpass import set_server, pylon, region, get_towers_by_area
    # Duplicate `options` removed from this import line (it appeared twice).
    from limic.util import start, end, file_size, status, load_pickled, save_pickled, options, replace
    from networkx import Graph, relabel_nodes
    from os import cpu_count
    from os.path import exists
    from concurrent.futures import ThreadPoolExecutor, wait
    from signal import signal, SIGINT
    if not area and not file_name:
        if options.parser:
            options.parser.error("specify at least one of --area or CACHE")
        else:
            status("ERROR: specify at least area or cache name!")
            from sys import exit
            exit(-1)
    if not area:
        # Cache files are named "cache.<area>"; recover the area name.
        area = file_name.split(".")[1]
    if not file_name:
        file_name = "cache." + area
    if not max_workers:
        max_workers = cpu_count() * 4
    start("Number of workers")
    status(max_workers)
    if exists(file_name):
        start("Loading", file_name)
        region.backend._cache = load_pickled(file_name)
        end('')
        file_size(file_name)
    # Remember the initial size so we only save when new entries appeared.
    len_cache = len(region.backend._cache)
    start("Querying overpass for", area)
    set_server(overpass_url)
    towers = get_towers_by_area(area)
    end()
    fs = []
    executor = ThreadPoolExecutor(max_workers=max_workers)
    interrupt = 0

    def shutdown(sig, frame):
        # SIGINT handler: cancel outstanding futures, wait for the rest to
        # finish, then emergency-save the cache if it grew.
        nonlocal interrupt
        interrupt += 1
        print("Shutting down ...")
        for f in fs:
            f.cancel()
        print("Cancelled all futures ...")
        running = len(fs)
        while running:
            print("Waiting for", running, "processes to shut down ...")
            wait(fs, timeout=60)
            running = sum(0 if f.done() else 1 for f in fs)
        if len_cache != len(region.backend._cache):
            # Suffix with the interrupt count so repeated ^C presses do
            # not overwrite earlier emergency saves.
            file_name_tmp = file_name + "." + str(interrupt)
            start("Emergency saving to", file_name_tmp)
            save_pickled(file_name_tmp, region.backend._cache)
            end('')
            file_size(file_name_tmp)

    signal(SIGINT, shutdown)
    # Retry loop: workers set options.failed on transient failures; keep
    # resubmitting passes until one completes cleanly.
    options.failed = True
    while options.failed:
        options.failed = False
        for tower in towers:
            fs.append(
                executor.submit(cache_tower, tower, around, eps, safe_dist,
                                penalize))
        running = len(fs)
        total = running
        while running:
            print("Waiting for", running, "out of", total, "processes ...")
            wait(fs, timeout=60)
            running = sum(0 if f.done() else 1 for f in fs)
    if len_cache != len(region.backend._cache):
        # Save via a temp file, then replace, to avoid a truncated cache.
        file_name_tmp = file_name + ".tmp"
        start("Saving to", file_name, "via", file_name_tmp)
        save_pickled(file_name_tmp, region.backend._cache)
        replace(file_name_tmp, file_name)
        end('')
        file_size(file_name)
示例#18
0
from limic.util import start, end, file_size, status, load_pickled, save_pickled
from sys import argv
if __name__ == "__main__":
    # CLI: <input pickle> <output pickle> -- rewrite the edge attributes.
    in_path = argv[1]
    out_path = argv[2]
    start("Loading graph from", in_path)
    g = load_pickled(in_path)
    end('')
    status(len(g.edges()), end='   ')
    file_size(in_path)
    start("Cleaning up graph")
    # Replace the boolean 'air' attribute with a numeric 'type'
    # (-1 when 'air' is truthy, else 0).
    for _, _, attrs in g.edges(data=True):
        attrs['type'] = -1 if attrs['air'] else 0
        del attrs['air']
    end()
    start("Saving graph to", out_path)
    save_pickled(out_path, g)
    end('')
    file_size(out_path)
示例#19
0
def extract_cache(file_name_in,
                  file_name_out,
                  overpass_url,
                  area=None,
                  around=1000,
                  eps=0.01,
                  safe_dist=100,
                  penalize=20):
    """Build a safe-tower graph for an area from a pickled overpass cache.

    NOTE(review): the imports below are commented out, yet this function
    calls check_overwrite, start, end, region, set_server, is_safe, etc.
    before the late `from limic.util import ...` further down -- as
    written this raises NameError unless those names already exist at
    module level; confirm the intended imports.
    """
    #from limic.overpass import distance, find_all_neighbours, is_safe, set_server, pylon, region, get_towers_by_area
    #from limic.util import start, end, file_size, status, load_pickled, save_pickled, replace, check_overwrite
    from networkx import Graph
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading", file_name_in)
    region.backend._cache = load_pickled(file_name_in)
    # Remember the initial cache size to detect whether new entries appeared.
    len_cache = len(region.backend._cache)
    end('')
    file_size(file_name_in)
    if not area:
        # Cache files are named "cache.<area>"; recover the area name.
        area = file_name_in.split(".")[1]
    start("Querying overpass for", area)
    set_server(overpass_url)
    towers = get_towers_by_area(area)
    end()
    start("Building safe nodes")
    g = Graph()
    for tower in towers:
        # Only towers passing the safety-distance check become nodes.
        if is_safe(tower, safe_dist):
            g.add_node(tower)


#        else:
#        if verbosity >= 2: print("NOT safe!")
    end('')
    total = len(g.nodes())
    status(total)
    start("Building edges")
    build_edges(g, find_all_neighbours, around, eps, safe_dist, penalize)
    end('')
    status(len(g.edges()))
    if len_cache != len(region.backend._cache):
        # Cache grew during the run: save via a temp file, then replace.
        file_name_tmp = file_name_in + ".tmp"
        start("Saving to", file_name_in, "via", file_name_tmp)
        save_pickled(file_name_tmp, region.backend._cache)
        replace(file_name_tmp, file_name_in)
        end('')
        file_size(file_name_in)
    # Late (re-)imports; earlier calls above already relied on these names.
    from limic.util import start, end, status, file_size, save_pickled
    from networkx import relabel_nodes
    start("Prune redundant edges (incomplete)")
    prune_incomplete(g)
    end('')
    status(len(g.edges()))
    start("Prune redundant edges (complete)")
    prune_complete(g)
    end('')
    status(len(g.edges()))
    start("Cleaning up graph")
    # Replace pylon objects with plain (id, lat, lon) tuples as node keys.
    relabel = dict(
        map(
            lambda tower: (tower,
                           (tower.id, tower.latlon[0], tower.latlon[1])),
            g.nodes()))
    relabel_nodes(g, relabel, copy=False)
    for u, v, d in g.edges(data=True):
        # Normalize edge 'type' to -1 (truthy) or 0.
        d['type'] = -1 if d['type'] else 0
    end()
    start("Saving graph to", file_name_out)
    save_pickled(file_name_out, g)
    end('')
    file_size(file_name_out)