def download_osm(countries,url=None,show=False,max_workers=None):
    """Download the latest OSM .osm.bz2 extracts for the given countries.

    When max_workers is set, the downloads run concurrently in a process
    pool and file sizes are reported once everything has finished;
    otherwise each country is downloaded sequentially with its own
    progress output.
    """
    from limic.util import start,end,file_size,status
    from concurrent.futures import ProcessPoolExecutor, wait
    # common() normalizes the country list and resolves the base URL
    countries, url = common(countries,url,show,osm=True)
    if max_workers:
        executor = ProcessPoolExecutor(max_workers=max_workers)
        fs = []  # one future per country download
        start("Downloading OSM map data for"," ".join(countries))
    for country in countries:
        # country identifiers use "_" where the mirror's URL path uses "/"
        file_url = OSM_URL+country.replace("_","/")+"-latest.osm.bz2"
        file_name = country+"-latest.osm.bz2"
        if max_workers:
            fs.append(executor.submit(download_file,file_url,file_name))
            continue
        start("Downloading OSM map data for",country)
        file_name = download_file(file_url,file_name)
        end('')
        file_size(file_name)
    if max_workers:
        running = len(fs)
        total = running
        # poll every 10s until all download futures are done
        while running:
            print("Waiting for",running,"out of",total,"processes ...")
            wait(fs,timeout=10)
            running = sum(0 if f.done() else 1 for f in fs)
        # report the downloaded file names and sizes
        for f in fs:
            file_name = f.result()
            status(file_name,end=':')
            file_size(file_name,end=' ')
        end()
def convert_nx_npz(file_name_in, file_name_out, indirect=False, rescale=False, penalize=20):
    """Convert a pickled NX graph file to NPZ format and save it.

    With indirect set, the conversion goes NX -> GT -> NPZ instead of
    transforming to NPZ directly.
    """
    from limic.util import start, end, file_size, load_pickled, save_npz, check_overwrite
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading graph from", file_name_in)
    nx_graph = load_pickled(file_name_in)
    end('')
    file_size(file_name_in)
    if not indirect:
        start("Transforming to NPZ format")
        npz_graph = transform_nx_npz(nx_graph, rescale)
        end()
    else:
        # two-step route via the GT representation
        start("Transforming to GT format")
        gt_graph = transform_nx_gt(nx_graph, rescale)
        end()
        start("Transforming to NPZ format")
        npz_graph = transform_gt_npz(gt_graph, penalize)
        end()
    start("Saving to", file_name_out)
    save_npz(file_name_out, npz_graph)
    end('')
    file_size(file_name_out)
def render_npz(file_name_in, file_name_out, markers=False, lines=False, host="localhost", port=5000, prefix=""):
    """Render an NPZ graph to HTML, optionally with edge lines drawn."""
    from limic.util import start, end, load_npz, check_overwrite
    from numpy import column_stack
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading NPZ graph", file_name_in)
    graph = load_npz(file_name_in)
    # each node becomes an (id, lat, long) tuple
    stacked = column_stack((graph['ids'], graph['lat'], graph['long']))
    nodes = [tuple(row) for row in stacked]
    edges = []
    if lines:
        # CSR-style adjacency: id2edges[i]..id2edges[i+1] indexes node i's edges
        id2edges = graph['id2edges']
        weights = graph['edges_weight']
        neighbors = graph['edges_neighbor']
        for index, me in enumerate(nodes):
            for slot in range(id2edges[index], id2edges[index + 1]):
                edges.append((me, nodes[neighbors[slot]], weights[slot]))
    end()
    return render(graph, nodes, edges, file_name_out, markers, lines, host, port, prefix)
def install_packages(no_gt):
    """Check for the modules listed in PACKAGES and install missing ones via PIP.

    graph_tool and dbm.gnu are only warned about (manual install required);
    a missing graph_tool additionally flips no_gt to True.

    Returns the (possibly updated) no_gt flag.
    Raises Exception when a PIP install fails.
    """
    from limic.util import start, end, status
    from importlib.util import find_spec
    packages = PACKAGES.split(",")
    for package in packages:
        if no_gt and package == "graph_tool":
            continue
        start("Checking for module", package)
        if find_spec(package):
            status("OK")
        else:
            status("MISSING")
            if package == "graph_tool":
                no_gt = True
            if package in ("graph_tool", "dbm.gnu"):
                # BUG FIX: added missing space before "is needed" (message
                # previously rendered as e.g. "graph_toolis needed").
                status(
                    "WARNING: " + package +
                    " is needed for some optional functionality - if needed, it has to be installed manually"
                )
                continue
            start("Trying to install", package, "using PIP")
            if install(package) == 0:
                end()
            else:
                status("FAILED")
                raise Exception("could not install", package, "using PIP - manual install?")
    return no_gt
def run(self):
    """Wait briefly for the server to come up, then open its URL in a browser."""
    sleep(0.5)  # give the HTTP server a moment to start listening
    url = "http://%s:%d%s/" % (r_host, r_port, r_prefix)
    start("Open", url, "in browser")
    wopen(url, new=2)  # new=2: open in a new browser tab, if possible
    status("DONE")
def route_direct(file_name, source_id=None, target_id=None, out_file=None, overpass_url=None, disk_cache=False, visualize=False):
    """Route between two tower ids using the direct (Overpass-backed) A*.

    file_name holds the Overpass query cache: either a pickled dict kept
    in memory, or a disk-backed cache when disk_cache is set.
    """
    from limic.util import start, end, status, file_size, load_pickled, save_pickled, save_path, options, replace
    if disk_cache:
        start("Using disk cache", file_name)
        # NOTE(review): set_option is not imported in this function -
        # presumably a module-level helper (cf. the `options` import); confirm.
        set_option('disk_cache', file_name)
    # imported only after the disk_cache option is set, so the region
    # backend can pick the option up at import time
    from limic.overpass import region, set_server
    if disk_cache:
        status("OK")
    from os.path import exists
    if not disk_cache and exists(file_name):
        start("Loading", file_name)
        # pre-populate the in-memory Overpass query cache
        region.backend._cache = load_pickled(file_name)
        end('')
        file_size(file_name)
    len_cache = len(region.backend._cache)  # remember size to detect new entries later
    start("Routing using direct algorithm")
    set_server(overpass_url)
    path = astar_direct(source_id, target_id)
    end()
    start("Saving path to", out_file)
    save_path(path, out_file, visualize)
    end()
    # persist the cache only if routing added entries; write to a temp
    # file first and replace() for an atomic update
    if not disk_cache and len_cache != len(region.backend._cache):
        file_name_tmp = file_name + ".tmp"
        start("Saving to", file_name, "via", file_name_tmp)
        save_pickled(file_name_tmp, region.backend._cache)
        replace(file_name_tmp, file_name)
        end('')
        file_size(file_name)
def download_graph(suffix,countries,url=None,show=False,join=False):
    """Download a pre-built graph file (given suffix/format) per country."""
    from limic.util import start,end,file_size
    countries, url = common(countries,url,show=show,join=join)
    for country in countries:
        start("Downloading",suffix.upper(),"graph for",country)
        # spaces in country names must be URL-encoded in the remote name
        remote = url+"graph."+country.replace(" ","%20")+"."+suffix
        local = download_file(remote,"graph."+country+"."+suffix)
        end('')
        file_size(local)
def tower():
    """Return (as JSON) the tower node nearest to the query-string lat/lng."""
    lat = float(request.args.get('lat'))
    lng = float(request.args.get('lng'))
    start("Finding tower", lat, lng)
    # query() returns (distance, index); index 1 picks the node index
    nearest_index = tree.query(transformer.transform(lat, lng))[1]
    nearest = nodes[nearest_index]
    end('')
    res = jsonify(tower=nearest)
    end()
    return res
def download_cache(countries,url=None,show=False):
    """Download the pre-built Overpass cache file for each country."""
    from limic.util import start,end,file_size
    countries, url = common(countries,url,show)
    for country in countries:
        start("Downloading cache for",country)
        # spaces in country names must be URL-encoded in the remote name
        remote = url+"cache."+country.replace(" ","%20")
        local = download_file(remote,"cache."+country)
        end('')
        file_size(local)
def download_merged(suffixes=("nx","gt","npz"),url=None):
    """Download the merged Europe-wide graph in each of the given formats."""
    from limic.util import start,end,file_size
    if not url:
        url = BASE_URL  # fall back to the project's default mirror
    for suffix in suffixes:
        start("Downloading merged",suffix.upper(),"graph for Europe")
        name = "merged.Europe."+suffix
        local = download_file(url+name,name)
        end('')
        file_size(local)
def init_stage1_osm(countries,no_gt,url=None,show=False,conserve_mem=False,max_workers=None):
    """Stage 1 of initialization from raw OSM data.

    Downloads the OSM extracts for the given countries, extracts the
    relevant data, and converts/merges the results. When max_workers is
    unset it defaults to 4x the CPU count.
    """
    from limic.download import download_osm, common
    from limic.util import start, status
    from os import cpu_count
    if not max_workers:
        # BUG FIX: cpu_count() may return None when the count cannot be
        # determined, which would make `None * 4` raise a TypeError.
        max_workers = (cpu_count() or 1) * 4
    start("Number of workers")
    status(max_workers)
    download_osm(countries,url=url,show=show,max_workers=max_workers)
    countries, url = common(countries,url,show,osm=True)
    extract_osm_all(countries,conserve_mem=conserve_mem,max_workers=max_workers)
    convert_merge_all(countries,no_gt)
def length_route_nx(file_name, source_id=None, target_id=None, benchmark=None):
    """Compute a shortest-path length on a pickled NX graph.

    With benchmark set, iterates over the pickled (source, target) pairs
    and reports the last length computed.
    """
    from limic.util import start, end, load_pickled, status
    start("Loading from", file_name)
    graph = load_pickled(file_name)
    end()
    start("Routing using NX")
    if not benchmark:
        length = shortest_length_nx(graph, source_id, target_id)
    else:
        for source, target in load_pickled(benchmark):
            length = shortest_length_nx(graph, (source,), (target,))
    end('')
    status(length)
def route_cnx(file_name, source_id=None, target_id=None, out_file=None, visualize=False, benchmark=None):
    """Route on a condensed NX graph, with optional benchmark modes.

    benchmark + out_file: run int(benchmark) random routes, collect and
    pickle them. benchmark alone: replay pickled (source, target) pairs.
    Otherwise: route source_id -> target_id and save the path.
    """
    from limic.util import start, end, load_pickled, save_path, save_pickled
    start("Loading from", file_name)
    graph = load_pickled(file_name)
    end()
    collecting = bool(benchmark and out_file)
    start("Routing using condensed NX")
    if collecting:
        routes = []
        for _ in range(int(benchmark)):
            path = astar_cnx(graph, None, None, routes)
    elif benchmark:
        for src, dst in load_pickled(benchmark):
            path = astar_cnx(graph, (src,), (dst,))
    else:
        path = astar_cnx(graph, source_id, target_id)
    end()
    if collecting:
        start("Saving routes to", out_file)
        save_pickled(out_file, routes)
        end()
    elif not benchmark:
        start("Saving path to", out_file)
        save_path(path, out_file, visualize)
        end()
def convert_cache_dbm(file_name_in, file_name_out):
    """Convert a pickled cache dict into a GNU dbm database.

    Entries already present in the database are skipped; only the
    remaining entries from the pickle are written.
    """
    from limic.util import start, end, file_size, status, load_pickled, check_overwrite
    from dbm.gnu import open as dopen
    from os.path import exists
    from pickle import dumps
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading from", file_name_in)
    d = load_pickled(file_name_in)
    end('')
    file_size(file_name_in)
    start("Opening database", file_name_out)
    # NOTE(review): the file is created, closed, and reopened - presumably
    # to ensure the database exists on disk before the real open; the
    # second dopen alone would also create it with mode "c". Confirm intent.
    if not exists(file_name_out):
        db = dopen(file_name_out, "c")
        db.close()
    db = dopen(file_name_out, "c")
    end('')
    file_size(file_name_out)
    start("Computing set of entries to save")
    # drop entries already stored in the database (keys are stored UTF-8)
    # NOTE(review): raises KeyError if the db holds a key absent from the
    # pickle - assumed not to happen for these caches; confirm.
    for key in db.keys():
        del d[key.decode("utf-8")]
    status(len(d))
    start("Saving entries to", file_name_out)
    for key, val in d.items():
        db[key.encode("utf-8")] = dumps(val)  # values stored pickled
    db.close()
    end('')
    file_size(file_name_out)
def extract_osm_pre(file_name_in, file_name_out, white="{'power':'line'}", black="{'power':'substation'}", conserve_mem=False):
    """Pre-filter an OSM file by tag whitelist/blacklist and pickle the result.

    white/black are string representations of tag dicts, e.g.
    "{'power':'line'}".
    """
    from limic.util import start, end, save_pickled, file_size, check_overwrite
    if not check_overwrite(file_name_in, file_name_out):
        return
    # SECURITY NOTE(review): eval() executes arbitrary code - white/black
    # must never come from untrusted input; ast.literal_eval would be the
    # safe alternative for plain dict literals.
    white, black = list(eval(white).items()), list(eval(black).items())
    lim = osm_pre(file_name_in, white, black, conserve_mem)
    start("Saving data to", file_name_out)
    save_pickled(file_name_out, lim)
    end('')
    file_size(file_name_out)
def render_nx(file_name_in, file_name_out, markers=False, lines=False, host="localhost", port=5000, prefix=""):
    """Render a pickled NX graph to HTML."""
    from limic.util import start, end, load_pickled, check_overwrite
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading NX graph", file_name_in)
    graph = load_pickled(file_name_in)
    node_list = list(graph.nodes())
    edge_list = list(graph.edges.data('weight'))  # (u, v, weight) triples
    end()
    return render(graph, node_list, edge_list, file_name_out, markers, lines, host, port, prefix)
def shutdown(sig, frame):
    """Signal handler: cancel outstanding futures and emergency-save the cache.

    Each invocation bumps the enclosing `interrupt` counter so repeated
    interrupts write to distinct dump files.
    """
    nonlocal interrupt
    interrupt += 1
    print("Shutting down ...")
    # cancel() only stops futures that have not started running yet
    for f in fs:
        f.cancel()
    print("Cancelled all futures ...")
    running = len(fs)
    total = running  # NOTE(review): unused below; likely copy-paste leftover
    while running:
        print("Waiting for", running, "processes to shut down ...")
        wait(fs, timeout=60)
        running = sum(0 if f.done() else 1 for f in fs)
    # dump the cache only if it gained entries since startup (len_cache
    # captured in the enclosing scope)
    if len_cache != len(region.backend._cache):
        file_name_tmp = file_name + "." + str(interrupt)
        start("Emergency saving to", file_name_tmp)
        save_pickled(file_name_tmp, region.backend._cache)
        end('')
        file_size(file_name_tmp)
def extract_osm_post(file_name_in, file_name_out, around=1000, eps=0.01, safe_dist=100, penalize=20):
    """Post-process pre-filtered OSM data into the output graph file.

    Loads the pickled intermediate produced by extract_osm_pre and hands
    it to osm_post together with the tuning parameters.
    """
    from limic.util import start, end, file_size, load_pickled, check_overwrite
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading filtered OSM data from", file_name_in)
    lim = load_pickled(file_name_in)
    end('')
    file_size(file_name_in)
    # BUG FIX: forward the caller's arguments - the original passed the
    # hard-coded literals 1000/0.01/100/20, silently ignoring whatever
    # values the caller supplied for around/eps/safe_dist/penalize.
    osm_post(lim, file_name_out, around=around, eps=eps, safe_dist=safe_dist, penalize=penalize)
def render_cnx(file_name_in, file_name_out, components=[], markers=False, lines=False, host="localhost", port=5000, prefix=""):
    """Render selected components of a condensed NX graph, one HTML file each."""
    from limic.util import start, end, load_pickled, check_overwrite
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading CNX graph", file_name_in)
    component_graphs, _, _ = load_pickled(file_name_in)
    end()
    base = file_name_out.split(".html")[0]
    # no explicit selection means: render every component
    selected = components if components else range(len(component_graphs))
    for idx in selected:
        comp = component_graphs[idx]
        out = base + "." + str(idx) + ".html"
        render(comp, list(comp.nodes()), list(comp.edges.data('weight')), out, markers, lines, host, port, prefix)
def condense(file_name_in, file_name_out, lengths=False, paths=False):
    """Condense a pickled NX graph and save the result.

    Optionally precomputes pairwise lengths and/or paths on the
    condensed graph before saving.
    """
    from limic.util import start, end, file_size, status, save_pickled, load_pickled, check_overwrite
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading from", file_name_in)
    graph = load_pickled(file_name_in)
    end('')
    file_size(file_name_in)
    start("Condensing edges")
    condensed = cnx(graph)
    end()
    if lengths:
        start("Computing lengths")
        condensed.compute_lengths()
        end()
    if paths:
        start("Computing paths")
        condensed.compute_paths()
        end()
    start("Saving to", file_name_out)
    save_pickled(file_name_out, condensed)
    end('')
    file_size(file_name_out)
def route():
    """Request handler: route between the source/target given in the query string.

    Reads source[lat]/source[lng]/target[lat]/target[lng], snaps both to
    the nearest graph nodes via the KD-tree, runs A*, and returns the
    path as JSON.
    """
    source_lat = float(request.args.get('source[lat]'))
    source_lng = float(request.args.get('source[lng]'))
    target_lat = float(request.args.get('target[lat]'))
    target_lng = float(request.args.get('target[lng]'))
    # NOTE(review): leftover debug prints - consider removing
    print("SOURCE COMING UP")
    print(source_lat)
    start("Routing", source_lat, source_lng, target_lat, target_lng)
    # snap source to the nearest node; query() returns (distance, index)
    source_index = tree.query(transformer.transform(
        source_lat, source_lng))[1]
    source = nodes[source_index]
    end('')
    # snap target to the nearest node
    target_index = tree.query(transformer.transform(
        target_lat, target_lng))[1]
    target = nodes[target_index]
    end('')
    path = astar(g, (source, source_index), (target, target_index))
    end('')
    # NOTE(review): patches an infinite cost on the last path segment by
    # rebuilding the tuple from its element at index 1 - the combination
    # of [1] and [1:] duplicates that element; confirm the intended slice.
    if path[1][-1][0] == float('inf'):
        path[1][-1] = (path[1][-1][1], ) + path[1][-1][1:]
    res = jsonify(path=path)
    end()
    return res
def length_graph_npz(file_name):
    """Compute and report the total length of an NPZ graph.

    The graph is converted NPZ -> NX -> rescaled GT, and the length is
    computed on the GT representation.
    """
    from limic.util import start, end, status, load_npz
    from limic.convert import transform_npz_nx, transform_nx_gt
    start("Loading graph from", file_name)
    npz_graph = load_npz(file_name)
    end()
    start("Transforming graph to NX format")
    nx_graph = transform_npz_nx(npz_graph)
    end()
    start("Transforming graph to rescaled GT format")
    gt_graph = transform_nx_gt(nx_graph, rescale=True)
    end()
    start("Computing length using rescaled GT")
    total = compute_length_gt(gt_graph)
    end('')
    status(total)
def convert_gt_nx(file_name_in, file_name_out, penalize=20):
    """Convert a GT graph file to a pickled NX graph (via NPZ)."""
    from limic.util import start, end, file_size, load_gt, save_pickled, check_overwrite
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading graph from", file_name_in)
    gt_graph = load_gt(file_name_in)
    end('')
    file_size(file_name_in)
    start("Transforming to NPZ format")
    npz_graph = transform_gt_npz(gt_graph, penalize)
    end()
    start("Transforming to NX format")
    nx_graph = transform_npz_nx(npz_graph, penalize)
    end()
    start("Saving to", file_name_out)
    save_pickled(file_name_out, nx_graph)
    end('')
    file_size(file_name_out)
def convert_npz_gt(file_name_in, file_name_out, rescale=False, penalize=20):
    """Convert an NPZ graph file to GT format (via NX)."""
    from limic.util import start, end, file_size, save_gt, load_npz, check_overwrite
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading graph from", file_name_in)
    npz_graph = load_npz(file_name_in)
    end('')
    file_size(file_name_in)
    start("Transforming to NX format")
    nx_graph = transform_npz_nx(npz_graph, penalize)
    end()
    start("Transforming to GT format")
    gt_graph = transform_nx_gt(nx_graph, rescale)
    end()
    start("Saving to", file_name_out)
    save_gt(file_name_out, gt_graph)
    end('')
    file_size(file_name_out)
def prune_gt(file_name_in, file_name_out, polygon, overpass_url):
    """Prune a GT graph down to the nodes inside the given polygon.

    polygon is a flat list of alternating lat/lng coordinates; the node
    set inside it is obtained from the Overpass server.
    """
    from limic.util import start, end, file_size, status, save_gt, load_gt, check_overwrite
    from limic.overpass import nodes_in_geometry, set_server
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading from", file_name_in)
    graph = load_gt(file_name_in)
    end('')
    file_size(file_name_in)
    start("Query server for nodes in polygon")
    set_server(overpass_url)
    # pair up the flat coordinate list into (lat, lng) tuples
    coords = zip(polygon[0::2], polygon[1::2])
    inside = nodes_in_geometry(coords)
    end('')
    status(len(inside))
    start("Pruning graph")
    pruned = prune_ids_gt(graph, inside)
    end()
    start("Saving to", file_name_out)
    save_gt(file_name_out, pruned)
    end('')
    file_size(file_name_out)
def convert_nx_gt(file_name_in, file_name_out, rescale=False):
    """Convert a pickled NX graph to GT format and save it.

    With rescale set, coordinates are rescaled during the transform.
    """
    from limic.util import start, end, file_size, load_pickled, save_gt, check_overwrite
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading graph from", file_name_in)
    g = load_pickled(file_name_in)
    end('')
    file_size(file_name_in)
    # FIX: corrected typo in user-facing status message ("Initialzing")
    start("Initializing id mapping and neighbours map")
    h = transform_nx_gt(g, rescale)
    end()
    start("Saving to", file_name_out)
    save_gt(file_name_out, h)
    end('')
    file_size(file_name_out)
def convert_gt_npz(file_name_in, file_name_out, penalize=20):
    """Convert a GT graph to NPZ format and save it.

    penalize is forwarded to the GT->NPZ transform.
    """
    from limic.util import start, end, file_size, load_gt, save_npz, check_overwrite
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading graph from", file_name_in)
    g = load_gt(file_name_in)
    end('')
    file_size(file_name_in)
    # FIX: corrected typo in user-facing status message ("Initialzing")
    start("Initializing id mapping and neighbours map")
    h = transform_gt_npz(g, penalize)
    end()
    start("Saving to", file_name_out)
    save_npz(file_name_out, h)
    end('')
    file_size(file_name_out)
def merge_cache(file_names, file_name_out):
    """Merge several pickled cache dicts into one and save it.

    Later files win on duplicate keys (plain dict.update semantics).
    """
    from limic.util import start, end, file_size, status, load_pickled, save_pickled, check_overwrites
    if not check_overwrites(file_names, file_name_out):
        return
    merged = {}
    for path in file_names:
        start("Loading cache from", path)
        cache = load_pickled(path)
        end('')
        file_size(path)
        start("Adding", len(cache), "entries")
        merged.update(cache)
        end('')
        status(len(merged))
    start("Saving merged cache to", file_name_out)
    save_pickled(file_name_out, merged)
    end('')
    file_size(file_name_out)
def convert_dbm_cache(file_name_in, file_name_out):
    """Convert a GNU dbm cache database back into a pickled dict."""
    from limic.util import start, end, file_size, status, save_pickled, check_overwrite
    from dbm.gnu import open as dopen
    from pickle import loads
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Opening database", file_name_in)
    db = dopen(file_name_in, "r")
    end('')
    file_size(file_name_in)
    start("Converting to dictionary")
    # keys are stored UTF-8 encoded, values pickled
    cache = {key.decode("utf-8"): loads(db[key]) for key in db.keys()}
    db.close()
    end()
    start("Saving to", file_name_out)
    save_pickled(file_name_out, cache)
    end('')
    file_size(file_name_out)
def merge_nx(file_names, file_name_out):
    """Merge several pickled NX graphs into a single graph and save it."""
    from limic.util import start, end, file_size, status, load_pickled, save_pickled, check_overwrites
    from networkx import Graph
    if not check_overwrites(file_names, file_name_out):
        return
    merged = Graph()
    for path in file_names:
        start("Loading graph from", path)
        part = load_pickled(path)
        end('')
        file_size(path)
        start("Adding", part.number_of_edges(), "edges")
        # copy every edge with its attribute dict into the merged graph
        for u, v, attrs in part.edges(data=True):
            merged.add_edge(u, v, **attrs)
        end('')
        status(merged.number_of_edges())
    start("Saving merged graph to", file_name_out)
    save_pickled(file_name_out, merged)
    end('')
    file_size(file_name_out)