def route_npz(file_name, source_id=None, target_id=None, out_file=None, indirect=False, penalize=20, visualize=False):
    """Route between two tower ids on an NPZ graph and save the path.

    A rescaled graph (integer edge weights) cannot be routed with the NPZ
    A* directly, so it is converted to NX form first; otherwise the NX
    detour is taken only when `indirect` is requested explicitly.
    """
    from limic.util import start, end, status, save_path, load_npz
    from limic.convert import transform_npz_nx
    from numpy import int32
    start("Loading from", file_name)
    graph = load_npz(file_name)
    end()
    start("Checking whether GT graph is rescaled")
    # Integer edge weights are the marker left behind by rescaling.
    rescaled = graph['edges_weight'].dtype == int32
    if rescaled:
        indirect = True
    status("YES (forcing routing through NX)" if rescaled else "NO")
    if indirect:
        start("Transforming graph to NX format")
        nx_graph = transform_npz_nx(graph, penalize)
        end()
        start("Routing using NX")
        route = astar_nx(nx_graph, source_id, target_id)
        end()
    else:
        start("Routing using NPZ")
        route = astar_npz(graph, source_id, target_id)
        end()
    start("Saving path to", out_file)
    save_path(route, out_file, visualize)
    end()
def route_gt(file_name, source_id=None, target_id=None, out_file=None, indirect=False, penalize=20, visualize=False):
    """Route between two tower ids on a GT graph and save the path.

    A rescaled GT graph cannot be routed directly, so it is converted to
    NPZ form first; otherwise GT A* is used unless `indirect` explicitly
    requests the NPZ detour.
    """
    from limic.util import start, end, status, load_gt, save_path
    from limic.convert import transform_gt_npz
    start("Loading graph from", file_name)
    graph = load_gt(file_name)
    end()
    start("Checking whether GT graph is rescaled")
    # Graph-level property set by the rescaling conversion.
    rescaled = graph.gp.rescaled
    if rescaled:
        indirect = True
    status("YES (forcing routing through NPZ)" if rescaled else "NO")
    if indirect:
        start("Transforming graph to NPZ format")
        npz_graph = transform_gt_npz(graph, penalize)
        end()
        start("Routing using NPZ")
        route = astar_npz(npz_graph, source_id, target_id)
        end()
    else:
        start("Routing using GT")
        route = astar_gt(graph, source_id, target_id)
        end()
    start("Saving path to", out_file)
    save_path(route, out_file, visualize)
    end()
def download_osm(countries, url=None, show=False, max_workers=None):
    """Download OSM extracts (<country>-latest.osm.bz2) for the given countries.

    With max_workers set, downloads run in a process pool; otherwise they
    run sequentially with per-country progress output.  Relies on the
    module-level OSM_URL constant and the sibling functions common() and
    download_file() (defined elsewhere in this module).
    """
    from limic.util import start, end, file_size, status
    from concurrent.futures import ProcessPoolExecutor, wait
    # common() normalizes the country list and resolves the mirror URL.
    countries, url = common(countries, url, show, osm=True)
    if max_workers:
        executor = ProcessPoolExecutor(max_workers=max_workers)
        fs = []
    start("Downloading OSM map data for", " ".join(countries))
    for country in countries:
        # Countries use '_' as a path separator in their names.
        file_url = OSM_URL + country.replace("_", "/") + "-latest.osm.bz2"
        file_name = country + "-latest.osm.bz2"
        if max_workers:
            fs.append(executor.submit(download_file, file_url, file_name))
            continue
        start("Downloading OSM map data for", country)
        file_name = download_file(file_url, file_name)
        end('')
        file_size(file_name)
    if max_workers:
        # Poll until all submitted downloads have finished, reporting progress.
        running = len(fs)
        total = running
        while running:
            print("Waiting for", running, "out of", total, "processes ...")
            wait(fs, timeout=10)
            running = sum(0 if f.done() else 1 for f in fs)
        for f in fs:
            file_name = f.result()
            status(file_name, end=':')
            file_size(file_name, end=' ')
    end()
def route_direct(file_name, source_id=None, target_id=None, out_file=None, overpass_url=None, disk_cache=False, visualize=False):
    """Route directly against the Overpass service, using file_name as cache.

    Without disk_cache, the pickled cache file is loaded into memory and
    written back (atomically, via a .tmp file) only if routing added
    entries.  With disk_cache, the overpass backend is pointed at the
    file instead.  NOTE(review): set_option and astar_direct are resolved
    from enclosing module scope — confirm they are imported/defined there.
    """
    from limic.util import start, end, status, file_size, load_pickled, save_pickled, save_path, options, replace
    if disk_cache:
        start("Using disk cache", file_name)
        set_option('disk_cache', file_name)
    from limic.overpass import region, set_server
    if disk_cache:
        status("OK")
    from os.path import exists
    if not disk_cache and exists(file_name):
        start("Loading", file_name)
        region.backend._cache = load_pickled(file_name)
        end('')
        file_size(file_name)
    # Remember the cache size so we only rewrite the file when it grew.
    # (Kept outside the if above so it is defined even when no cache file
    # exists yet — presumably the backend cache starts empty; verify.)
    len_cache = len(region.backend._cache)
    start("Routing using direct algorithm")
    set_server(overpass_url)
    path = astar_direct(source_id, target_id)
    end()
    start("Saving path to", out_file)
    save_path(path, out_file, visualize)
    end()
    if not disk_cache and len_cache != len(region.backend._cache):
        # Write to a temp file first, then rename for atomic replacement.
        file_name_tmp = file_name + ".tmp"
        start("Saving to", file_name, "via", file_name_tmp)
        save_pickled(file_name_tmp, region.backend._cache)
        replace(file_name_tmp, file_name)
        end('')
        file_size(file_name)
def run(self):
    """Wait briefly for the server to come up, then open it in a browser."""
    # Give the HTTP server a moment to start listening.
    pause = 0.5
    sleep(pause)
    url = "http://%s:%d%s/" % (r_host, r_port, r_prefix)
    start("Open", url, "in browser")
    # new=2 asks the webbrowser module for a fresh browser tab.
    wopen(url, new=2)
    status("DONE")
def convert_cache_dbm(file_name_in, file_name_out):
    """Convert a pickled cache dict into a GNU dbm database.

    Entries whose keys are already present in the database are skipped;
    the remaining values are pickled individually and stored under their
    UTF-8 encoded keys.
    """
    from limic.util import start, end, file_size, status, load_pickled, check_overwrite
    from dbm.gnu import open as dopen
    from os.path import exists
    from pickle import dumps
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading from", file_name_in)
    entries = load_pickled(file_name_in)
    end('')
    file_size(file_name_in)
    start("Opening database", file_name_out)
    # Create the database file first if it does not exist yet.
    if not exists(file_name_out):
        database = dopen(file_name_out, "c")
        database.close()
    database = dopen(file_name_out, "c")
    end('')
    file_size(file_name_out)
    start("Computing set of entries to save")
    # Drop everything the database already contains.
    for stored_key in database.keys():
        del entries[stored_key.decode("utf-8")]
    status(len(entries))
    start("Saving entries to", file_name_out)
    for key, value in entries.items():
        database[key.encode("utf-8")] = dumps(value)
    database.close()
    end('')
    file_size(file_name_out)
def prune_nx(file_name_in, file_name_out, polygon, overpass_url):
    """Restrict a pickled NX graph to the nodes inside a polygon.

    polygon is a flat sequence lat0 lon0 lat1 lon1 ...  Node selection
    uses a local kd-tree unless overpass_url is given, in which case the
    Overpass server is queried instead.  prune_ids_nx is a sibling helper
    defined elsewhere in this module.
    """
    from limic.util import start, end, file_size, status, save_pickled, load_pickled, check_overwrite
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading from", file_name_in)
    g = load_pickled(file_name_in)
    end('')
    file_size(file_name_in)
    # Flat list of (possibly string) coordinates -> list of (lat, lon) floats.
    polygon = list(map(float, polygon))
    polygon = list(zip(polygon[::2], polygon[1::2]))
    if not overpass_url:
        from limic.util import kdtree, nodes_in_geometry
        start("Building kd-tree from nodes")
        # Graph nodes are (id, lat, lon) tuples; index them by coordinates.
        tree = kdtree(g.nodes(), get_latlon=lambda x: (x[1], x[2]))
        end()
        start("Querying tree for nodes in polygon")
        nodes = nodes_in_geometry(tree, polygon)
    else:
        from limic.overpass import nodes_in_geometry, set_server
        start("Query server for nodes in polygon")
        set_server(overpass_url)
        nodes = nodes_in_geometry(polygon)
    end('')
    status(len(nodes))
    start("Pruning graph")
    h = prune_ids_nx(g, nodes)
    end()
    start("Saving to", file_name_out)
    save_pickled(file_name_out, h)
    end('')
    file_size(file_name_out)
def init_stage1_osm(countries, no_gt, url=None, show=False, conserve_mem=False, max_workers=None):
    """Stage-1 initialization: download, extract and merge OSM data.

    Downloads the OSM extracts for the given countries, then runs the
    sibling pipeline steps extract_osm_all() and convert_merge_all()
    (defined elsewhere in this module).  no_gt disables graph-tool output.
    """
    from limic.download import download_osm, common
    from limic.util import start, status
    from os import cpu_count
    if not max_workers:
        # cpu_count() may return None on exotic platforms; fall back to 1
        # so the multiplication cannot raise TypeError.
        max_workers = (cpu_count() or 1) * 4
    start("Number of workers")
    status(max_workers)
    download_osm(countries, url=url, show=show, max_workers=max_workers)
    # Normalize country names / resolve mirror URL the same way download did.
    countries, url = common(countries, url, show, osm=True)
    extract_osm_all(countries, conserve_mem=conserve_mem, max_workers=max_workers)
    convert_merge_all(countries, no_gt)
def length_route_nx(file_name, source_id=None, target_id=None, benchmark=None):
    """Report a shortest-path length on a pickled NX graph.

    With `benchmark`, every (source, target) pair from the pickled
    benchmark file is routed and the last computed length is reported.
    """
    from limic.util import start, end, load_pickled, status
    start("Loading from", file_name)
    graph = load_pickled(file_name)
    end()
    start("Routing using NX")
    if benchmark:
        for src, dst in load_pickled(benchmark):
            length = shortest_length_nx(graph, (src, ), (dst, ))
    else:
        length = shortest_length_nx(graph, source_id, target_id)
    end('')
    status(length)
def length_graph_nx(file_name):
    """Total length of a pickled NX graph, computed via rescaled GT form."""
    from limic.util import start, end, status, load_pickled
    from limic.convert import transform_nx_gt
    start("Loading graph from", file_name)
    nx_graph = load_pickled(file_name)
    end()
    start("Transforming graph to rescaled GT format")
    # Length computation requires the rescaled GT representation.
    gt_graph = transform_nx_gt(nx_graph, rescale=True)
    end()
    start("Computing length using rescaled GT")
    total = compute_length_gt(gt_graph)
    end('')
    status(total)
def install_packages(no_gt):
    """Check for required modules and install missing ones via PIP.

    graph_tool and dbm.gnu cannot be pip-installed and are only warned
    about; a missing graph_tool flips no_gt on.  Returns the (possibly
    updated) no_gt flag.  Relies on the module-level PACKAGES constant
    and the sibling install() helper.
    """
    from limic.util import start, end, status
    from importlib.util import find_spec
    packages = PACKAGES.split(",")
    for package in packages:
        # graph_tool is optional; skip the check entirely when disabled.
        if no_gt and package == "graph_tool":
            continue
        start("Checking for module", package)
        if find_spec(package):
            status("OK")
        else:
            status("MISSING")
            if package == "graph_tool":
                no_gt = True
            if package in ("graph_tool", "dbm.gnu"):
                # Fixed: missing space before "is needed" in the original message.
                status(
                    "WARNING: " + package +
                    " is needed for some optional functionality - if needed, it has to be installed manually"
                )
                continue
            start("Trying to install", package, "using PIP")
            if install(package) == 0:
                end()
            else:
                status("FAILED")
                raise Exception("could not install", package, "using PIP - manual install?")
    return no_gt
def length_graph_gt(file_name):
    """Total length of a GT graph; non-rescaled graphs are reconverted first.

    Reconversion goes GT -> NPZ -> NX -> rescaled GT before the length
    can be computed.
    """
    from limic.util import start, end, status, load_gt
    from limic.convert import transform_gt_npz, transform_npz_nx, transform_nx_gt
    start("Loading graph from", file_name)
    graph = load_gt(file_name)
    end()
    start("Checking whether GT graph is rescaled")
    if graph.gp.rescaled:
        status("YES")
        start("Computing length using GT")
        total = compute_length_gt(graph)
        end('')
        status(total)
    else:
        status("NO (forcing reconversion)")
        start("Transforming graph to NPZ format")
        npz_graph = transform_gt_npz(graph, penalize=20)
        end()
        start("Transforming graph to NX format")
        nx_graph = transform_npz_nx(npz_graph)
        end()
        start("Transforming graph to rescaled GT format")
        rescaled_graph = transform_nx_gt(nx_graph, rescale=True)
        end()
        start("Computing length using rescaled GT")
        total = compute_length_gt(rescaled_graph)
        end('')
        status(total)
def merge_cache(file_names, file_name_out):
    """Merge several pickled cache dicts into one output pickle.

    Later files win on key collisions (plain dict.update semantics).
    """
    from limic.util import start, end, file_size, status, load_pickled, save_pickled, check_overwrites
    if not check_overwrites(file_names, file_name_out):
        return
    merged = {}
    for path in file_names:
        start("Loading cache from", path)
        part = load_pickled(path)
        end('')
        file_size(path)
        start("Adding", len(part), "entries")
        merged.update(part)
        end('')
        status(len(merged))
    start("Saving merged cache to", file_name_out)
    save_pickled(file_name_out, merged)
    end('')
    file_size(file_name_out)
def merge_nx(file_names, file_name_out):
    """Merge several pickled NX graphs into a single graph pickle.

    Edges are copied with their attribute dicts; later files win on
    duplicate edges.
    """
    from limic.util import start, end, file_size, status, load_pickled, save_pickled, check_overwrites
    from networkx import Graph
    if not check_overwrites(file_names, file_name_out):
        return
    merged = Graph()
    for path in file_names:
        start("Loading graph from", path)
        part = load_pickled(path)
        end('')
        file_size(path)
        start("Adding", part.number_of_edges(), "edges")
        for u, v, attrs in part.edges(data=True):
            merged.add_edge(u, v, **attrs)
        end('')
        status(merged.number_of_edges())
    start("Saving merged graph to", file_name_out)
    save_pickled(file_name_out, merged)
    end('')
    file_size(file_name_out)
def prune_gt(file_name_in, file_name_out, polygon, overpass_url):
    """Restrict a GT graph to the nodes inside a polygon (via Overpass).

    polygon is a flat sequence lat0 lon0 lat1 lon1 ...; prune_ids_gt is a
    sibling helper defined elsewhere in this module.
    """
    from limic.util import start, end, file_size, status, save_gt, load_gt, check_overwrite
    from limic.overpass import nodes_in_geometry, set_server
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading from", file_name_in)
    g = load_gt(file_name_in)
    end('')
    file_size(file_name_in)
    start("Query server for nodes in polygon")
    set_server(overpass_url)
    # Convert coordinates to floats first (CLI passes strings) — matches
    # prune_nx; the original passed raw values straight through.
    polygon = list(map(float, polygon))
    nodes = nodes_in_geometry(zip(polygon[::2], polygon[1::2]))
    end('')
    status(len(nodes))
    start("Pruning graph")
    h = prune_ids_gt(g, nodes)
    end()
    start("Saving to", file_name_out)
    save_gt(file_name_out, h)
    end('')
    file_size(file_name_out)
def fill(overpass_url, file_name=None, area=None, around=1000, eps=0.01, safe_dist=100, penalize=20, max_workers=None):
    """Fill the Overpass cache for all towers in an area, in parallel.

    Submits cache_tower() (a sibling defined elsewhere) for every tower
    found in the area and retries the whole batch while options.failed is
    set by workers.  Ctrl-C triggers an emergency cache dump.  Either
    --area or the cache file name must be given; each can be derived from
    the other.
    """
    from limic.overpass import set_server, pylon, region, get_towers_by_area
    # NOTE(review): 'options' is listed twice in this import.
    from limic.util import start, end, file_size, status, load_pickled, save_pickled, options, replace, options
    from networkx import Graph, relabel_nodes
    from os import cpu_count
    from os.path import exists
    from concurrent.futures import ThreadPoolExecutor, wait
    from signal import signal, SIGINT
    if not area and not file_name:
        if options.parser:
            options.parser.error("specify at least one of --area or CACHE")
        else:
            status("ERROR: specify at least area or cache name!")
            from sys import exit
            exit(-1)
    # Derive the missing one of area / cache file name from the other
    # (cache files are named "cache.<area>").
    if not area:
        area = file_name.split(".")[1]
    if not file_name:
        file_name = "cache." + area
    if not max_workers:
        # NOTE(review): cpu_count() can return None, which would raise here.
        max_workers = cpu_count() * 4
    start("Number of workers")
    status(max_workers)
    if exists(file_name):
        start("Loading", file_name)
        region.backend._cache = load_pickled(file_name)
        end('')
        file_size(file_name)
    # Remember cache size so we only save when new entries were added.
    len_cache = len(region.backend._cache)
    start("Querying overpass for", area)
    set_server(overpass_url)
    towers = get_towers_by_area(area)
    end()
    fs = []
    executor = ThreadPoolExecutor(max_workers=max_workers)
    interrupt = 0
    def shutdown(sig, frame):
        # SIGINT handler: cancel pending futures, wait for running ones,
        # then dump the cache to a numbered emergency file.
        nonlocal interrupt
        interrupt += 1
        print("Shutting down ...")
        for f in fs:
            f.cancel()
        print("Cancelled all futures ...")
        running = len(fs)
        total = running
        while running:
            print("Waiting for", running, "processes to shut down ...")
            wait(fs, timeout=60)
            running = sum(0 if f.done() else 1 for f in fs)
        if len_cache != len(region.backend._cache):
            file_name_tmp = file_name + "." + str(interrupt)
            start("Emergency saving to", file_name_tmp)
            save_pickled(file_name_tmp, region.backend._cache)
            end('')
            file_size(file_name_tmp)
    signal(SIGINT, shutdown)
    # Workers set options.failed on transient errors; retry the whole
    # batch until a pass completes without failures.
    options.failed = True
    while options.failed:
        options.failed = False
        for tower in towers:
            fs.append(
                executor.submit(cache_tower, tower, around, eps, safe_dist,
                                penalize))
        running = len(fs)
        total = running
        while running:
            print("Waiting for", running, "out of", total, "processes ...")
            wait(fs, timeout=60)
            running = sum(0 if f.done() else 1 for f in fs)
    if len_cache != len(region.backend._cache):
        # Atomic save: write to .tmp, then rename over the real file.
        file_name_tmp = file_name + ".tmp"
        start("Saving to", file_name, "via", file_name_tmp)
        save_pickled(file_name_tmp, region.backend._cache)
        replace(file_name_tmp, file_name)
        end('')
        file_size(file_name)
def download_file(url, file_name, retries=10):
    """Download url to file_name with retries and optional MD5 verification.

    When options.md5sum is on, the server-side <url>.md5 is fetched first
    and the download is skipped if the local file already matches; after a
    download the checksum is verified and written to <file_name>.md5.
    Returns file_name; exits the process on persistent HTTP/checksum errors.
    """
    from requests import get
    from shutil import copyfileobj
    from sys import exit
    from limic.util import status, md5file, options
    from os.path import exists
    if options.md5sum:
        file_md5 = md5file(file_name) if exists(file_name) else None
        md5_retries = retries
        while True:
            timed_out = False
            try:
                r = get(url + ".md5", timeout=5)
            except Exception:
                # Treat any network error like a timeout and retry.
                timed_out = True
            if not timed_out and r.status_code == 200:
                break
            md5_retries -= 1
            if not md5_retries:
                status("ERROR(file.md5): HTTP status 200 expected, got " + str(r.status_code) + " for " + url + ".md5")
                exit(-1)
            status("WARNING(download): RETRYING " + url + ".md5")
        url_md5 = r.content.split()[0].decode('utf8')
        if file_md5 and url_md5 == file_md5:
            # Local copy already matches the server checksum.
            if options.verbosity >= 2:
                status('SKIPPING', end=' ')
            return file_name
    while True:
        r = get(url, stream=True)
        if r.status_code == 200:
            break
        retries -= 1
        if not retries:
            status("ERROR(file): HTTP status 200 expected, got " + str(r.status_code) + " for " + url)
            exit(-1)
        # Fixed: this loop retries the payload URL, not the .md5 file.
        status("WARNING(download): RETRYING " + url)
    # Stream the response body straight to disk.
    with open(file_name, 'wb') as f:
        copyfileobj(r.raw, f)
    if options.md5sum:
        file_md5 = md5file(file_name)
        if url_md5 != file_md5:
            status("ERROR(md5): " + file_md5 + " vs " + url_md5)
            exit(-1)
        # Record the verified checksum alongside the download.
        with open(file_name + ".md5", "wt") as f:
            f.write(file_md5 + " " + file_name)
    return file_name
def osm_post(lim, file_name_out, around=1000, eps=0.01, safe_dist=100, penalize=20):
    """Post-process pre-extracted OSM data into a routable tower graph.

    lim is the tuple returned by osm_pre().  Builds the power-line graph,
    removes towers inside substations, splits edges at geometric
    intersections, adds penalized "through air" edges between neighbours
    within `around` meters, prunes redundant edges and pickles the result.
    Relies on sibling helpers prune_incomplete() / prune_complete().
    """
    from limic.util import start, end, status, file_size, load_pickled, distance, save_pickled
    from networkx import Graph
    from itertools import chain
    from limic.overpass import intersect, pylon
    lines, substations, towers, id2tower, id2node, id2lines, id2types = lim
    start("Building KD-tree from white nodes")
    from limic.util import kdtree
    towers_tree = kdtree(towers, get_latlon=lambda x: x.latlon)
    end('')
    status(len(towers))
    start("Deleting black nodes")
    # Towers that fall inside any substation polygon are removed.
    to_delete = set()
    from limic.util import nodes_in_geometry
    for substation in substations:
        to_delete.update(
            nodes_in_geometry(towers_tree,
                              list(map(lambda x: id2node[x], substation))))
    towers = [tower for tower in towers if tower not in to_delete]
    end('')
    status(len(towers))
    start("Building initial graph")
    g = Graph()
    g.add_nodes_from(towers)
    for line in lines:
        line_nodes = list(map(lambda x: id2tower[x], line))
        for from_node, to_node in zip(line_nodes, line_nodes[1:]):
            if from_node in to_delete or to_node in to_delete:
                continue
            w = distance(from_node.latlon, to_node.latlon)
            g.add_edge(from_node, to_node, weight=w, type=id2types[from_node.id])
    end('')
    status(len(g.nodes()), end='/')
    status(len(g.edges()))
    start("Finding neighbours within " + str(around) + "m")
    # Rebuild the tree: towers inside substations were removed above.
    towers_tree = kdtree(towers, get_latlon=lambda x: x.latlon)
    end('')
    # Fixed: was hard-coded around=1000, ignoring the `around` parameter
    # announced in the status line above.
    neighbour_indices, neighbours = towers_tree.get_neighbours(around=around)
    end()
    start("Computing non-logical intersections")
    tower2index = {}
    for i, t in zip(range(len(towers)), towers):
        tower2index[t] = i
    # Freeze line lists to tuples so they can be put into sets below.
    for k, v in id2lines.items():
        id2lines[k] = tuple(map(tuple, v))
    end('')
    # Collect edge pairs that are geometrically close but share no line.
    segments = set()
    for u, v in g.edges():
        this = (u, v) if u < v else (v, u)
        ui, vi = tower2index[u], tower2index[v]
        lines = set()
        lines.update(id2lines[u.id])
        lines.update(id2lines[v.id])
        for neighbour in chain(neighbours[ui], neighbours[vi]):
            if neighbour == u or neighbour == v:
                continue
            if not lines.intersection(id2lines[neighbour.id]):
                for nn in g.neighbors(neighbour):
                    other = (neighbour, nn) if neighbour < nn else (nn, neighbour)
                    segments.add(tuple(sorted((this, other))))
    end('')
    status(len(segments), end=' ')
    neighbours2intersection = {}
    minusid = 0
    latlon2id = {}
    segments2intersections = {}
    for (t1, t2), (t3, t4) in segments:
        res = intersect(t1.latlon, t2.latlon, t3.latlon, t4.latlon,
                        eps=eps, no_tu=False)
        if res:
            intersection, (t, u) = res
            if not intersection in latlon2id:
                # Synthetic intersection nodes get fresh negative ids.
                minusid -= 1
                latlon2id[intersection] = minusid
            segments2intersections.setdefault((t1, t2), []).append(
                (t, latlon2id[intersection], intersection))
            segments2intersections.setdefault((t3, t4), []).append(
                (u, latlon2id[intersection], intersection))
    end('')
    status(-minusid, end=' ')
    # Split each intersected edge into a chain through its intersections,
    # ordered by the parametric position t along the segment.
    for (u, v), intersections in segments2intersections.items():
        intersections.sort()
        g.remove_edge(u, v)
        edge_type = id2types[u.id]
        assert (edge_type == id2types[v.id])
        seq = [u]
        for _, node_id, latlon in intersections:
            seq.append(pylon(node_id, latlon))
        seq.append(v)
        for from_node, to_node in zip(seq, seq[1:]):
            w = distance(from_node.latlon, to_node.latlon)
            g.add_edge(from_node, to_node, weight=w, type=edge_type)
    end()
    start("Adding routing through air")
    # Connect close-by towers that are not yet connected, at a penalty.
    airs = set()
    for ns in neighbours:
        n = ns[0]
        for m in ns[1:]:
            if not g.has_edge(n, m):
                airs.add((n, m))
    end('')
    for n, m in airs:
        w = penalize * distance(n.latlon, m.latlon)
        g.add_edge(n, m, weight=w, type=-1)
    end('')
    status(len(g.nodes()), end='/')
    status(len(g.edges()))
    from networkx import relabel_nodes
    start("Prune redundant edges (incomplete)")
    prune_incomplete(g)
    end('')
    status(len(g.edges()))
    start("Prune redundant edges (complete)")
    prune_complete(g)
    end('')
    status(len(g.edges()))
    start("Cleaning up graph")
    # Replace pylon objects by plain (id, lat, lon) tuples for pickling.
    relabel = dict(
        map(lambda tower: (tower, (tower.id, tower.latlon[0], tower.latlon[1])),
            g.nodes()))
    relabel_nodes(g, relabel, copy=False)
    end()
    start("Saving graph to", file_name_out)
    save_pickled(file_name_out, g)
    end('')
    file_size(file_name_out)
def osm_pre(file_name_in, white=[("power", "line")], black=[("power", "substation")], conserve_mem=False):
    """Extract power-line ways ("white") and substations ("black") from OSM XML.

    Scans the (optionally bz2-compressed) OSM file twice: once for ways,
    once for the nodes those ways reference.  Returns
    (lines, substations, towers, id2tower, id2node, id2lines, id2types).
    With conserve_mem the file is re-read from disk per pass; otherwise it
    is decompressed once into an anonymous mmap.
    NOTE(review): mutable default arguments are never mutated here (both
    are rebound immediately), so they are safe.
    """
    from limic.util import start, end, status, file_size, split, file_copy
    from bz2 import open as bopen
    from limic.overpass import pylon
    # Turn (key, value) filters into raw 'key" v="value"' match strings;
    # white entries keep their index as a type id, black entries get -1.
    white = list(
        map(lambda x: (x[0], x[1][0] + '" v="' + x[1][1] + '"'),
            zip(range(len(white)), white)))
    black = list(map(lambda x: (-1, x[0] + '" v="' + x[1] + '"'), black))
    white_black = white + black
    if not conserve_mem:
        start("Reading map data from", file_name_in)
        from mmap import mmap
        # Anonymous 1 TiB sparse mapping as a decompression buffer.
        area = mmap(-1, 2**40)
        if file_name_in[-4:] == ".bz2":
            try:
                # Prefer the parallel lbzcat decompressor when available.
                from subprocess import Popen, PIPE
                f = Popen(['lbzcat', file_name_in], stdout=PIPE).stdout
            except Exception as e:
                print(e)
                status(
                    "WARNING: lbzcat failed - falling back on Python bz2 library"
                )
                f = bopen(file_name_in, "rb")
        else:
            f = open(file_name_in, "rb")
        file_copy(f, area)
        f.close()
        area_length = area.tell()
        end()
    start("Extracting ways for white and black")
    if conserve_mem:
        f = bopen(file_name_in, "rb") if file_name_in[-4:] == ".bz2" else open(
            file_name_in, "rb")
        elems = split(f, '\n <')
    else:
        area.seek(0)
        elems = split(area, '\n <', maxlength=area_length)
    lines = []
    substations = []
    id2types = {}
    # First pass: collect node-id lists for matching ways.
    for elem in elems:
        if elem.startswith('way id="'):
            found = False
            for i, t in white_black:
                if t in elem:
                    found = True
            if not found:
                continue
            tags = elem.split('\n <tag k="')
            for tag in tags[1:]:
                for i, t in white_black:
                    if tag.startswith(t):
                        # Parse the <nd ref="..."> references of this way.
                        nds = list(
                            map(lambda x: int(x.split('"')[0]),
                                tags[0].split('\n <nd ref="')[1:]))
                        if i >= 0:
                            lines.append(nds)
                            for nd in nds:
                                id2types[nd] = i
                        else:
                            substations.append(nds)
    end('')
    status(len(lines), end=' ')
    status(len(substations))
    # Invert: node id -> list of ways it belongs to.
    id2lines, id2substations = {}, {}
    for line in lines:
        for node in line:
            id2lines.setdefault(node, []).append(line)
    for substation in substations:
        for node in substation:
            id2substations.setdefault(node, []).append(substation)
    start("Extracting nodes for white and black")
    if conserve_mem:
        f = bopen(file_name_in, "rb") if file_name_in[-4:] == ".bz2" else open(
            file_name_in, "rb")
        elems = split(f, '\n <')
    else:
        area.seek(0)
        elems = split(area, '\n <', maxlength=area_length)
    towers = []
    id2tower = {}
    id2node = {}
    # String ids for a fast prefix check before parsing the element.
    ids = set()
    ids.update(map(lambda x: str(x), id2lines.keys()))
    ids.update(map(lambda x: str(x), id2substations.keys()))
    # Second pass: resolve referenced nodes into coordinates.
    for elem in elems:
        if elem.startswith('node id="'):
            if not elem[9:elem.find('"', 10)] in ids:
                continue
            parts = elem.split("\n")[0].split('"')
            assert (parts[-5] == ' lat=')
            assert (parts[-3] == ' lon=')
            i = int(parts[1])
            if i in id2lines:
                # Line nodes become pylon objects (the future graph nodes).
                elem = pylon(i, (float(parts[-4]), float(parts[-2])))
                towers.append(elem)
                id2tower[i] = elem
            if i in id2substations:
                # Substation nodes only need their coordinates.
                elem = (float(parts[-4]), float(parts[-2]))
                id2node[i] = elem
    end('')
    status(len(towers))
    start("Checking all nodes were extracted")
    assert (set(id2lines.keys()) == set(map(lambda x: x.id, towers)))
    end()
    return lines, substations, towers, id2tower, id2node, id2lines, id2types
from limic.util import start, end, file_size, status, load_pickled, save_pickled
from sys import argv
# Migration script: convert a pickled graph's boolean 'air' edge attribute
# into the newer integer 'type' attribute (-1 = through-air edge, 0 = line).
# Usage: script <input pickle> <output pickle>
if __name__ == "__main__":
    file_name_in = argv[1]
    file_name_out = argv[2]
    start("Loading graph from", file_name_in)
    g = load_pickled(file_name_in)
    end('')
    status(len(g.edges()), end=' ')
    file_size(file_name_in)
    start("Cleaning up graph")
    for u, v, d in g.edges(data=True):
        # Rewrite the attribute dict in place; NX shares these dicts with
        # the graph, so no re-adding of edges is needed.
        d['type'] = -1 if d['air'] else 0
        del d['air']
    end()
    start("Saving graph to", file_name_out)
    save_pickled(file_name_out, g)
    end('')
    file_size(file_name_out)
def extract_cache(file_name_in, file_name_out, overpass_url, area=None, around=1000, eps=0.01, safe_dist=100, penalize=20):
    """Build a routable tower graph for an area from a pickled Overpass cache.

    Loads the cache, queries the towers of the area, keeps only safe
    towers, builds edges via the sibling build_edges()/find_all_neighbours,
    prunes redundant edges and pickles the cleaned-up graph.  The cache
    file is re-saved (atomically) if querying added entries.
    """
    # Restored: these two imports were commented out in the original,
    # which left every name below (check_overwrite, region, start, ...)
    # undefined at call time.
    from limic.overpass import distance, find_all_neighbours, is_safe, set_server, pylon, region, get_towers_by_area
    from limic.util import start, end, file_size, status, load_pickled, save_pickled, replace, check_overwrite
    from networkx import Graph, relabel_nodes
    if not check_overwrite(file_name_in, file_name_out):
        return
    start("Loading", file_name_in)
    region.backend._cache = load_pickled(file_name_in)
    # Remember the cache size so we only rewrite it when it grew.
    len_cache = len(region.backend._cache)
    end('')
    file_size(file_name_in)
    if not area:
        # Cache files are named "cache.<area>".
        area = file_name_in.split(".")[1]
    start("Querying overpass for", area)
    set_server(overpass_url)
    towers = get_towers_by_area(area)
    end()
    start("Building safe nodes")
    g = Graph()
    # Keep only towers at least safe_dist away from hazards.
    for tower in towers:
        if is_safe(tower, safe_dist):
            g.add_node(tower)
    end('')
    total = len(g.nodes())
    status(total)
    start("Building edges")
    build_edges(g, find_all_neighbours, around, eps, safe_dist, penalize)
    end('')
    status(len(g.edges()))
    if len_cache != len(region.backend._cache):
        # Atomic save: write to .tmp, then rename over the cache file.
        file_name_tmp = file_name_in + ".tmp"
        start("Saving to", file_name_in, "via", file_name_tmp)
        save_pickled(file_name_tmp, region.backend._cache)
        replace(file_name_tmp, file_name_in)
        end('')
        file_size(file_name_in)
    start("Prune redundant edges (incomplete)")
    prune_incomplete(g)
    end('')
    status(len(g.edges()))
    start("Prune redundant edges (complete)")
    prune_complete(g)
    end('')
    status(len(g.edges()))
    start("Cleaning up graph")
    # Replace pylon objects by plain (id, lat, lon) tuples for pickling.
    relabel = dict(
        map(lambda tower: (tower, (tower.id, tower.latlon[0], tower.latlon[1])),
            g.nodes()))
    relabel_nodes(g, relabel, copy=False)
    for u, v, d in g.edges(data=True):
        # Normalize the edge 'type' flag to -1 (air) / 0 (line).
        d['type'] = -1 if d['type'] else 0
    end()
    start("Saving graph to", file_name_out)
    save_pickled(file_name_out, g)
    end('')
    file_size(file_name_out)