def test_simple(self):
    """Basic parsing: a single pair and two comma-separated pairs."""
    single = pghstore.loads('"key" => "value"')
    self.assertEqual(single, {"key": "value"})
    double = pghstore.loads('"key" => "value", "key2" => "value2"')
    self.assertEqual(double, {"key": "value", "key2": "value2"})
def test_null(self):
    """NULL hstore values (any letter case) must parse to Python None.

    Fix: the multi-pair inputs previously used '"key2": "value2"', i.e. a
    colon instead of hstore's '=>' key/value separator, which is not valid
    hstore text representation; the separator is corrected to '=>'.
    NOTE(review): a second ``test_null`` definition appears later in this
    file and shadows this one — consider removing one of them.
    """
    self.assertEqual(pghstore.loads('"key" => null'), {"key": None})
    self.assertEqual(pghstore.loads('"key" => NULL'), {"key": None})
    self.assertEqual(
        pghstore.loads('"key" => NULL, "key2" => "value2"'),
        {"key": None, "key2": "value2"})
    self.assertEqual(
        pghstore.loads('"key0" => "value0", "key" => NULL, "key2" => "value2"'),
        {"key0": "value0", "key": None, "key2": "value2"})
def objects_member_of(objects, other_selects, self_selects, options):
    """Yield (child, parent, link_tags) triples for every parent object the
    given objects are members of (osmosis database schema).

    objects:       iterable of feature dicts carrying a textual 'id'
                   ('n…'/'w…'/'r…').
    other_selects: parent types to look up; 'relation' and 'way' are handled.
    self_selects, options: accepted for interface compatibility; unused here.

    Fix: the way branch iterated ``for o in objects`` but yielded the stale
    loop variable ``ob`` left over from the relation branch, so way parents
    were paired with the wrong child object (or raised NameError when the
    relation branch never ran).
    """
    if "relation" in other_selects:
        plan = plpy.prepare(
            "select *, (select name from users where id=user_id) as user from relation_members join relations on relation_members.relation_id=relations.id where member_id=$1 and member_type=$2",
            ["bigint", "text"],
        )
        for ob in objects:
            member_id = ob["id"]
            # numeric id plus single-letter type ('N'/'W'/'R') address the member
            res = plpy.cursor(plan, [int(member_id[1:]), member_id[0:1].upper()])
            for r in res:
                t = {"id": "r" + str(r["id"]), "tags": pghstore.loads(r["tags"]), "types": ["relation"], "geo": None}
                link_tags = {
                    "sequence_id": str(r["sequence_id"]),
                    "role": str(r["member_role"]),
                    "member_id": r["member_type"].lower() + str(r["member_id"]),
                }
                # osm:* pseudo tags expose the parent's meta data
                t["tags"]["osm:id"] = str(t["id"])
                t["tags"]["osm:version"] = str(r["version"])
                t["tags"]["osm:user_id"] = str(r["user_id"])
                t["tags"]["osm:user"] = r["user"]
                t["tags"]["osm:timestamp"] = str(r["tstamp"])
                t["tags"]["osm:changeset"] = str(r["changeset_id"])
                yield ((ob, t, link_tags))
    if "way" in other_selects:
        plan = plpy.prepare(
            "select *, (select name from users where id=user_id) as user from way_nodes join ways on way_nodes.way_id=ways.id where node_id=$1",
            ["bigint"],
        )
        for o in objects:
            member_id = o["id"]
            if member_id[0] != "n":
                # only nodes can be members of ways
                continue
            num_id = int(member_id[1:])
            res = plpy.cursor(plan, [num_id])
            for r in res:
                t = {
                    "id": "w" + str(r["id"]),
                    "tags": pghstore.loads(r["tags"]),
                    "types": ["way"],
                    "geo": r["linestring"],
                }
                link_tags = {"member_id": member_id, "sequence_id": str(r["sequence_id"])}
                t["tags"]["osm:id"] = str(t["id"])
                t["tags"]["osm:version"] = str(r["version"])
                t["tags"]["osm:user_id"] = str(r["user_id"])
                t["tags"]["osm:user"] = r["user"]
                t["tags"]["osm:timestamp"] = str(r["tstamp"])
                t["tags"]["osm:changeset"] = str(r["changeset_id"])
                yield ((o, t, link_tags))  # fixed: previously yielded stale `ob`
def test_utf8(self):
    """UTF-8 / escaped byte sequences in keys and values survive loads().

    Fix: the previous revision parsed ``names`` once into ``r`` and then
    parsed it a second time inside the assertion, leaving ``r`` unused;
    the assertion now compares the single parse result.
    """
    self.maxDiff = None
    #self.assertEqual(pghstore.loads('"åäö" => "åäö"'), {"åäö": "åäö"})
    s = '"name"=>"Noorwe\xc3\xab", "name2"=>"öäå"'
    self.assertEqual(pghstore.loads(s), {"name": "Noorwe\xc3\xab", "name2": "öäå"})
    names = '"name"=>"Norge/Noreg", "name:af"=>"Noorwe\xc3\xab", "name:ar"=>"\xd8\xa7\xd9\x84\xd9\x86\xd8\xb1\xd9\x88\xd9\x8a\xd8\xac", "name:be"=>"\xd0\x9d\xd0\xb0\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd1\x96\xd1\x8f", "name:br"=>"Norvegia", "name:ca"=>"Noruega", "name:cs"=>"Norsko", "name:cy"=>"Norwy", "name:da"=>"Norge", "name:de"=>"Norwegen", "name:el"=>"\xce\x9d\xce\xbf\xcf\x81\xce\xb2\xce\xb7\xce\xb3\xce\xaf\xce\xb1", "name:en"=>"Norway", "name:eo"=>"Norvegio", "name:es"=>"Noruega", "name:et"=>"Norra", "name:fa"=>"\xd9\x86\xd8\xb1\xd9\x88\xda\x98", "name:fi"=>"Norja", "name:fo"=>"Noregur", "name:fr"=>"Norv\xc3\xa8ge", "name:fy"=>"Noarwegen", "name:ga"=>"An Iorua", "name:gd"=>"Nirribhidh", "name:he"=>"\xd7\xa0\xd7\x95\xd7\xa8\xd7\x95\xd7\x95\xd7\x92\xd7\x99\xd7\x94", "name:hr"=>"Norve\xc5\xa1ka", "name:hu"=>"Norv\xc3\xa9gia", "name:hy"=>"\xd5\x86\xd5\xb8\xd6\x80\xd5\xbe\xd5\xa5\xd5\xa3\xd5\xab\xd5\xa1", "name:id"=>"Norwegia", "name:is"=>"Noregur", "name:it"=>"Norvegia", "name:ja"=>"\xe3\x83\x8e\xe3\x83\xab\xe3\x82\xa6\xe3\x82\xa7\xe3\x83\xbc", "name:la"=>"Norvegia", "name:lb"=>"Norwegen", "name:li"=>"Noorwege", "name:lt"=>"Norvegija", "name:lv"=>"Norv\xc4\x93\xc4\xa3ija", "name:mn"=>"\xd0\x9d\xd0\xbe\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd0\xb8", "name:nb"=>"Norge", "name:nl"=>"Noorwegen", "name:nn"=>"Noreg", "name:no"=>"Norge", "name:pl"=>"Norwegia", "name:ru"=>"\xd0\x9d\xd0\xbe\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd0\xb8\xd1\x8f", "name:sk"=>"N\xc3\xb3rsko", "name:sl"=>"Norve\xc5\xa1ka", "name:sv"=>"Norge", "name:th"=>"\xe0\xb8\x9b\xe0\xb8\xa3\xe0\xb8\xb0\xe0\xb9\x80\xe0\xb8\x97\xe0\xb8\xa8\xe0\xb8\x99\xe0\xb8\xad\xe0\xb8\xa3\xe0\xb9\x8c\xe0\xb9\x80\xe0\xb8\xa7\xe0\xb8\xa2\xe0\xb9\x8c", "name:tr"=>"Norve\xc3\xa7", "name:uk"=>"\xd0\x9d\xd0\xbe\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd1\x96\xd1\x8f", "name:vi"=>"Na Uy", "name:zh"=>"\xe6\x8c\xaa\xe5\xa8\x81", "name:haw"=>"Nolewai", "name:zh_py"=>"Nuowei", "name:zh_pyt"=>"Nu\xc3\xb3w\xc4\x93i", "official_name"=>"Kongeriket Norge", "official_name:be"=>"\xd0\x9a\xd0\xb0\xd1\x80\xd0\xb0\xd0\xbb\xd0\xb5\xd1\x9e\xd1\x81\xd1\x82\xd0\xb2\xd0\xb0 \xd0\x9d\xd0\xb0\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd1\x96\xd1\x8f", "official_name:el"=>"\xce\x92\xce\xb1\xcf\x83\xce\xaf\xce\xbb\xce\xb5\xce\xb9\xce\xbf \xcf\x84\xce\xb7\xcf\x82 \xce\x9d\xce\xbf\xcf\x81\xce\xb2\xce\xb7\xce\xb3\xce\xaf\xce\xb1\xcf\x82", "official_name:en"=>"Kingdom of Norway", "official_name:id"=>"Kerajaan Norwegia", "official_name:it"=>"Regno di Norvegia", "official_name:ja"=>"\xe3\x83\x8e\xe3\x83\xab\xe3\x82\xa6\xe3\x82\xa7\xe3\x83\xbc\xe7\x8e\x8b\xe5\x9b\xbd", "official_name:lb"=>"Kinneksr\xc3\xa4ich Norwegen", "official_name:lt"=>"Norvegijos Karalyst\xc4\x97", "official_name:sk"=>"N\xc3\xb3rske kr\xc3\xa1\xc4\xbeovstvo", "official_name:sv"=>"Konungariket Norge", "official_name:vi"=>"V\xc6\xb0\xc6\xa1ng qu\xe1\xbb\x91c Na Uy"'
    r = pghstore.loads(names)
    self.assertEqual(r, {
        "name": "Norge/Noreg",
        "name:af": "Noorwe\xc3\xab",
        "name:ar": "\xd8\xa7\xd9\x84\xd9\x86\xd8\xb1\xd9\x88\xd9\x8a\xd8\xac",
        "name:be": "\xd0\x9d\xd0\xb0\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd1\x96\xd1\x8f",
        "name:br": "Norvegia",
        "name:ca": "Noruega",
        "name:cs": "Norsko",
        "name:cy": "Norwy",
        "name:da": "Norge",
        "name:de": "Norwegen",
        "name:el": "\xce\x9d\xce\xbf\xcf\x81\xce\xb2\xce\xb7\xce\xb3\xce\xaf\xce\xb1",
        "name:en": "Norway",
        "name:eo": "Norvegio",
        "name:es": "Noruega",
        "name:et": "Norra",
        "name:fa": "\xd9\x86\xd8\xb1\xd9\x88\xda\x98",
        "name:fi": "Norja",
        "name:fo": "Noregur",
        "name:fr": "Norv\xc3\xa8ge",
        "name:fy": "Noarwegen",
        "name:ga": "An Iorua",
        "name:gd": "Nirribhidh",
        "name:he": "\xd7\xa0\xd7\x95\xd7\xa8\xd7\x95\xd7\x95\xd7\x92\xd7\x99\xd7\x94",
        "name:hr": "Norve\xc5\xa1ka",
        "name:hu": "Norv\xc3\xa9gia",
        "name:hy": "\xd5\x86\xd5\xb8\xd6\x80\xd5\xbe\xd5\xa5\xd5\xa3\xd5\xab\xd5\xa1",
        "name:id": "Norwegia",
        "name:is": "Noregur",
        "name:it": "Norvegia",
        "name:ja": "\xe3\x83\x8e\xe3\x83\xab\xe3\x82\xa6\xe3\x82\xa7\xe3\x83\xbc",
        "name:la": "Norvegia",
        "name:lb": "Norwegen",
        "name:li": "Noorwege",
        "name:lt": "Norvegija",
        "name:lv": "Norv\xc4\x93\xc4\xa3ija",
        "name:mn": "\xd0\x9d\xd0\xbe\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd0\xb8",
        "name:nb": "Norge",
        "name:nl": "Noorwegen",
        "name:nn": "Noreg",
        "name:no": "Norge",
        "name:pl": "Norwegia",
        "name:ru": "\xd0\x9d\xd0\xbe\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd0\xb8\xd1\x8f",
        "name:sk": "N\xc3\xb3rsko",
        "name:sl": "Norve\xc5\xa1ka",
        "name:sv": "Norge",
        "name:th": "\xe0\xb8\x9b\xe0\xb8\xa3\xe0\xb8\xb0\xe0\xb9\x80\xe0\xb8\x97\xe0\xb8\xa8\xe0\xb8\x99\xe0\xb8\xad\xe0\xb8\xa3\xe0\xb9\x8c\xe0\xb9\x80\xe0\xb8\xa7\xe0\xb8\xa2\xe0\xb9\x8c",
        "name:tr": "Norve\xc3\xa7",
        "name:uk": "\xd0\x9d\xd0\xbe\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd1\x96\xd1\x8f",
        "name:vi": "Na Uy",
        "name:zh": "\xe6\x8c\xaa\xe5\xa8\x81",
        "name:haw": "Nolewai",
        "name:zh_py": "Nuowei",
        "name:zh_pyt": "Nu\xc3\xb3w\xc4\x93i",
        "official_name": "Kongeriket Norge",
        "official_name:be": "\xd0\x9a\xd0\xb0\xd1\x80\xd0\xb0\xd0\xbb\xd0\xb5\xd1\x9e\xd1\x81\xd1\x82\xd0\xb2\xd0\xb0 \xd0\x9d\xd0\xb0\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd1\x96\xd1\x8f",
        "official_name:el": "\xce\x92\xce\xb1\xcf\x83\xce\xaf\xce\xbb\xce\xb5\xce\xb9\xce\xbf \xcf\x84\xce\xb7\xcf\x82 \xce\x9d\xce\xbf\xcf\x81\xce\xb2\xce\xb7\xce\xb3\xce\xaf\xce\xb1\xcf\x82",
        "official_name:en": "Kingdom of Norway",
        "official_name:id": "Kerajaan Norwegia",
        "official_name:it": "Regno di Norvegia",
        "official_name:ja": "\xe3\x83\x8e\xe3\x83\xab\xe3\x82\xa6\xe3\x82\xa7\xe3\x83\xbc\xe7\x8e\x8b\xe5\x9b\xbd",
        "official_name:lb": "Kinneksr\xc3\xa4ich Norwegen",
        "official_name:lt": "Norvegijos Karalyst\xc4\x97",
        "official_name:sk": "N\xc3\xb3rske kr\xc3\xa1\xc4\xbeovstvo",
        "official_name:sv": "Konungariket Norge",
        "official_name:vi": "V\xc6\xb0\xc6\xa1ng qu\xe1\xbb\x91c Na Uy"})
def test_null(self):
    """NULL hstore values (any letter case) must parse to Python None.

    Fix: the multi-pair inputs previously used '"key2": "value2"', i.e. a
    colon instead of hstore's '=>' key/value separator, which is not valid
    hstore text representation; the separator is corrected to '=>'.
    NOTE(review): this duplicates an earlier ``test_null`` definition in
    the same class — consider removing one of them.
    """
    self.assertEqual(pghstore.loads('"key" => null'), {"key": None})
    self.assertEqual(pghstore.loads('"key" => NULL'), {"key": None})
    self.assertEqual(
        pghstore.loads('"key" => NULL, "key2" => "value2"'),
        {"key": None, "key2": "value2"})
    self.assertEqual(
        pghstore.loads('"key0" => "value0", "key" => NULL, "key2" => "value2"'),
        {"key0": "value0", "key": None, "key2": "value2"})
def objects_member_of(member_id, parent_type, parent_conditions):
    """Yield every parent object of `parent_type` ('relation' or 'way')
    that contains `member_id` as a member, including the membership
    ('link_tags') information (osm2pgsql schema)."""
    if parent_type == 'relation':
        plan = plpy.prepare('select * from planet_osm_rels where members @> Array[$1]', ['text'])
        for rel in plpy.execute(plan, [member_id]):
            for link in flatarray_to_members(rel['members']):
                if link['member_id'] != member_id:
                    continue
                yield {
                    'id': 'r' + str(rel['id']),
                    'tags': flatarray_to_tags(rel['tags']) if rel['tags'] else {},
                    'type': ['relation'],
                    'geo': None,
                    'link_tags': link,
                }
    if parent_type == 'way':
        num_id = int(member_id[1:])
        plan = plpy.prepare('select id, nodes, planet_osm_line.tags, way as geo from planet_osm_ways left join planet_osm_line on planet_osm_ways.id=planet_osm_line.osm_id where nodes::bigint[] @> Array[$1]', ['bigint'])
        for way in plpy.execute(plan, [num_id]):
            # a node may occur several times in a way; report each position
            for pos, node_id in enumerate(way['nodes']):
                if node_id != num_id:
                    continue
                yield {
                    'id': 'w' + str(way['id']),
                    'tags': pghstore.loads(way['tags']) if way['tags'] else {},
                    'type': ['way'],
                    'geo': way['geo'],
                    'link_tags': {'member_id': member_id, 'sequence_id': str(pos)},
                }
def objects_by_id(id_list):
    """Resolve textual OSM ids ('n…', 'w…', 'r…') against the osm2pgsql
    tables and yield one feature dict per found object."""
    node_ids = [int(i[1:]) for i in id_list if i[0] == 'n']
    plan = plpy.prepare('select * from planet_osm_point where osm_id=any($1)', ['bigint[]'])
    for row in plpy.execute(plan, [node_ids]):
        yield {
            'id': 'n' + str(row['osm_id']),
            'members': [],
            'tags': pghstore.loads(row['tags']),
            'geo': row['way'],
            'types': ['node', 'point'],
        }

    way_ids = [int(i[1:]) for i in id_list if i[0] == 'w']
    plan = plpy.prepare("select t.*, planet_osm_ways.nodes from (select osm_id, tags, way, 'line' as _type from planet_osm_line where osm_id=any($1) union select osm_id, tags, way, 'way' as _type from planet_osm_polygon where osm_id=any($1)) t left join planet_osm_ways on t.osm_id=planet_osm_ways.id", ['bigint[]'])
    for row in plpy.execute(plan, [way_ids]):
        yield {
            'id': 'w' + str(row['osm_id']),
            'members': [{'member_id': 'n' + str(node), 'sequence_id': str(pos)} for pos, node in enumerate(row['nodes'])],
            'tags': pghstore.loads(row['tags']),
            'geo': row['way'],
            'types': ['way', row['_type']],
        }

    rel_ids = [int(i[1:]) for i in id_list if i[0] == 'r']
    plan = plpy.prepare("select id, planet_osm_rels.tags, members, planet_osm_polygon.way from planet_osm_rels left join planet_osm_polygon on -planet_osm_rels.id=planet_osm_polygon.osm_id where id=any($1)", ['bigint[]'])
    for row in plpy.execute(plan, [rel_ids]):
        # a relation with a polygon geometry is also an area
        yield {
            'id': 'r' + str(row['id']),
            'tags': flatarray_to_tags(row['tags']),
            'members': flatarray_to_members(row['members']),
            'geo': row['way'],
            'types': ['relation'] if row['way'] is None else ['relation', 'area'],
        }
def process_mapnik(style_id, args, stat, conn):
    """Render the base mapnik style template into '<style_id>.mapnik'.

    style_id: name of the compiled style (also the output file prefix).
    args:     parsed command line options (base_style, db credentials).
    stat:     compiler state; 'mapnik_columns' is (re)filled by process().
    conn:     accepted for interface compatibility; unused here.

    Fix: the output file is now opened with a context manager and only
    after processing succeeded, so a failure no longer leaks the handle or
    leaves a truncated file behind; the template stream is closed even on
    error.
    """
    f1 = resource_stream(__name__, args.base_style + '.mapnik')
    replacement = {
        'style_id': style_id,
        'host': args.host,
        'password': args.password,
        'database': args.database,
        'user': args.user,
        'columns': '{columns}',  # placeholder, substituted in a second pass below
    }
    stat['mapnik_columns'] = set()

    # dirty hack - when render_context.bbox is null, pass type 'canvas' instead of style-element
    res = db.prepare("select * from pgmapcss_{style_id}(null, 0, Array['canvas']) where pseudo_element='default'".format(**replacement))
    result = res()
    if len(result) > 0:
        canvas_properties = result[0][res.column_names.index('properties')]
        canvas_properties = pghstore.loads(canvas_properties)
        for (k, v) in canvas_properties.items():
            replacement['canvas|' + k] = v or ''

    try:
        text = process(f1, replacement, stat)
    finally:
        f1.close()

    # style-element is an extra column in result set
    stat['mapnik_columns'].remove('style-element')

    # finally replace 'columns'
    replacement = {}
    replacement['columns'] = ',\n '.join([
        ' || \' \' || '.join(['(properties->' + db.format(p) + ')' for p in props.split(' ')])
        + ' as "' + shorten_column(props) + '"'
        for props in stat['mapnik_columns']
    ])
    with open(style_id + '.mapnik', 'w') as f2:
        f2.write(text.format(**replacement))
    print('File ' + style_id + '.mapnik successfully written.')
def process_mapnik(style_id, args, stat, conn):
    """Render the base mapnik style template into '<style_id>.mapnik'.

    style_id: name of the compiled style (also the output file prefix).
    args:     parsed command line options (base_style, db credentials).
    stat:     compiler state, queried for the style's properties.
    conn:     accepted for interface compatibility; unused here.

    Fix: the output file is now opened with a context manager so it is
    closed (and not leaked) even when template processing raises; the
    template stream is closed in a finally block for the same reason.
    """
    f1 = resource_stream(__name__, args.base_style + '.mapnik')
    replacement = {
        'style_id': style_id,
        'host': args.host,
        'password': args.password,
        'database': args.database,
        'user': args.user,
    }
    replacement['columns'] = ', '.join([
        'properties->' + db.format(prop) + ' as ' + db.ident(prop)
        for prop in stat_properties(stat)
    ])

    # dirty hack - when render_context.bbox is null, pass type 'canvas' instead of style-element
    res = db.prepare("select * from {style_id}_match(null, 0, Array['canvas'])".format(**replacement))
    result = res()
    if len(result) > 0:
        canvas_properties = result[0][res.column_names.index('properties')]
        canvas_properties = pghstore.loads(canvas_properties)
        for (k, v) in canvas_properties.items():
            replacement['canvas|' + k] = v or ''

    try:
        with open(style_id + '.mapnik', 'w') as f2:
            f2.write(process(f1, replacement, stat))
    finally:
        f1.close()
    print('File ' + style_id + '.mapnik successfully written.')

    # NOTE(review): `unresolvable_properties` is not defined in this function;
    # presumably a module-level collection filled elsewhere — confirm.
    if len(unresolvable_properties):
        print('WARNING: Not all values for the following properties could be guessed (e.g. as they are the result of an eval-expression, and therefore some features in the resulting image(s) may be missing: ' + ', '.join(unresolvable_properties))
def objects_by_id(id_list, options):
    """Resolve textual OSM ids ('n…', 'w…', 'r…') against the osmosis
    schema and yield one feature dict per found object, with the object's
    meta data attached as osm:* pseudo tags."""

    def _with_meta(feature, row):
        # attach the object's meta data as osm:* pseudo tags
        feature["tags"]["osm:id"] = str(feature["id"])
        feature["tags"]["osm:version"] = str(row["version"])
        feature["tags"]["osm:user_id"] = str(row["user_id"])
        feature["tags"]["osm:user"] = row["user"]
        feature["tags"]["osm:timestamp"] = str(row["tstamp"])
        feature["tags"]["osm:changeset"] = str(row["changeset_id"])
        return feature

    node_ids = [int(i[1:]) for i in id_list if i[0] == "n"]
    plan = plpy.prepare(
        "select *, (select name from users where id=user_id) as user from nodes where id=any($1)", ["bigint[]"]
    )
    for row in plpy.cursor(plan, [node_ids]):
        yield _with_meta({
            "id": "n" + str(row["id"]),
            "members": [],
            "tags": pghstore.loads(row["tags"]),
            "geo": row["geom"],
            "types": ["node", "point"],
        }, row)

    way_ids = [int(i[1:]) for i in id_list if i[0] == "w"]
    plan = plpy.prepare(
        "select id, tags, version, user_id, (select name from users where id=user_id) as user, tstamp, changeset_id, linestring as linestring, array_agg(node_id) as member_ids from (select ways.*, node_id from ways left join way_nodes on ways.id=way_nodes.way_id where ways.id=any($1) order by way_nodes.sequence_id) t group by id, tags, version, user_id, tstamp, changeset_id, linestring",
        ["bigint[]"],
    )
    for row in plpy.cursor(plan, [way_ids]):
        yield _with_meta({
            "id": "w" + str(row["id"]),
            "members": [{"member_id": "n" + str(node), "sequence_id": str(pos)} for pos, node in enumerate(row["member_ids"])],
            "tags": pghstore.loads(row["tags"]),
            "geo": row["linestring"],
            "types": ["way", "line", "area"],
        }, row)

    rel_ids = [int(i[1:]) for i in id_list if i[0] == "r"]
    plan = plpy.prepare(
        "select id, tags, version, user_id, (select name from users where id=user_id) as user, tstamp, changeset_id, array_agg(lower(member_type) || member_id) as member_ids, array_agg(member_role) as member_roles from (select relations.*, member_type, member_id, member_role from relations left join relation_members on relations.id=relation_members.relation_id where relations.id=any($1) order by relation_members.sequence_id) t group by id, tags, version, user_id, tstamp, changeset_id",
        ["bigint[]"],
    )
    for row in plpy.cursor(plan, [rel_ids]):
        yield _with_meta({
            "id": "r" + str(row["id"]),
            "tags": pghstore.loads(row["tags"]),
            # member ids come back already prefixed (n/w/r) by the query
            "members": [
                {"member_id": pair[0], "role": pair[1], "sequence_id": pos}
                for pos, pair in enumerate(zip(row["member_ids"], row["member_roles"]))
            ],
            "geo": None,
            "types": ["relation"],
        }, row)
def objects_bbox(_bbox, db_selects, options, add_columns={}, add_param_type=[], add_param_value=[]):
    # Yield all matching objects (nodes, ways, multipolygons, relations) from
    # an osmosis-schema database, optionally restricted to the bounding box
    # `_bbox` ($1 in all queries). `db_selects` maps object types to SQL
    # where-clauses; `add_columns` maps aliases to extra select expressions.
    # The '# START …'/'# END …' comment pairs are pgmapcss preprocessor
    # directives: only one variant of each pair survives compilation,
    # depending on database capabilities — presumably handled by the pgmapcss
    # compiler; confirm before editing their placement.
    # NOTE: the mutable default arguments are never mutated in this function.
    import pghstore

    qry = ""

    # extra select expressions, rendered as `<expr> as "<alias>"`
    if len(add_columns):
        add_columns_qry = ", " + ", ".join([q + ' as "' + k + '"' for k, q in add_columns.items()])
    else:
        add_columns_qry = ""

    # $1 is the bbox geometry when given; any extra parameters follow
    if _bbox:
        param_type = ["geometry"] + add_param_type
        param_value = [_bbox] + add_param_value
    else:
        param_type = add_param_type
        param_value = add_param_value

    # nodes
    w = []
    for t in ("*", "node", "point"):
        if t in db_selects:
            w.append(db_selects[t])
    if len(w):
        bbox = ""
        if _bbox is not None:
            bbox = "geom && $1 and ST_Intersects(geom, $1) and"
        qry = """
select 'n' || cast(id as text) as id, version, user_id,
(select name from users where id=user_id) as user, tstamp, changeset_id,
tags, geom as geo, Array['point', 'node'] as types
{add_columns}
from nodes
where {bbox} ( {w} )
""".format(
            bbox=bbox, w=" or ".join(w), add_columns=add_columns_qry.replace("__geo__", "geom")
        )
        plan = plpy.prepare(qry, param_type)
        res = plpy.cursor(plan, param_value)
        for r in res:
            r["types"] = list(r["types"])
            r["tags"] = pghstore.loads(r["tags"])
            # attach the object's meta data as osm:* pseudo tags
            r["tags"]["osm:id"] = str(r["id"])
            r["tags"]["osm:version"] = str(r["version"])
            r["tags"]["osm:user_id"] = str(r["user_id"])
            r["tags"]["osm:user"] = r["user"]
            r["tags"]["osm:timestamp"] = str(r["tstamp"])
            r["tags"]["osm:changeset"] = str(r["changeset_id"])
            yield (r)

    # ways
    w = []
    for t in ("*", "line", "area", "way"):
        if t in db_selects:
            w.append(db_selects[t])
    if len(w):
        bbox = ""
        if _bbox is not None:
            # single-point linestrings cannot be intersected; match by && only
            bbox = "linestring && $1 and (ST_NPoints(linestring) = 1 or ST_Intersects(linestring, $1)) and"
        qry = """
select * {add_columns}
from (
select 'w' || cast(id as text) as id, version, user_id,
(select name from users where id=user_id) as user, tstamp, changeset_id,
tags,
(CASE WHEN ST_NPoints(linestring) >= 4 and ST_IsClosed(linestring) THEN ST_MakePolygon(linestring) ELSE linestring END) as geo,
(ST_NPoints(linestring) >= 4) and ST_IsClosed(linestring) as is_closed,
Array['line', 'way'] as types
"""
# START db.multipolygons
# START db.multipolygons-v0.2
# deprecated by osmosis-multipolygon v0.3
        qry += """
, (select array_agg(has_outer_tags) from relation_members join multipolygons on relation_members.relation_id=multipolygons.id where relation_members.member_id=ways.id and relation_members.member_type='W' and relation_members.member_role in ('outer', 'exclave')) part_of_mp_outer
"""
# ELSE db.multipolygons-v0.2
        qry += """
, (select array_agg(true) from multipolygons where hide_outer_ways @> Array[ways.id]) part_of_mp_outer
"""
# END db.multipolygons-v0.2
# END db.multipolygons
        qry += """
from ways
where {bbox} ( {w} )
offset 0) t
"""
        qry = qry.format(bbox=bbox, w=" or ".join(w), add_columns=add_columns_qry.replace("__geo__", "geo"))
        plan = plpy.prepare(qry, param_type)
        res = plpy.cursor(plan, param_value)
        for r in res:
            r["types"] = list(r["types"])
            r["tags"] = pghstore.loads(r["tags"])
            # closed ways also count as areas — unless they only serve as the
            # outer ring of a multipolygon (then the multipolygon is the area)
            if r["is_closed"]:
                # START db.multipolygons
                if not r["part_of_mp_outer"] or True not in r["part_of_mp_outer"]:
                    # END db.multipolygons
                    r["types"].append("area")
            r["tags"]["osm:id"] = str(r["id"])
            r["tags"]["osm:version"] = str(r["version"])
            r["tags"]["osm:user_id"] = str(r["user_id"])
            r["tags"]["osm:user"] = r["user"]
            r["tags"]["osm:timestamp"] = str(r["tstamp"])
            r["tags"]["osm:changeset"] = str(r["changeset_id"])
            yield (r)

    # relations whose geometry was already emitted as a multipolygon
    done_multipolygons = set()
    # START db.multipolygons
    # multipolygons
    w = []
    for t in ("*", "relation", "area"):
        if t in db_selects:
            w.append(db_selects[t])
    if len(w):
        bbox = ""
        if _bbox is not None:
            bbox = "geom && $1 and ST_Intersects(geom, $1) and"
        qry = """
select * {add_columns}
from (
select (CASE WHEN has_outer_tags THEN 'm' ELSE 'r' END) || cast(id as text) as id,
id as rid, version, user_id,
(select name from users where id=user_id) as user, tstamp, changeset_id,
has_outer_tags, tags, geom as geo, Array['area'] as types
from (select multipolygons.*, relations.version, relations.user_id, relations.tstamp, relations.changeset_id from multipolygons left join relations on multipolygons.id = relations.id) t
where {bbox} ( {w} )
offset 0) t
""".format(
            bbox=bbox, w=" or ".join(w), add_columns=add_columns_qry.replace("__geo__", "geo")
        )
        plan = plpy.prepare(qry, param_type)
        res = plpy.cursor(plan, param_value)
        for r in res:
            r["types"] = list(r["types"])
            r["tags"] = pghstore.loads(r["tags"])
            r["tags"]["osm:id"] = str(r["id"])
            r["tags"]["osm:version"] = str(r["version"])
            r["tags"]["osm:user_id"] = str(r["user_id"])
            r["tags"]["osm:user"] = r["user"]
            r["tags"]["osm:timestamp"] = str(r["tstamp"])
            r["tags"]["osm:changeset"] = str(r["changeset_id"])
            if r["has_outer_tags"]:
                # geometry taken from tagged outer ways; keep the relation too
                r["tags"]["osm:has_outer_tags"] = "yes"
            else:
                # this relation is fully represented by the multipolygon
                done_multipolygons.add(r["rid"])
                r["types"].append("relation")
            yield (r)
    # END db.multipolygons

    # relations - (no bbox match!)
    w = []
    for t in ("*", "relation"):
        if t in db_selects:
            w.append(db_selects[t])
    if len(w):
        qry = """
select * {add_columns}
from (
select 'r' || cast(id as text) as id, version, user_id,
(select name from users where id=user_id) as user, tstamp, changeset_id,
tags, null as geo, Array['relation'] as types
from relations
where ({w}) and not id = ANY(Array[{done}]::bigint[])) t
""".format(
            w=" or ".join(w), add_columns=add_columns_qry, done=",".join({str(d) for d in done_multipolygons})
        )
        plan = plpy.prepare(qry, param_type)
        res = plpy.cursor(plan, param_value)
        for r in res:
            r["types"] = list(r["types"])
            r["tags"] = pghstore.loads(r["tags"])
            r["tags"]["osm:id"] = str(r["id"])
            r["tags"]["osm:version"] = str(r["version"])
            r["tags"]["osm:user_id"] = str(r["user_id"])
            r["tags"]["osm:user"] = r["user"]
            r["tags"]["osm:timestamp"] = str(r["tstamp"])
            r["tags"]["osm:changeset"] = str(r["changeset_id"])
            yield (r)
def objects_by_id(id_list, options):
    # Load specific objects by textual id ('n…'/'w…'/'r…') from an osm2pgsql
    # database and yield one feature dict per found object.
    # The '# START …'/'# END …' comment pairs are pgmapcss preprocessor
    # directives: only one tag strategy (extra columns merged with hstore,
    # or hstore only) survives compilation — presumably handled by the
    # pgmapcss compiler; confirm before editing their placement.
    _id_list = [int(i[1:]) for i in id_list if i[0] == "n"]
    plan = plpy.prepare("select * from planet_osm_point where osm_id=any($1)", ["bigint[]"])
    res = plpy.cursor(plan, [_id_list])
    for r in res:
        t = {"id": "n" + str(r["osm_id"]), "members": [], "geo": r["way"], "types": ["node", "point"]}
        # START db.columns.node
        # non-null extra table columns become tags
        t["tags"] = {k: r[k] for k in r if k not in ["id", "geo", "types", "tags", "way", "osm_id"] if r[k] is not None}
        # START db.has-hstore
        # merge the hstore tags with the column-derived tags
        t["tags"] = dict(pghstore.loads(r["tags"]).items() | t["tags"].items())
        # END db.has-hstore
        # END db.columns.node
        # START db.hstore-only
        t["tags"] = pghstore.loads(r["tags"])
        # END db.hstore-only
        t["tags"]["osm:id"] = t["id"]
        yield t

    _id_list = [int(i[1:]) for i in id_list if i[0] == "w"]
    plan = plpy.prepare(
        "select t.*, planet_osm_ways.nodes from (select osm_id, tags, way, 'line' as _type from planet_osm_line where osm_id=any($1) union select osm_id, tags, way, 'way' as _type from planet_osm_polygon where osm_id=any($1)) t left join planet_osm_ways on t.osm_id=planet_osm_ways.id",
        ["bigint[]"],
    )
    res = plpy.cursor(plan, [_id_list])
    for r in res:
        t = {
            "id": "w" + str(r["osm_id"]),
            "members": [{"member_id": "n" + str(m), "sequence_id": str(i)} for i, m in enumerate(r["nodes"])],
            "geo": r["way"],
            "types": ["way", r["_type"]],
        }
        # START db.columns.way
        t["tags"] = {
            k: r[k] for k in r if k not in ["osm_id", "geo", "types", "tags", "nodes", "_type", "way"] if r[k] is not None
        }
        # START db.has-hstore
        t["tags"] = dict(pghstore.loads(r["tags"]).items() | t["tags"].items())
        # END db.has-hstore
        # END db.columns.way
        # START db.hstore-only
        t["tags"] = pghstore.loads(r["tags"])
        # END db.hstore-only
        t["tags"]["osm:id"] = t["id"]
        yield t

    _id_list = [int(i[1:]) for i in id_list if i[0] == "r"]
    plan = plpy.prepare(
        "select id, planet_osm_rels.tags, members, planet_osm_polygon.way from planet_osm_rels left join planet_osm_polygon on -planet_osm_rels.id=planet_osm_polygon.osm_id where id=any($1)",
        ["bigint[]"],
    )
    res = plpy.cursor(plan, [_id_list])
    for r in res:
        # a relation with a polygon geometry is also an area
        t = {
            "id": "r" + str(r["id"]),
            "tags": flatarray_to_tags(r["tags"]) if r["tags"] else {},
            "members": flatarray_to_members(r["members"]),
            "geo": r["way"],
            "types": ["relation"] if r["way"] is None else ["relation", "area"],
        }
        t["tags"]["osm:id"] = t["id"]
        yield t
def objects_bbox(_bbox, db_selects, options, add_columns={}, add_param_type=[], add_param_value=[]):
    # Yield all matching objects from an osm2pgsql database, optionally
    # restricted to the bounding box `_bbox` ($1 in all queries).
    # `db_selects` maps object types to SQL where-clauses; `add_columns`
    # maps aliases to extra select expressions.
    # The '# START …'/'# END …' lines — including those embedded in the SQL
    # template strings — are pgmapcss preprocessor directives; likewise
    # '{sql.columns.*}' and the '__…__' placeholders are substituted before
    # this code runs — presumably by the pgmapcss compiler; confirm before
    # editing their placement.
    # NOTE: the mutable default arguments are never mutated in this function.
    import pghstore

    qry = ""
    bbox = ""
    replacements = {"parent_bbox": ""}
    if _bbox is not None:
        bbox = "way && $1 and ST_Intersects(way, $1) and"
        replacements["parent_bbox"] = "way && $1 and ST_Intersects(way, $1) and"
    # extra select expressions, rendered as `<expr> as "<alias>"`
    if len(add_columns):
        add_columns_qry = ", " + ", ".join([q + ' as "' + k + '"' for k, q in add_columns.items()])
    else:
        add_columns_qry = ""
    # $1 is the bbox geometry when given; any extra parameters follow
    if _bbox:
        param_type = ["geometry"] + add_param_type
        param_value = [_bbox] + add_param_value
    else:
        param_type = add_param_type
        param_value = add_param_value

    # planet_osm_point
    w = []
    for t in ("*", "node", "point"):
        if t in db_selects:
            w.append(db_selects[t])
    if len(w):
        qry = """
select 'n' || cast(osm_id as text) as id,
way as geo, Array['point', 'node'] as types
# START db.has-hstore
, tags
# END db.has-hstore
{sql.columns.node}
{add_columns}
from planet_osm_point
where {bbox} ( {w} )
""".format(
            bbox=bbox, w=" or ".join(w), add_columns=add_columns_qry
        )
        qry = qry.replace("__PARENT_BBOX__", replacements["parent_bbox"])
        qry = qry.replace("__TYPE_SHORT__", "n")
        qry = qry.replace("__TYPE_MODIFY__", "")
        plan = plpy.prepare(qry, param_type)
        res = plpy.cursor(plan, param_value)
        for r in res:
            t = {"id": r["id"], "geo": r["geo"], "types": r["types"]}
            # START db.columns.node
            # non-null extra table columns become tags
            t["tags"] = {
                k: r[k] for k in r if k not in ["id", "geo", "types", "tags"] and k not in add_columns if r[k] is not None
            }
            # START db.has-hstore
            # merge the hstore tags with the column-derived tags
            t["tags"] = dict(pghstore.loads(r["tags"]).items() | t["tags"].items())
            # END db.has-hstore
            # END db.columns.node
            # START db.hstore-only
            t["tags"] = pghstore.loads(r["tags"])
            # END db.hstore-only
            t["tags"]["osm:id"] = str(r["id"])
            for k in add_columns:
                t[k] = r[k]
            yield (t)

    # planet_osm_line - ways (positive osm_id)
    w = []
    for t in ("*", "line", "way"):
        if t in db_selects:
            w.append(db_selects[t])
    if len(w):
        qry = """
select 'w' || cast(osm_id as text) as id,
way as geo, Array['line', 'way'] as types
# START db.has-hstore
, tags
# END db.has-hstore
{sql.columns.way}
{add_columns}
from planet_osm_line
where osm_id>0 and {bbox} ( {w} )
""".format(
            bbox=bbox, w=" or ".join(w), add_columns=add_columns_qry
        )
        qry = qry.replace("__PARENT_BBOX__", replacements["parent_bbox"])
        qry = qry.replace("__TYPE_SHORT__", "w")
        qry = qry.replace("__TYPE_MODIFY__", "")
        plan = plpy.prepare(qry, param_type)
        res = plpy.cursor(plan, param_value)
        for r in res:
            t = {"id": r["id"], "geo": r["geo"], "types": r["types"]}
            # START db.columns.way
            t["tags"] = {
                k: r[k] for k in r if k not in ["id", "geo", "types", "tags"] and k not in add_columns if r[k] is not None
            }
            # START db.has-hstore
            t["tags"] = dict(pghstore.loads(r["tags"]).items() | t["tags"].items())
            # END db.has-hstore
            # END db.columns.way
            # START db.hstore-only
            t["tags"] = pghstore.loads(r["tags"])
            # END db.hstore-only
            t["tags"]["osm:id"] = str(r["id"])
            for k in add_columns:
                t[k] = r[k]
            yield (t)

    # planet_osm_line - relations (negative osm_id)
    w = []
    for t in ("*", "line", "relation"):
        if t in db_selects:
            w.append(db_selects[t])
    if len(w):
        qry = """
select 'r' || cast(-osm_id as text) as id,
way as geo, Array['line', 'relation'] as types
# START db.has-hstore
, tags
# END db.has-hstore
{sql.columns.way}
{add_columns}
from planet_osm_line
where osm_id<0 and {bbox} ( {w} )
""".format(
            bbox=bbox, w=" or ".join(w), add_columns=add_columns_qry
        )
        qry = qry.replace("__PARENT_BBOX__", replacements["parent_bbox"])
        # NOTE(review): '__TYPE_SHORT__' is replaced with 'w' here although
        # this branch selects relations — verify against the templates that
        # use these placeholders.
        qry = qry.replace("__TYPE_SHORT__", "w")
        qry = qry.replace("__TYPE_MODIFY__", "")
        plan = plpy.prepare(qry, param_type)
        res = plpy.cursor(plan, param_value)
        for r in res:
            t = {"id": r["id"], "geo": r["geo"], "types": r["types"]}
            # START db.columns.way
            t["tags"] = {
                k: r[k] for k in r if k not in ["id", "geo", "types", "tags"] and k not in add_columns if r[k] is not None
            }
            # START db.has-hstore
            t["tags"] = dict(pghstore.loads(r["tags"]).items() | t["tags"].items())
            # END db.has-hstore
            # END db.columns.way
            # START db.hstore-only
            t["tags"] = pghstore.loads(r["tags"])
            # END db.hstore-only
            t["tags"]["osm:id"] = str(r["id"])
            for k in add_columns:
                t[k] = r[k]
            yield (t)

    # planet_osm_polygon - ways (positive osm_id)
    w = []
    for t in ("*", "area", "way"):
        if t in db_selects:
            w.append(db_selects[t])
    if len(w):
        qry = """
select 'w' || cast(osm_id as text) as id,
way as geo, Array['area', 'way'] as types
# START db.has-hstore
, tags
# END db.has-hstore
{sql.columns.way}
{add_columns}
from planet_osm_polygon
where osm_id>0 and {bbox} ( {w} )
""".format(
            bbox=bbox, w=" or ".join(w), add_columns=add_columns_qry
        )
        qry = qry.replace("__PARENT_BBOX__", replacements["parent_bbox"])
        # NOTE(review): '__TYPE_SHORT__'='r' and '__TYPE_MODIFY__'='-' here
        # although this branch selects ways (id prefixed 'w') — verify.
        qry = qry.replace("__TYPE_SHORT__", "r")
        qry = qry.replace("__TYPE_MODIFY__", "-")
        plan = plpy.prepare(qry, param_type)
        res = plpy.cursor(plan, param_value)
        for r in res:
            t = {"id": r["id"], "geo": r["geo"], "types": r["types"]}
            # START db.columns.way
            t["tags"] = {
                k: r[k] for k in r if k not in ["id", "geo", "types", "tags"] and k not in add_columns if r[k] is not None
            }
            # START db.has-hstore
            t["tags"] = dict(pghstore.loads(r["tags"]).items() | t["tags"].items())
            # END db.has-hstore
            # END db.columns.way
            # START db.hstore-only
            t["tags"] = pghstore.loads(r["tags"])
            # END db.hstore-only
            t["tags"]["osm:id"] = str(r["id"])
            for k in add_columns:
                t[k] = r[k]
            yield (t)

    # planet_osm_polygon - relations (negative osm_id)
    w = []
    for t in ("*", "area", "relation"):
        if t in db_selects:
            w.append(db_selects[t])
    if len(w):
        qry = """
select 'r' || cast(-osm_id as text) as id,
way as geo, Array['area', 'relation'] as types
# START db.has-hstore
, tags
# END db.has-hstore
{sql.columns.way}
{add_columns}
from planet_osm_polygon
where osm_id<0 and {bbox} ( {w} )
""".format(
            bbox=bbox, w=" or ".join(w), add_columns=add_columns_qry
        )
        qry = qry.replace("__PARENT_BBOX__", replacements["parent_bbox"])
        qry = qry.replace("__TYPE_SHORT__", "r")
        qry = qry.replace("__TYPE_MODIFY__", "-")
        plan = plpy.prepare(qry, param_type)
        res = plpy.cursor(plan, param_value)
        for r in res:
            t = {"id": r["id"], "geo": r["geo"], "types": r["types"]}
            # START db.columns.way
            t["tags"] = {
                k: r[k] for k in r if k not in ["id", "geo", "types", "tags"] and k not in add_columns if r[k] is not None
            }
            # START db.has-hstore
            t["tags"] = dict(pghstore.loads(r["tags"]).items() | t["tags"].items())
            # END db.has-hstore
            # END db.columns.way
            # START db.hstore-only
            t["tags"] = pghstore.loads(r["tags"])
            # END db.hstore-only
            t["tags"]["osm:id"] = str(r["id"])
            for k in add_columns:
                t[k] = r[k]
            yield (t)
def costAlgo(r):
    """Recompute the routing cost of the way affected by a streamed event.

    ``r`` is a (key, payload) pair from the stream; ``r[1]`` is a JSON
    document with "from"/"to" coordinate dicts, an event "key", a
    "timestamp" and a numeric "value".  The affected way is found by
    progressively shorter geohash prefixes, its hstore tags are folded
    into a cost figure, and the new cost is written back to the ways table.
    """
    parsedJson = json.loads(r[1])

    # "from"/"to" are lat/lon dicts; wrapping them in str() (as the
    # original code did) breaks the .get() lookups that follow.
    fromPoint = parsedJson["from"]
    geohash = Geohash.encode(fromPoint.get("lat"), fromPoint.get("lon"), 9)
    toPoint = parsedJson["to"]
    toHash = Geohash.encode(toPoint.get("lat"), toPoint.get("lon"), 8)

    keyHash = str(parsedJson["key"])
    tStamp = str(parsedJson["timestamp"])
    # value participates in arithmetic below, so keep it numeric (the
    # original stored it as a string, corrupting every cost computation).
    value = float(parsedJson["value"])

    # Try to connect to our database.
    try:
        conn = psycopg2.connect(
            "dbname='DRP' user='******' password='******'")
    except psycopg2.Error:
        print("I am unable to connect to the database.")
        return  # nothing below can work without a connection

    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    result = None
    try:
        # Widen the search one geohash character at a time until a way
        # matches the prefix.  A parameterized LIKE fixes both the
        # SQL-injection risk and the original's misplaced '%' wildcard,
        # which sat outside the quoted literal.
        while result is None and geohash:
            cur.execute(
                "SELECT * FROM geohashed_ways WHERE geohash LIKE %s;",
                (geohash + "%",))
            result = cur.fetchone()
            geohash = geohash[:-1]
    except psycopg2.Error:
        print("I can't SELECT.")

    if result is None:
        cur.close()
        conn.close()
        return

    # Compute the cost using the streamed update and the stored way data.
    tagDict = pghstore.loads(result['tags'])
    isAccident = keyHash == 'TRAFFIC_INCIDENT'
    cost = value * 10 if isAccident else value

    # .items() is required: iterating a dict directly yields keys only,
    # so the original "for k, v in tagDict" raised ValueError.
    for k, v in tagDict.items():
        if k == 'lanes':
            try:
                lanes = int(v)  # hstore values are strings
            except ValueError:
                continue
            # The original accidentally *called* result('oneway') in two
            # branches; DictRow is subscripted, not called.
            oneway = result['oneway']
            if lanes == 2 and oneway == 'yes':
                cost += value * 10 if isAccident else 20
            elif lanes == 2 and oneway == 'no':
                cost += value * 5 if isAccident else 15
            elif lanes > 2 and oneway == 'yes':
                cost += value * 5 if isAccident else 10
            elif lanes > 2 and oneway == 'no':
                cost += value * 2 if isAccident else 5
        elif k == 'highway':
            # Bigger road classes get a discount.
            cost -= {'primary': 20, 'secondary': 15,
                     'trunk': 10, 'trunk_link': 5}.get(v, 0)
        elif k == 'maxspeed':
            try:
                speed = int(v)
            except ValueError:
                continue
            if speed >= 50:
                cost -= speed
            else:
                cost += speed

    try:
        # Persist the updated cost; the original statement was malformed
        # SQL (INSERT ... WHERE) assembled by string concatenation.
        cur.execute("UPDATE ways SET cost = %s WHERE id = %s;",
                    (cost, result['id']))
        conn.commit()
    except psycopg2.Error:
        print("I can't INSERT")
    cur.close()
    conn.close()
def test_utf8(self):
    """Non-ASCII (UTF-8) keys and values survive pghstore.loads()."""
    self.maxDiff = None
    #self.assertEqual(pghstore.loads('"åäö" => "åäö"'), {"åäö": "åäö"})
    s = '"name"=>"Noorwe\xc3\xab", "name2"=>"öäå"'
    self.assertEqual(pghstore.loads(s), {
        "name": "Noorwe\xc3\xab",
        "name2": "öäå"
    })
    names = '"name"=>"Norge/Noreg", "name:af"=>"Noorwe\xc3\xab", "name:ar"=>"\xd8\xa7\xd9\x84\xd9\x86\xd8\xb1\xd9\x88\xd9\x8a\xd8\xac", "name:be"=>"\xd0\x9d\xd0\xb0\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd1\x96\xd1\x8f", "name:br"=>"Norvegia", "name:ca"=>"Noruega", "name:cs"=>"Norsko", "name:cy"=>"Norwy", "name:da"=>"Norge", "name:de"=>"Norwegen", "name:el"=>"\xce\x9d\xce\xbf\xcf\x81\xce\xb2\xce\xb7\xce\xb3\xce\xaf\xce\xb1", "name:en"=>"Norway", "name:eo"=>"Norvegio", "name:es"=>"Noruega", "name:et"=>"Norra", "name:fa"=>"\xd9\x86\xd8\xb1\xd9\x88\xda\x98", "name:fi"=>"Norja", "name:fo"=>"Noregur", "name:fr"=>"Norv\xc3\xa8ge", "name:fy"=>"Noarwegen", "name:ga"=>"An Iorua", "name:gd"=>"Nirribhidh", "name:he"=>"\xd7\xa0\xd7\x95\xd7\xa8\xd7\x95\xd7\x95\xd7\x92\xd7\x99\xd7\x94", "name:hr"=>"Norve\xc5\xa1ka", "name:hu"=>"Norv\xc3\xa9gia", "name:hy"=>"\xd5\x86\xd5\xb8\xd6\x80\xd5\xbe\xd5\xa5\xd5\xa3\xd5\xab\xd5\xa1", "name:id"=>"Norwegia", "name:is"=>"Noregur", "name:it"=>"Norvegia", "name:ja"=>"\xe3\x83\x8e\xe3\x83\xab\xe3\x82\xa6\xe3\x82\xa7\xe3\x83\xbc", "name:la"=>"Norvegia", "name:lb"=>"Norwegen", "name:li"=>"Noorwege", "name:lt"=>"Norvegija", "name:lv"=>"Norv\xc4\x93\xc4\xa3ija", "name:mn"=>"\xd0\x9d\xd0\xbe\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd0\xb8", "name:nb"=>"Norge", "name:nl"=>"Noorwegen", "name:nn"=>"Noreg", "name:no"=>"Norge", "name:pl"=>"Norwegia", "name:ru"=>"\xd0\x9d\xd0\xbe\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd0\xb8\xd1\x8f", "name:sk"=>"N\xc3\xb3rsko", "name:sl"=>"Norve\xc5\xa1ka", "name:sv"=>"Norge", "name:th"=>"\xe0\xb8\x9b\xe0\xb8\xa3\xe0\xb8\xb0\xe0\xb9\x80\xe0\xb8\x97\xe0\xb8\xa8\xe0\xb8\x99\xe0\xb8\xad\xe0\xb8\xa3\xe0\xb9\x8c\xe0\xb9\x80\xe0\xb8\xa7\xe0\xb8\xa2\xe0\xb9\x8c", "name:tr"=>"Norve\xc3\xa7", "name:uk"=>"\xd0\x9d\xd0\xbe\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd1\x96\xd1\x8f", "name:vi"=>"Na Uy", "name:zh"=>"\xe6\x8c\xaa\xe5\xa8\x81", "name:haw"=>"Nolewai", "name:zh_py"=>"Nuowei", "name:zh_pyt"=>"Nu\xc3\xb3w\xc4\x93i", "official_name"=>"Kongeriket Norge", "official_name:be"=>"\xd0\x9a\xd0\xb0\xd1\x80\xd0\xb0\xd0\xbb\xd0\xb5\xd1\x9e\xd1\x81\xd1\x82\xd0\xb2\xd0\xb0 \xd0\x9d\xd0\xb0\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd1\x96\xd1\x8f", "official_name:el"=>"\xce\x92\xce\xb1\xcf\x83\xce\xaf\xce\xbb\xce\xb5\xce\xb9\xce\xbf \xcf\x84\xce\xb7\xcf\x82 \xce\x9d\xce\xbf\xcf\x81\xce\xb2\xce\xb7\xce\xb3\xce\xaf\xce\xb1\xcf\x82", "official_name:en"=>"Kingdom of Norway", "official_name:id"=>"Kerajaan Norwegia", "official_name:it"=>"Regno di Norvegia", "official_name:ja"=>"\xe3\x83\x8e\xe3\x83\xab\xe3\x82\xa6\xe3\x82\xa7\xe3\x83\xbc\xe7\x8e\x8b\xe5\x9b\xbd", "official_name:lb"=>"Kinneksr\xc3\xa4ich Norwegen", "official_name:lt"=>"Norvegijos Karalyst\xc4\x97", "official_name:sk"=>"N\xc3\xb3rske kr\xc3\xa1\xc4\xbeovstvo", "official_name:sv"=>"Konungariket Norge", "official_name:vi"=>"V\xc6\xb0\xc6\xa1ng qu\xe1\xbb\x91c Na Uy"'
    # Parse once and assert on the result; the original parsed `names`
    # a second time inside assertEqual and discarded `r`.
    r = pghstore.loads(names)
    self.assertEqual(r, {
        "name": "Norge/Noreg",
        "name:af": "Noorwe\xc3\xab",
        "name:ar": "\xd8\xa7\xd9\x84\xd9\x86\xd8\xb1\xd9\x88\xd9\x8a\xd8\xac",
        "name:be": "\xd0\x9d\xd0\xb0\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd1\x96\xd1\x8f",
        "name:br": "Norvegia",
        "name:ca": "Noruega",
        "name:cs": "Norsko",
        "name:cy": "Norwy",
        "name:da": "Norge",
        "name:de": "Norwegen",
        "name:el": "\xce\x9d\xce\xbf\xcf\x81\xce\xb2\xce\xb7\xce\xb3\xce\xaf\xce\xb1",
        "name:en": "Norway",
        "name:eo": "Norvegio",
        "name:es": "Noruega",
        "name:et": "Norra",
        "name:fa": "\xd9\x86\xd8\xb1\xd9\x88\xda\x98",
        "name:fi": "Norja",
        "name:fo": "Noregur",
        "name:fr": "Norv\xc3\xa8ge",
        "name:fy": "Noarwegen",
        "name:ga": "An Iorua",
        "name:gd": "Nirribhidh",
        "name:he": "\xd7\xa0\xd7\x95\xd7\xa8\xd7\x95\xd7\x95\xd7\x92\xd7\x99\xd7\x94",
        "name:hr": "Norve\xc5\xa1ka",
        "name:hu": "Norv\xc3\xa9gia",
        "name:hy": "\xd5\x86\xd5\xb8\xd6\x80\xd5\xbe\xd5\xa5\xd5\xa3\xd5\xab\xd5\xa1",
        "name:id": "Norwegia",
        "name:is": "Noregur",
        "name:it": "Norvegia",
        "name:ja": "\xe3\x83\x8e\xe3\x83\xab\xe3\x82\xa6\xe3\x82\xa7\xe3\x83\xbc",
        "name:la": "Norvegia",
        "name:lb": "Norwegen",
        "name:li": "Noorwege",
        "name:lt": "Norvegija",
        "name:lv": "Norv\xc4\x93\xc4\xa3ija",
        "name:mn": "\xd0\x9d\xd0\xbe\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd0\xb8",
        "name:nb": "Norge",
        "name:nl": "Noorwegen",
        "name:nn": "Noreg",
        "name:no": "Norge",
        "name:pl": "Norwegia",
        "name:ru": "\xd0\x9d\xd0\xbe\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd0\xb8\xd1\x8f",
        "name:sk": "N\xc3\xb3rsko",
        "name:sl": "Norve\xc5\xa1ka",
        "name:sv": "Norge",
        "name:th": "\xe0\xb8\x9b\xe0\xb8\xa3\xe0\xb8\xb0\xe0\xb9\x80\xe0\xb8\x97\xe0\xb8\xa8\xe0\xb8\x99\xe0\xb8\xad\xe0\xb8\xa3\xe0\xb9\x8c\xe0\xb9\x80\xe0\xb8\xa7\xe0\xb8\xa2\xe0\xb9\x8c",
        "name:tr": "Norve\xc3\xa7",
        "name:uk": "\xd0\x9d\xd0\xbe\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd1\x96\xd1\x8f",
        "name:vi": "Na Uy",
        "name:zh": "\xe6\x8c\xaa\xe5\xa8\x81",
        "name:haw": "Nolewai",
        "name:zh_py": "Nuowei",
        "name:zh_pyt": "Nu\xc3\xb3w\xc4\x93i",
        "official_name": "Kongeriket Norge",
        "official_name:be": "\xd0\x9a\xd0\xb0\xd1\x80\xd0\xb0\xd0\xbb\xd0\xb5\xd1\x9e\xd1\x81\xd1\x82\xd0\xb2\xd0\xb0 \xd0\x9d\xd0\xb0\xd1\x80\xd0\xb2\xd0\xb5\xd0\xb3\xd1\x96\xd1\x8f",
        "official_name:el": "\xce\x92\xce\xb1\xcf\x83\xce\xaf\xce\xbb\xce\xb5\xce\xb9\xce\xbf \xcf\x84\xce\xb7\xcf\x82 \xce\x9d\xce\xbf\xcf\x81\xce\xb2\xce\xb7\xce\xb3\xce\xaf\xce\xb1\xcf\x82",
        "official_name:en": "Kingdom of Norway",
        "official_name:id": "Kerajaan Norwegia",
        "official_name:it": "Regno di Norvegia",
        "official_name:ja": "\xe3\x83\x8e\xe3\x83\xab\xe3\x82\xa6\xe3\x82\xa7\xe3\x83\xbc\xe7\x8e\x8b\xe5\x9b\xbd",
        "official_name:lb": "Kinneksr\xc3\xa4ich Norwegen",
        "official_name:lt": "Norvegijos Karalyst\xc4\x97",
        "official_name:sk": "N\xc3\xb3rske kr\xc3\xa1\xc4\xbeovstvo",
        "official_name:sv": "Konungariket Norge",
        "official_name:vi": "V\xc6\xb0\xc6\xa1ng qu\xe1\xbb\x91c Na Uy"
    })
def objects_by_id(id_list, options):
    """Yield full OSM objects (nodes, ways, relations) for prefixed ids.

    ``id_list`` holds strings such as 'n123', 'w45' or 'r67'; the leading
    letter selects the table, the remainder is the numeric id.  Each yielded
    dict carries id, members, parsed hstore tags (augmented with osm:*
    metadata pseudo-tags), geometry and a type list.  Runs inside PL/Python
    (``plpy``) against an osmosis-style schema.
    """
    # --- nodes ---
    _id_list = [int(i[1:]) for i in id_list if i[0] == 'n']
    plan = plpy.prepare(
        'select *, (select name from users where id=user_id) as user from nodes where id=any($1)',
        ['bigint[]'])
    res = plpy.cursor(plan, [_id_list])
    for r in res:
        t = {
            'id': 'n' + str(r['id']),
            'members': [],
            'tags': pghstore.loads(r['tags']),
            'geo': r['geom'],
            'types': ['node', 'point']
        }
        # Expose object metadata as osm:* pseudo-tags.
        t['tags']['osm:id'] = str(t['id'])
        t['tags']['osm:version'] = str(r['version'])
        t['tags']['osm:user_id'] = str(r['user_id'])
        t['tags']['osm:user'] = r['user']
        t['tags']['osm:timestamp'] = str(r['tstamp'])
        t['tags']['osm:changeset'] = str(r['changeset_id'])
        yield (t)

    # --- ways (node ids aggregated in sequence order) ---
    _id_list = [int(i[1:]) for i in id_list if i[0] == 'w']
    plan = plpy.prepare(
        'select id, tags, version, user_id, (select name from users where id=user_id) as user, tstamp, changeset_id, linestring as linestring, array_agg(node_id) as member_ids from (select ways.*, node_id from ways left join way_nodes on ways.id=way_nodes.way_id where ways.id=any($1) order by way_nodes.sequence_id) t group by id, tags, version, user_id, tstamp, changeset_id, linestring',
        ['bigint[]'])
    res = plpy.cursor(plan, [_id_list])
    for r in res:
        t = {
            'id': 'w' + str(r['id']),
            # Way nodes become member entries, preserving document order.
            'members': [{
                'member_id': 'n' + str(m),
                'sequence_id': str(i)
            } for i, m in enumerate(r['member_ids'])],
            'tags': pghstore.loads(r['tags']),
            'geo': r['linestring'],
            'types': ['way', 'line', 'area']
        }
        t['tags']['osm:id'] = str(t['id'])
        t['tags']['osm:version'] = str(r['version'])
        t['tags']['osm:user_id'] = str(r['user_id'])
        t['tags']['osm:user'] = r['user']
        t['tags']['osm:timestamp'] = str(r['tstamp'])
        t['tags']['osm:changeset'] = str(r['changeset_id'])
        yield (t)

    # --- relations (member ids/roles aggregated in parallel arrays) ---
    _id_list = [int(i[1:]) for i in id_list if i[0] == 'r']
    plan = plpy.prepare(
        'select id, tags, version, user_id, (select name from users where id=user_id) as user, tstamp, changeset_id, array_agg(lower(member_type) || member_id) as member_ids, array_agg(member_role) as member_roles from (select relations.*, member_type, member_id, member_role from relations left join relation_members on relations.id=relation_members.relation_id where relations.id=any($1) order by relation_members.sequence_id) t group by id, tags, version, user_id, tstamp, changeset_id',
        ['bigint[]'])
    res = plpy.cursor(plan, [_id_list])
    for r in res:
        t = {
            'id': 'r' + str(r['id']),
            'tags': pghstore.loads(r['tags']),
            # Pair up the two aggregated arrays, keeping their shared order.
            'members': [{
                'member_id': m[0],
                'role': m[1],
                'sequence_id': i
            } for i, m in enumerate(zip(r['member_ids'], r['member_roles']))],
            'geo': None,
            'types': ['relation']
        }
        t['tags']['osm:id'] = str(t['id'])
        t['tags']['osm:version'] = str(r['version'])
        t['tags']['osm:user_id'] = str(r['user_id'])
        t['tags']['osm:user'] = r['user']
        t['tags']['osm:timestamp'] = str(r['tstamp'])
        t['tags']['osm:changeset'] = str(r['changeset_id'])
        yield (t)
def costAlgo(r):
    """Recompute the routing cost of the way affected by a streamed event.

    Duplicate of the earlier costAlgo; same contract: ``r[1]`` is a JSON
    payload with "from"/"to" coordinate dicts, a "key", a "timestamp" and a
    numeric "value".  Finds the way by shrinking geohash prefixes, folds the
    hstore tags into a cost, and writes the cost back.
    """
    parsedJson = json.loads(r[1])

    # "from"/"to" are lat/lon dicts; str() on them (as before) would break
    # the .get() calls below.
    fromPoint = parsedJson["from"]
    geohash = Geohash.encode(fromPoint.get("lat"), fromPoint.get("lon"), 9)
    dest = parsedJson["to"]
    toHash = Geohash.encode(dest.get("lat"), dest.get("lon"), 8)

    keyHash = str(parsedJson["key"])
    tStamp = str(parsedJson["timestamp"])
    # Must be numeric — it feeds the arithmetic below.
    value = float(parsedJson["value"])

    try:
        conn = psycopg2.connect("dbname='DRP' user='******' password='******'")
    except psycopg2.Error:
        print("I am unable to connect to the database.")
        return  # no connection, nothing to do

    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    result = None
    try:
        # Strip one geohash character per miss; parameterized LIKE fixes
        # the injection risk and the '%' that sat outside the quotes.
        while result is None and geohash:
            cur.execute(
                "SELECT * FROM geohashed_ways WHERE geohash LIKE %s;",
                (geohash + "%",))
            result = cur.fetchone()
            geohash = geohash[:-1]
    except psycopg2.Error:
        print("I can't SELECT.")

    if result is None:
        cur.close()
        conn.close()
        return

    # Fold stored way attributes into the cost.
    tagDict = pghstore.loads(result['tags'])
    isAccident = keyHash == 'TRAFFIC_INCIDENT'
    cost = value * 10 if isAccident else value

    for k, v in tagDict.items():  # .items(): a bare dict iterates keys only
        if k == 'lanes':
            try:
                lanes = int(v)  # hstore values are strings
            except ValueError:
                continue
            oneway = result['oneway']  # original *called* result('oneway')
            if lanes == 2 and oneway == 'yes':
                cost += value * 10 if isAccident else 20
            elif lanes == 2 and oneway == 'no':
                cost += value * 5 if isAccident else 15
            elif lanes > 2 and oneway == 'yes':
                cost += value * 5 if isAccident else 10
            elif lanes > 2 and oneway == 'no':
                cost += value * 2 if isAccident else 5
        elif k == 'highway':
            # Bigger road classes get a discount.
            cost -= {'primary': 20, 'secondary': 15,
                     'trunk': 10, 'trunk_link': 5}.get(v, 0)
        elif k == 'maxspeed':
            try:
                speed = int(v)
            except ValueError:
                continue
            if speed >= 50:
                cost -= speed
            else:
                cost += speed

    try:
        # The original "INSERT ... WHERE" was malformed; use a
        # parameterized UPDATE.
        cur.execute("UPDATE ways SET cost = %s WHERE id = %s;",
                    (cost, result['id']))
        conn.commit()
    except psycopg2.Error:
        print("I can't INSERT")
    cur.close()
    conn.close()
def test_escaped_double_quote(self):
    """Escaped double quotes survive parsing in both key and value."""
    parsed = pghstore.loads('"k\\"ey" => "va\\"lue"')
    self.assertEqual(parsed, {"k\\\"ey": "va\\\"lue"})
def test_escaped_double_quote(self):
    """A backslash-escaped quote inside a key or value is preserved."""
    expected = {"k\\\"ey": "va\\\"lue"}
    self.assertEqual(pghstore.loads('"k\\"ey" => "va\\"lue"'), expected)
def objects_by_id(id_list, options):
    """Yield OSM objects for prefixed ids from the osm2pgsql rendering tables.

    ``id_list`` entries look like 'n123'/'w45'/'r67'.  The ``# START``/
    ``# END`` comment pairs are template markers consumed by a preprocessor
    that selects the column-based or hstore-based tag handling — do not
    remove or reorder them.
    """
    # --- nodes (planet_osm_point) ---
    _id_list = [int(i[1:]) for i in id_list if i[0] == 'n']
    plan = plpy.prepare('select * from planet_osm_point where osm_id=any($1)',
                        ['bigint[]'])
    res = plpy.cursor(plan, [_id_list])
    for r in res:
        t = {
            'id': 'n' + str(r['osm_id']),
            'members': [],
            'geo': r['way'],
            'types': ['node', 'point']
        }
        # START db.columns.node
        # Collect non-null plain columns as tags.
        t['tags'] = {
            k: r[k]
            for k in r
            if k not in ['id', 'geo', 'types', 'tags', 'way', 'osm_id']
            if r[k] is not None
        }
        # START db.has-hstore
        # NOTE(review): '|' unions the two items() views; for duplicate keys
        # the surviving value is unspecified — confirm intended precedence.
        t['tags'] = dict(pghstore.loads(r['tags']).items() | t['tags'].items())
        # END db.has-hstore
        # END db.columns.node
        # START db.hstore-only
        t['tags'] = pghstore.loads(r['tags'])
        # END db.hstore-only
        t['tags']['osm:id'] = t['id']
        yield t

    # --- ways (planet_osm_line union planet_osm_polygon) ---
    _id_list = [int(i[1:]) for i in id_list if i[0] == 'w']
    plan = plpy.prepare(
        "select t.*, planet_osm_ways.nodes from (select osm_id, tags, way, 'line' as _type from planet_osm_line where osm_id=any($1) union select osm_id, tags, way, 'way' as _type from planet_osm_polygon where osm_id=any($1)) t left join planet_osm_ways on t.osm_id=planet_osm_ways.id",
        ['bigint[]'])
    res = plpy.cursor(plan, [_id_list])
    for r in res:
        t = {
            'id': 'w' + str(r['osm_id']),
            # Way nodes become ordered member entries.
            'members': [{
                'member_id': 'n' + str(m),
                'sequence_id': str(i)
            } for i, m in enumerate(r['nodes'])],
            'geo': r['way'],
            'types': ['way', r['_type']]
        }
        # START db.columns.way
        t['tags'] = {
            k: r[k]
            for k in r
            if k not in ['osm_id', 'geo', 'types', 'tags', 'nodes', '_type', 'way']
            if r[k] is not None
        }
        # START db.has-hstore
        t['tags'] = dict(pghstore.loads(r['tags']).items() | t['tags'].items())
        # END db.has-hstore
        # END db.columns.way
        # START db.hstore-only
        t['tags'] = pghstore.loads(r['tags'])
        # END db.hstore-only
        t['tags']['osm:id'] = t['id']
        yield t

    # --- relations (planet_osm_rels, polygon geometry if one exists) ---
    _id_list = [int(i[1:]) for i in id_list if i[0] == 'r']
    plan = plpy.prepare(
        "select id, planet_osm_rels.tags, members, planet_osm_polygon.way from planet_osm_rels left join planet_osm_polygon on -planet_osm_rels.id=planet_osm_polygon.osm_id where id=any($1)",
        ['bigint[]'])
    res = plpy.cursor(plan, [_id_list])
    for r in res:
        t = {
            'id': 'r' + str(r['id']),
            # tags/members are stored as flat text arrays in planet_osm_rels.
            'tags': flatarray_to_tags(r['tags']) if r['tags'] else {},
            'members': flatarray_to_members(r['members']),
            'geo': r['way'],
            'types': ['relation'] if r['way'] is None else ['relation', 'area']
        }
        t['tags']['osm:id'] = t['id']
        yield t
def objects_bbox(_bbox, db_selects, options, add_columns={}, add_param_type=[], add_param_value=[]):
    """Yield OSM objects from the osm2pgsql tables intersecting ``_bbox``.

    ``db_selects`` maps object classes ('*', 'node', 'point', 'line', 'way',
    'area', 'relation') to SQL where-clause fragments; each of the five
    query sections below runs only if at least one matching key is present.
    The ``# START``/``# END`` pairs — both the real comments and the ones
    embedded inside the SQL strings — are preprocessor template markers;
    ``{sql.columns.*}`` and ``__PARENT_BBOX__``/``__TYPE_*__`` placeholders
    are substituted before/at runtime.  Do not alter them.
    """
    import pghstore
    qry = ''
    bbox = ''
    replacements = {
        'parent_bbox': '',
    }
    if _bbox is not None:
        # Index-assisted bbox test plus exact intersection.
        bbox = 'way && $1 and ST_Intersects(way, $1) and'
        replacements[
            'parent_bbox'] = 'way && $1 and ST_Intersects(way, $1) and'

    if len(add_columns):
        # Extra select expressions, aliased to their caller-supplied names.
        add_columns_qry = ', ' + ', '.join(
            [q + ' as "' + k + '"' for k, q in add_columns.items()])
    else:
        add_columns_qry = ''

    if _bbox:
        param_type = ['geometry'] + add_param_type
        param_value = [_bbox] + add_param_value
    else:
        param_type = add_param_type
        param_value = add_param_value

    # planet_osm_point
    w = []
    for t in ('*', 'node', 'point'):
        if t in db_selects:
            w.append(db_selects[t])
    if len(w):
        qry = '''
select 'n' || cast(osm_id as text) as id, way as geo, Array['point', 'node'] as types
# START db.has-hstore
, tags
# END db.has-hstore
{sql.columns.node}
{add_columns}
from planet_osm_point
where {bbox} ( {w} )
'''.format(bbox=bbox, w=' or '.join(w), add_columns=add_columns_qry)
        qry = qry.replace('__PARENT_BBOX__', replacements['parent_bbox'])
        qry = qry.replace('__TYPE_SHORT__', 'n')
        qry = qry.replace('__TYPE_MODIFY__', '')
        plan = plpy.prepare(qry, param_type)
        res = plpy.cursor(plan, param_value)
        for r in res:
            t = {
                'id': r['id'],
                'geo': r['geo'],
                'types': r['types'],
            }
            # START db.columns.node
            # Non-null plain columns become tags.
            t['tags'] = {
                k: r[k]
                for k in r
                if k not in ['id', 'geo', 'types', 'tags'] and
                k not in add_columns if r[k] is not None
            }
            # START db.has-hstore
            # NOTE(review): items()-union precedence on duplicate keys is
            # unspecified — confirm which source should win.
            t['tags'] = dict(
                pghstore.loads(r['tags']).items() | t['tags'].items())
            # END db.has-hstore
            # END db.columns.node
            # START db.hstore-only
            t['tags'] = pghstore.loads(r['tags'])
            # END db.hstore-only
            t['tags']['osm:id'] = str(r['id'])
            for k in add_columns:
                t[k] = r[k]
            yield (t)

    # planet_osm_line - ways
    w = []
    for t in ('*', 'line', 'way'):
        if t in db_selects:
            w.append(db_selects[t])
    if len(w):
        qry = '''
select 'w' || cast(osm_id as text) as id, way as geo, Array['line', 'way'] as types
# START db.has-hstore
, tags
# END db.has-hstore
{sql.columns.way}
{add_columns}
from planet_osm_line
where osm_id>0 and {bbox} ( {w} )
'''.format(bbox=bbox, w=' or '.join(w), add_columns=add_columns_qry)
        qry = qry.replace('__PARENT_BBOX__', replacements['parent_bbox'])
        qry = qry.replace('__TYPE_SHORT__', 'w')
        qry = qry.replace('__TYPE_MODIFY__', '')
        plan = plpy.prepare(qry, param_type)
        res = plpy.cursor(plan, param_value)
        for r in res:
            t = {
                'id': r['id'],
                'geo': r['geo'],
                'types': r['types'],
            }
            # START db.columns.way
            t['tags'] = {
                k: r[k]
                for k in r
                if k not in ['id', 'geo', 'types', 'tags'] and
                k not in add_columns if r[k] is not None
            }
            # START db.has-hstore
            t['tags'] = dict(
                pghstore.loads(r['tags']).items() | t['tags'].items())
            # END db.has-hstore
            # END db.columns.way
            # START db.hstore-only
            t['tags'] = pghstore.loads(r['tags'])
            # END db.hstore-only
            t['tags']['osm:id'] = str(r['id'])
            for k in add_columns:
                t[k] = r[k]
            yield (t)

    # planet_osm_line - relations (osm2pgsql stores them with negated ids)
    w = []
    for t in ('*', 'line', 'relation'):
        if t in db_selects:
            w.append(db_selects[t])
    if len(w):
        qry = '''
select 'r' || cast(-osm_id as text) as id, way as geo, Array['line', 'relation'] as types
# START db.has-hstore
, tags
# END db.has-hstore
{sql.columns.way}
{add_columns}
from planet_osm_line
where osm_id<0 and {bbox} ( {w} )
'''.format(bbox=bbox, w=' or '.join(w), add_columns=add_columns_qry)
        qry = qry.replace('__PARENT_BBOX__', replacements['parent_bbox'])
        # NOTE(review): __TYPE_SHORT__='w' for a relation section (and 'r'
        # with modify '-' for polygon *ways* below) looks inconsistent with
        # the other sections — confirm against the template's expectations.
        qry = qry.replace('__TYPE_SHORT__', 'w')
        qry = qry.replace('__TYPE_MODIFY__', '')
        plan = plpy.prepare(qry, param_type)
        res = plpy.cursor(plan, param_value)
        for r in res:
            t = {
                'id': r['id'],
                'geo': r['geo'],
                'types': r['types'],
            }
            # START db.columns.way
            t['tags'] = {
                k: r[k]
                for k in r
                if k not in ['id', 'geo', 'types', 'tags'] and
                k not in add_columns if r[k] is not None
            }
            # START db.has-hstore
            t['tags'] = dict(
                pghstore.loads(r['tags']).items() | t['tags'].items())
            # END db.has-hstore
            # END db.columns.way
            # START db.hstore-only
            t['tags'] = pghstore.loads(r['tags'])
            # END db.hstore-only
            t['tags']['osm:id'] = str(r['id'])
            for k in add_columns:
                t[k] = r[k]
            yield (t)

    # planet_osm_polygon - ways
    w = []
    for t in ('*', 'area', 'way'):
        if t in db_selects:
            w.append(db_selects[t])
    if len(w):
        qry = '''
select 'w' || cast(osm_id as text) as id, way as geo, Array['area', 'way'] as types
# START db.has-hstore
, tags
# END db.has-hstore
{sql.columns.way}
{add_columns}
from planet_osm_polygon
where osm_id>0 and {bbox} ( {w} )
'''.format(bbox=bbox, w=' or '.join(w), add_columns=add_columns_qry)
        qry = qry.replace('__PARENT_BBOX__', replacements['parent_bbox'])
        qry = qry.replace('__TYPE_SHORT__', 'r')
        qry = qry.replace('__TYPE_MODIFY__', '-')
        plan = plpy.prepare(qry, param_type)
        res = plpy.cursor(plan, param_value)
        for r in res:
            t = {
                'id': r['id'],
                'geo': r['geo'],
                'types': r['types'],
            }
            # START db.columns.way
            t['tags'] = {
                k: r[k]
                for k in r
                if k not in ['id', 'geo', 'types', 'tags'] and
                k not in add_columns if r[k] is not None
            }
            # START db.has-hstore
            t['tags'] = dict(
                pghstore.loads(r['tags']).items() | t['tags'].items())
            # END db.has-hstore
            # END db.columns.way
            # START db.hstore-only
            t['tags'] = pghstore.loads(r['tags'])
            # END db.hstore-only
            t['tags']['osm:id'] = str(r['id'])
            for k in add_columns:
                t[k] = r[k]
            yield (t)

    # planet_osm_polygon - relations
    w = []
    for t in ('*', 'area', 'relation'):
        if t in db_selects:
            w.append(db_selects[t])
    if len(w):
        qry = '''
select 'r' || cast(-osm_id as text) as id, way as geo, Array['area', 'relation'] as types
# START db.has-hstore
, tags
# END db.has-hstore
{sql.columns.way}
{add_columns}
from planet_osm_polygon
where osm_id<0 and {bbox} ( {w} )
'''.format(bbox=bbox, w=' or '.join(w), add_columns=add_columns_qry)
        qry = qry.replace('__PARENT_BBOX__', replacements['parent_bbox'])
        qry = qry.replace('__TYPE_SHORT__', 'r')
        qry = qry.replace('__TYPE_MODIFY__', '-')
        plan = plpy.prepare(qry, param_type)
        res = plpy.cursor(plan, param_value)
        for r in res:
            t = {
                'id': r['id'],
                'geo': r['geo'],
                'types': r['types'],
            }
            # START db.columns.way
            t['tags'] = {
                k: r[k]
                for k in r
                if k not in ['id', 'geo', 'types', 'tags'] and
                k not in add_columns if r[k] is not None
            }
            # START db.has-hstore
            t['tags'] = dict(
                pghstore.loads(r['tags']).items() | t['tags'].items())
            # END db.has-hstore
            # END db.columns.way
            # START db.hstore-only
            t['tags'] = pghstore.loads(r['tags'])
            # END db.hstore-only
            t['tags']['osm:id'] = str(r['id'])
            for k in add_columns:
                t[k] = r[k]
            yield (t)
def objects_member_of(objects, other_selects, self_selects, options):
    """Yield (member, parent, link_tags) triples for parents of ``objects``.

    For each input object, finds the relations (and, for nodes, the ways)
    that reference it in an osmosis-style schema.  ``parent`` is a dict with
    parsed hstore tags augmented by osm:* metadata; ``link_tags`` describes
    the membership (sequence, role).  ``self_selects`` is currently unused
    but kept for interface compatibility.
    """
    if 'relation' in other_selects:
        plan = plpy.prepare(
            'select *, (select name from users where id=user_id) as user from relation_members join relations on relation_members.relation_id=relations.id where member_id=$1 and member_type=$2',
            ['bigint', 'text'])
        for ob in objects:
            member_id = ob['id']
            res = plpy.cursor(plan,
                              [int(member_id[1:]), member_id[0:1].upper()])
            for r in res:
                t = {
                    'id': 'r' + str(r['id']),
                    'tags': pghstore.loads(r['tags']),
                    'types': ['relation'],
                    'geo': None,
                }
                link_tags = {
                    'sequence_id': str(r['sequence_id']),
                    'role': str(r['member_role']),
                    'member_id':
                    r['member_type'].lower() + str(r['member_id']),
                }
                t['tags']['osm:id'] = str(t['id'])
                t['tags']['osm:version'] = str(r['version'])
                t['tags']['osm:user_id'] = str(r['user_id'])
                t['tags']['osm:user'] = r['user']
                t['tags']['osm:timestamp'] = str(r['tstamp'])
                t['tags']['osm:changeset'] = str(r['changeset_id'])
                yield ((ob, t, link_tags))

    if 'way' in other_selects:
        plan = plpy.prepare(
            'select *, (select name from users where id=user_id) as user from way_nodes join ways on way_nodes.way_id=ways.id where node_id=$1',
            ['bigint'])
        for o in objects:
            member_id = o['id']
            # Only nodes can be way members.
            if member_id[0] != 'n':
                continue
            num_id = int(member_id[1:])
            res = plpy.cursor(plan, [num_id])
            for r in res:
                t = {
                    'id': 'w' + str(r['id']),
                    'tags': pghstore.loads(r['tags']),
                    'types': ['way'],
                    'geo': r['linestring'],
                }
                link_tags = {
                    'member_id': member_id,
                    'sequence_id': str(r['sequence_id'])
                }
                t['tags']['osm:id'] = str(t['id'])
                t['tags']['osm:version'] = str(r['version'])
                t['tags']['osm:user_id'] = str(r['user_id'])
                t['tags']['osm:user'] = r['user']
                t['tags']['osm:timestamp'] = str(r['tstamp'])
                t['tags']['osm:changeset'] = str(r['changeset_id'])
                # BUG FIX: the original yielded `ob`, a stale loop variable
                # from the relation branch above (wrong member object, or a
                # NameError when only 'way' is selected).  The current
                # member in this loop is `o`.
                yield ((o, t, link_tags))
def objects(_bbox, where_clauses, add_columns=[], add_param_type=[], add_param_value=[]):
    """Yield matching rows from the planet_osm_* tables as dicts.

    Each yielded row carries a type-prefixed id ('n'/'w'/'r' + osm_id), its
    hstore tags parsed into a dict, the geometry as 'geo' and a 'types'
    array.  The keys present in ``where_clauses`` ('*', 'node', 'point',
    'line', 'way', 'area', 'relation') decide which of the five table
    queries run.
    """
    import pghstore

    bbox = 'way && $1 and' if _bbox is not None else ''
    add_columns = ', ' + ', '.join(add_columns) if len(add_columns) else ''
    param_type = ['geometry'] + add_param_type
    param_value = [_bbox] + add_param_value

    def run(statement):
        # Prepare/execute one query and yield its rows with parsed tags.
        plan = plpy.prepare(statement, param_type)
        for row in plpy.execute(plan, param_value):
            row['tags'] = pghstore.loads(row['tags'])
            yield row

    # planet_osm_point
    conds = [where_clauses[key] for key in ('*', 'node', 'point')
             if key in where_clauses]
    if conds:
        qry = '''
select 'n' || cast(osm_id as text) as id, tags, way as geo, Array['point', 'node'] as types
{add_columns}
from planet_osm_point
where {bbox} ( {w} )
'''.format(bbox=bbox, w=' or '.join(conds), add_columns=add_columns)
        for row in run(qry):
            yield row

    # planet_osm_line - ways
    conds = [where_clauses[key] for key in ('*', 'line', 'way')
             if key in where_clauses]
    if conds:
        qry = '''
select 'w' || cast(osm_id as text) as id, tags, way as geo, Array['line', 'way'] as types
{add_columns}
from planet_osm_line
where osm_id>0 and {bbox} ( {w} )
'''.format(bbox=bbox, w=' or '.join(conds), add_columns=add_columns)
        for row in run(qry):
            yield row

    # planet_osm_line - relations (negated osm_id)
    conds = [where_clauses[key] for key in ('*', 'line', 'relation')
             if key in where_clauses]
    if conds:
        qry = '''
select 'r' || cast(-osm_id as text) as id, tags, way as geo, Array['line', 'relation'] as types
{add_columns}
from planet_osm_line
where osm_id<0 and {bbox} ( {w} )
'''.format(bbox=bbox, w=' or '.join(conds), add_columns=add_columns)
        for row in run(qry):
            yield row

    # planet_osm_polygon - ways
    conds = [where_clauses[key] for key in ('*', 'area', 'way')
             if key in where_clauses]
    if conds:
        qry = '''
select 'w' || cast(osm_id as text) as id, tags, way as geo, Array['area', 'way'] as types
{add_columns}
from planet_osm_polygon
where osm_id>0 and {bbox} ( {w} )
'''.format(bbox=bbox, w=' or '.join(conds), add_columns=add_columns)
        for row in run(qry):
            yield row

    # planet_osm_polygon - relations (negated osm_id)
    conds = [where_clauses[key] for key in ('*', 'area', 'relation')
             if key in where_clauses]
    if conds:
        qry = '''
select 'r' || cast(-osm_id as text) as id, tags, way as geo, Array['area', 'relation'] as types
{add_columns}
from planet_osm_polygon
where osm_id<0 and {bbox} ( {w} )
'''.format(bbox=bbox, w=' or '.join(conds), add_columns=add_columns)
        for row in run(qry):
            yield row
def test_empty(self):
    """An empty hstore string parses to an empty dict."""
    result = pghstore.loads('')
    self.assertEqual(result, {})
def test_simple(self):
    """Single and multiple key/value pairs parse into the expected dicts."""
    single = pghstore.loads('"key" => "value"')
    self.assertEqual(single, {"key": "value"})
    double = pghstore.loads('"key" => "value", "key2" => "value2"')
    self.assertEqual(double, {"key": "value", "key2": "value2"})
def parse(self):
    """Parse the OSM file into per-layer metadata and GeoJSON outputs.

    Opens the file through OGR, collects the tag keys present in each
    layer's 'other_tags' hstore column (filtered by the per-layer column
    whitelist), optionally drops empty layers, then writes one GeoJSON
    temp file per layer with one attribute column per collected tag.
    Returns the ``layers`` dict.  NOTE(review): Python-2-only constructs
    (print statement, iteritems, unicode) — confirm target interpreter.
    """
    # Configuration for OGR
    gdal.SetConfigOption('OSM_CONFIG_FILE', self._osm_conf)
    gdal.SetConfigOption('OSM_USE_CUSTOM_INDEXING', 'NO')

    if not isfile(self.__osmFile):
        raise GeoAlgorithmExecutionException("File doesn't exist")

    uri = self.__osmFile + "|layername="
    layers = {}

    # If loadOnly, no parsing required:
    # It's used only when we ask to open an osm file
    if self.__loadOnly:
        file_name = basename(self.__osmFile)
        for layer in self.__layers:
            layers[layer] = QgsVectorLayer(
                uri + layer, file_name + " " + layer, "ogr")

            if not layers[layer].isValid():
                print "Error on the layer", layers[layer].lastError()

        return layers

    # Check if the order is node before way,relation
    # We don't check way before relation,
    # because we can have only nodes and relations
    with open(self.__osmFile) as f:
        for line in f:
            if re.search(r'node', line):
                break
            if re.search(r'(way|relation)', line):
                raise WrongOrderOSMException

    # Foreach layers
    for layer in self.__layers:
        self.signalText.emit(tr("QuickOSM", u"Parsing layer : " + layer))
        layers[layer] = {}

        # Reading it with a QgsVectorLayer
        layers[layer]['vectorLayer'] = QgsVectorLayer(
            uri + layer, "test_" + layer, "ogr")

        if not layers[layer]['vectorLayer'].isValid():
            msg = "Error on the layer : " + \
                layers[layer]['vectorLayer'].lastError()
            raise GeoAlgorithmExecutionException(msg)

        # Set some default tags
        layers[layer]['tags'] = ['full_id', 'osm_id', 'osm_type']

        # Save the geometry type of the layer
        layers[layer]['geomType'] = layers[layer]['vectorLayer'].wkbType()

        # Set a featureCount
        layers[layer]['featureCount'] = 0

        # Get the other_tags
        fields = layers[layer]['vectorLayer'].pendingFields()
        field_names = [field.name() for field in fields]
        other_tags_index = field_names.index('other_tags')

        features = layers[layer]['vectorLayer'].getFeatures()
        for i, feature in enumerate(features):
            layers[layer]['featureCount'] += 1

            # Improve the parsing if comma in whitelist,
            # we skip the parsing of tags, but featureCount is needed
            if self.__whiteListColumn[layer] == ',':
                continue

            # Get the "others_tags" field
            attributes = feature.attributes()[other_tags_index]

            if attributes:
                # other_tags is an hstore-formatted string.
                h_store = pghstore.loads(attributes)
                for key in h_store:
                    if key not in layers[layer]['tags']:
                        # If the key in OSM is not already in the table
                        if self.__whiteListColumn[layer]:
                            if key in self.__whiteListColumn[layer]:
                                layers[layer]['tags'].append(key)
                        else:
                            layers[layer]['tags'].append(key)

            percent = int(100 / len(self.__layers) * (i + 1))
            self.signalPercentage.emit(percent)

    # Delete empty layers if this option is set to True
    if self.__deleteEmptyLayers:
        delete_layers = []
        for keys, values in layers.iteritems():
            if values['featureCount'] < 1:
                delete_layers.append(keys)
        for layer in delete_layers:
            del layers[layer]

    # Creating GeoJSON files for each layers
    for layer in self.__layers:
        msg = tr("QuickOSM", u"Creating GeoJSON file : " + layer)
        self.signalText.emit(msg)
        self.signalPercentage.emit(0)

        # Creating the temp file
        tf = tempfile.NamedTemporaryFile(
            delete=False, suffix="_" + layer + ".geojson")
        layers[layer]['geojsonFile'] = tf.name
        tf.flush()
        tf.close()

        # Adding the attribute table
        fields = QgsFields()
        for key in layers[layer]['tags']:
            fields.append(QgsField(key, QVariant.String))
        encoding = get_default_encoding()
        file_writer = QgsVectorFileWriter(
            layers[layer]['geojsonFile'], encoding, fields,
            layers[layer]['geomType'], layers[layer]['vectorLayer'].crs(),
            'GeoJSON')

        # Foreach feature in the layer
        features = layers[layer]['vectorLayer'].getFeatures()
        for i, feature in enumerate(features):
            fet = QgsFeature()
            fet.setGeometry(feature.geometry())
            new_attributes = []
            attributes = feature.attributes()

            if layer in ['points', 'lines', 'multilinestrings']:
                if layer == 'points':
                    osm_type = "node"
                elif layer == 'lines':
                    osm_type = "way"
                elif layer == 'multilinestrings':
                    osm_type = 'relation'

                new_attributes.append(
                    self.DIC_OSM_TYPE[osm_type] + str(attributes[0]))
                new_attributes.append(attributes[0])
                new_attributes.append(osm_type)

                if attributes[1]:
                    # Fill one column per collected tag (beyond the 3
                    # defaults), blank when the feature lacks the tag.
                    h_store = pghstore.loads(attributes[1])
                    for tag in layers[layer]['tags'][3:]:
                        if unicode(tag) in h_store:
                            new_attributes.append(h_store[tag])
                        else:
                            new_attributes.append("")
                    fet.setAttributes(new_attributes)
                    file_writer.addFeature(fet)

            elif layer == 'multipolygons':
                # Multipolygons carry either a relation id (attributes[0])
                # or a way id (attributes[1]).
                if attributes[0]:
                    osm_type = "relation"
                    new_attributes.append(
                        self.DIC_OSM_TYPE[osm_type] + str(attributes[0]))
                    new_attributes.append(str(attributes[0]))
                else:
                    osm_type = "way"
                    new_attributes.append(
                        self.DIC_OSM_TYPE[osm_type] + str(attributes[1]))
                    new_attributes.append(attributes[1])
                new_attributes.append(osm_type)

                h_store = pghstore.loads(attributes[2])
                for tag in layers[layer]['tags'][3:]:
                    if unicode(tag) in h_store:
                        new_attributes.append(h_store[tag])
                    else:
                        new_attributes.append("")
                fet.setAttributes(new_attributes)
                file_writer.addFeature(fet)

            percentage = int(
                100 / layers[layer]['featureCount'] * (i + 1))
            self.signalPercentage.emit(percentage)
        del file_writer

    return layers
def objects_bbox(_bbox, db_selects, options, add_columns=None,
                 add_param_type=None, add_param_value=None):
    """Yield OSM objects (nodes, ways, multipolygons, relations) in a bbox.

    Runs inside PL/Python: all queries go through the global ``plpy`` module
    against an osmosis-style schema (nodes/ways/relations/users tables).

    Parameters:
      _bbox           -- PostGIS geometry to intersect with, or None/falsy to
                         skip the spatial filter (relations are never
                         bbox-matched in any case).
      db_selects      -- dict mapping an object kind ('*', 'node', 'point',
                         'way', 'line', 'area', 'relation') to a SQL WHERE
                         fragment; fragments for matching kinds are OR-joined.
      options         -- not used by this function; kept for interface
                         compatibility with its callers.
      add_columns     -- optional {alias: sql_expression} extra select
                         columns; the placeholder '__geo__' in an expression
                         is replaced by the geometry column of each query.
      add_param_type  -- SQL types of extra prepared-statement parameters.
      add_param_value -- values for those extra parameters.

    Yields dicts with keys 'id' (prefixed n/w/r/m), 'tags' (hstore decoded to
    a dict, plus 'osm:*' meta pseudo-tags), 'types' (list), 'geo', and the
    remaining raw columns of each query.

    NOTE(review): the ``# START/ELSE/END db.multipolygons*`` comments appear
    to be build-time preprocessor markers that select one schema variant;
    both SQL variants are emitted in this raw source. Do not remove or
    reorder them.
    """
    # Replace the former mutable defaults (`={}` / `=[]`) with None
    # sentinels: behavior is identical for callers, but no mutable object is
    # shared across calls.
    if add_columns is None:
        add_columns = {}
    if add_param_type is None:
        add_param_type = []
    if add_param_value is None:
        add_param_value = []

    import pghstore

    def _add_osm_meta(r):
        # Mirror the osmosis metadata columns into 'osm:*' pseudo-tags.
        # (This block was duplicated four times in the original.)
        r['tags']['osm:id'] = str(r['id'])
        r['tags']['osm:version'] = str(r['version'])
        r['tags']['osm:user_id'] = str(r['user_id'])
        r['tags']['osm:user'] = r['user']
        r['tags']['osm:timestamp'] = str(r['tstamp'])
        r['tags']['osm:changeset'] = str(r['changeset_id'])

    if add_columns:
        add_columns_qry = ', ' + ', '.join(
            [q + ' as "' + k + '"' for k, q in add_columns.items()])
    else:
        add_columns_qry = ''

    if _bbox:
        # $1 is always the bbox geometry; extra parameters follow it.
        param_type = ['geometry'] + add_param_type
        param_value = [_bbox] + add_param_value
    else:
        param_type = add_param_type
        param_value = add_param_value

    # --- nodes -----------------------------------------------------------
    w = [db_selects[t] for t in ('*', 'node', 'point') if t in db_selects]
    if w:
        bbox = ''
        if _bbox is not None:
            bbox = 'geom && $1 and ST_Intersects(geom, $1) and'
        qry = '''
select 'n' || cast(id as text) as id,
       version, user_id,
       (select name from users where id=user_id) as user,
       tstamp, changeset_id, tags,
       geom as geo,
       Array['point', 'node'] as types
       {add_columns}
from nodes
where {bbox} ( {w} )
'''.format(bbox=bbox, w=' or '.join(w),
           add_columns=add_columns_qry.replace('__geo__', 'geom'))

        plan = plpy.prepare(qry, param_type)
        for r in plpy.cursor(plan, param_value):
            r['types'] = list(r['types'])
            r['tags'] = pghstore.loads(r['tags'])
            _add_osm_meta(r)
            yield r

    # --- ways ------------------------------------------------------------
    w = [db_selects[t] for t in ('*', 'line', 'area', 'way')
         if t in db_selects]
    if w:
        bbox = ''
        if _bbox is not None:
            # Degenerate single-node linestrings are matched on the bounding
            # box operator alone, bypassing ST_Intersects.
            bbox = ('linestring && $1 and (ST_NPoints(linestring) = 1 '
                    'or ST_Intersects(linestring, $1)) and')
        qry = '''
select * {add_columns}
from (
  select 'w' || cast(id as text) as id,
         version, user_id,
         (select name from users where id=user_id) as user,
         tstamp, changeset_id, tags,
         (CASE WHEN ST_NPoints(linestring) >= 4 and ST_IsClosed(linestring)
               THEN ST_MakePolygon(linestring)
               ELSE linestring END) as geo,
         (ST_NPoints(linestring) >= 4) and ST_IsClosed(linestring)
           as is_closed,
         Array['line', 'way'] as types
'''
        # Preprocessor-selected schema variants: only one of the two
        # part_of_mp_outer sub-selects survives in the generated build.
        # START db.multipolygons
        # START db.multipolygons-v0.2 # deprecated by osmosis-multipolygon v0.3
        qry += '''
  , (select array_agg(has_outer_tags)
     from relation_members
     join multipolygons
       on relation_members.relation_id=multipolygons.id
     where relation_members.member_id=ways.id
       and relation_members.member_type='W'
       and relation_members.member_role in ('outer', 'exclave'))
    part_of_mp_outer
'''
        # ELSE db.multipolygons-v0.2
        qry += '''
  , (select array_agg(true)
     from multipolygons
     where hide_outer_ways @> Array[ways.id]) part_of_mp_outer
'''
        # END db.multipolygons-v0.2
        # END db.multipolygons
        qry += '''
  from ways
  where {bbox} ( {w} )
  offset 0) t
'''
        qry = qry.format(bbox=bbox, w=' or '.join(w),
                         add_columns=add_columns_qry.replace('__geo__', 'geo'))

        plan = plpy.prepare(qry, param_type)
        for r in plpy.cursor(plan, param_value):
            r['types'] = list(r['types'])
            r['tags'] = pghstore.loads(r['tags'])
            if r['is_closed']:
                # START db.multipolygons
                # A closed way is an area only if it is not serving as the
                # tag-carrying outer ring of some multipolygon.
                if not r['part_of_mp_outer'] or \
                        True not in r['part_of_mp_outer']:
                    # END db.multipolygons
                    r['types'].append('area')
            _add_osm_meta(r)
            yield r

    done_multipolygons = set()
    # START db.multipolygons
    # --- multipolygons ---------------------------------------------------
    w = [db_selects[t] for t in ('*', 'relation', 'area')
         if t in db_selects]
    if w:
        bbox = ''
        if _bbox is not None:
            bbox = 'geom && $1 and ST_Intersects(geom, $1) and'
        qry = '''
select * {add_columns}
from (
  select (CASE WHEN has_outer_tags THEN 'm' ELSE 'r' END)
           || cast(id as text) as id,
         id as rid, version, user_id,
         (select name from users where id=user_id) as user,
         tstamp, changeset_id, has_outer_tags, tags,
         geom as geo,
         Array['area'] as types
  from (select multipolygons.*, relations.version, relations.user_id,
               relations.tstamp, relations.changeset_id
        from multipolygons
        left join relations on multipolygons.id = relations.id) t
  where {bbox} ( {w} )
  offset 0) t
'''.format(bbox=bbox, w=' or '.join(w),
           add_columns=add_columns_qry.replace('__geo__', 'geo'))

        plan = plpy.prepare(qry, param_type)
        for r in plpy.cursor(plan, param_value):
            r['types'] = list(r['types'])
            r['tags'] = pghstore.loads(r['tags'])
            _add_osm_meta(r)
            if r['has_outer_tags']:
                r['tags']['osm:has_outer_tags'] = 'yes'
            else:
                # This relation is fully represented by its multipolygon row;
                # exclude it from the plain-relations query below.
                done_multipolygons.add(r['rid'])
                r['types'].append('relation')
            yield r
    # END db.multipolygons

    # --- relations (no bbox match!) --------------------------------------
    w = [db_selects[t] for t in ('*', 'relation') if t in db_selects]
    if w:
        qry = '''
select * {add_columns}
from (
  select 'r' || cast(id as text) as id,
         version, user_id,
         (select name from users where id=user_id) as user,
         tstamp, changeset_id, tags,
         null as geo,
         Array['relation'] as types
  from relations
  where ({w}) and not id = ANY(Array[{done}]::bigint[])) t
'''.format(w=' or '.join(w), add_columns=add_columns_qry,
           done=','.join({str(d) for d in done_multipolygons}))

        plan = plpy.prepare(qry, param_type)
        for r in plpy.cursor(plan, param_value):
            r['types'] = list(r['types'])
            r['tags'] = pghstore.loads(r['tags'])
            _add_osm_meta(r)
            yield r