def export_gtfs(self, path=''):
    """
    Export the gtfs_* tables as GTFS text files into *path*.

    Parameters
    ----------
    path : str, optional
        target folder for the generated ``<table>.txt`` files
        (default: current working directory)

    Fix: the output files are now opened with the same encoding (LATIN9)
    that the server is told to use via CLIENT_ENCODING; previously the
    platform default encoding was used, which could corrupt non-ASCII
    characters on systems where it differs.
    """
    with Connection() as conn:
        self.conn = conn
        self.set_search_path()
        cur = self.conn.cursor()
        sql = '''SET CLIENT_ENCODING TO '{encoding}';'''
        encoding = 'LATIN9'
        cur.execute(sql.format(encoding=encoding))
        tables = ['stops', 'agency', 'stop_times', 'routes', 'trips',
                  'shapes', 'calendar', 'calendar_dates', 'transfers']
        for table in tables:
            tn = 'gtfs_{tn}'.format(tn=table)
            fn = os.path.join(path, '{tn}.txt'.format(tn=table))
            # match the file encoding to the CLIENT_ENCODING set above
            # ('latin9' is the Python alias for iso8859-15)
            with open(fn, 'w', encoding='latin9') as f:
                sql = self.conn.copy_sql.format(tn=tn, fn=fn)
                logger.info(sql)
                cur.copy_expert(sql, f)
        sql = '''RESET CLIENT_ENCODING;'''
        cur.execute(sql)
def export(self):
    """Write the commuter tables of the project to a single xlsx file."""
    with Connection(login=self.login) as conn:
        self.conn = conn
        folder = os.path.abspath(
            os.path.join(self.folder, 'projekte', self.login.db,
                         'Pendlerdaten', ))
        os.makedirs(folder, exist_ok=True)
        file_path = os.path.join(folder, 'Pendlerdaten.xlsx')
        # start from a clean file each run
        if os.path.exists(file_path):
            os.remove(file_path)
        # (table, sheet) pairs, written to the workbook in this order
        sheets = [('ein_auspendler_zusammengefasst', 'zusammengefasst'),
                  ('ein_auspendler', 'Rohdaten')]
        with pd.ExcelWriter(file_path) as excel_writer:
            for tbl, sheet_name in sheets:
                frame = pd.read_sql(f'SELECT * FROM {self.schema}.{tbl}',
                                    conn)
                frame.to_excel(excel_writer=excel_writer,
                               sheet_name=sheet_name)
def convert(self):
    """
    Open a connection, set the search path and commit.

    NOTE(review): no conversion steps are performed here — presumably a
    stub to be filled in, or a base-class hook overridden elsewhere.
    The redundant trailing ``pass`` was removed.
    """
    with Connection() as conn:
        self.conn = conn
        self.set_search_path()
        self.conn.commit()
def extract(self):
    """
    Delete the destination database if it exists and is not protected,
    and mark the project as deleted in the master project table.

    Fix: the database name is now passed as a bound query parameter
    instead of being interpolated into the SQL string with ``format``,
    which was open to SQL injection / quoting errors.
    """
    self.set_pg_path()
    exists = self.check_if_database_exists(self.destination_db)
    if not exists:
        msg = 'database {} does not exist'
        logger.info(msg.format(self.destination_db))
        return
    with Connection(login=self.login0) as conn:
        cursor = conn.cursor()
        sql = """
        SELECT can_be_deleted
        FROM meta_master.projekte
        WHERE projektname_kurz = %s;
        """
        cursor.execute(sql, (self.destination_db,))
        row = cursor.fetchone()
        if row is None:
            msg = 'database {} not in project table'
            logger.info(msg.format(self.destination_db))
            return
        if not row.can_be_deleted:
            msg = 'database {} is protected and cannot be deleted'
            logger.info(msg.format(self.destination_db))
            return
        self.drop_database(dbname=self.destination_db, conn=conn)
        # flag the project as deleted in the master table
        sql = """
        UPDATE meta_master.projekte
        SET deleted = True
        WHERE projektname_kurz = %s;"""
        cursor.execute(sql, (self.destination_db,))
        conn.commit()
        msg = 'database {} successfully deleted'
        logger.info(msg.format(self.destination_db))
def run(self):
    """Import the commuter data inside a single transaction."""
    with Connection(login=self.login) as conn:
        # bind the connection so the helper methods can use it
        self.conn = conn
        self.import_pendler()
        # commit only after the import ran through completely
        self.conn.commit()
def run(self):
    """Run the work steps inside a single transaction."""
    with Connection(login=self.login) as conn:
        # bind the connection so the helper methods can use it
        self.conn = conn
        self.do_stuff()
        # commit only after all steps ran through completely
        self.conn.commit()
def user_choices(source_db) -> List[str]:
    """Return the names of all non-superuser roles of the server,
    skipping the internal ``pg_``-prefixed system roles."""
    login = create_login()
    sql = 'SELECT rolname FROM pg_catalog.pg_roles WHERE rolsuper = False;'
    with Connection(login=login) as conn:
        cursor = conn.cursor()
        cursor.execute(sql)
        rows = cursor.fetchall()
    names = []
    for row in rows:
        # filter out the built-in pg_* roles client-side
        if not row.rolname.startswith('pg_'):
            names.append(row.rolname)
    return names
def run(self):
    """Create the commuter spider ("Spinne") inside a single transaction."""
    with Connection(login=self.login) as conn:
        # bind the connection so the helper methods can use it
        self.conn = conn
        # fail early if the area table is missing
        self.validate_table_exists(self.pendlerspinne_gebiete)
        self.create_spinne()
        self.conn.commit()
def build(self):
    """
    Build the routing network in schema ``self.network``.

    The steps form a strictly ordered pipeline; intermediate commits
    persist each stage before the next (more expensive) one starts.
    """
    self.logger.info(f'Build Network in schema {self.network}')
    with Connection(login=self.login) as conn:
        # preparation: bind connection, determine SRID, create target schema
        self.conn = conn
        self.get_srid()
        # self.set_session_authorization(self.conn)
        self.create_schema()
        self.create_streets_view()
        # select roads and junctions
        self.logger.info(f'Create Views')
        self.create_roads()
        self.create_junctions()
        self.conn.commit()
        # create sql functions used by the following steps
        self.create_functions()
        self.conn.commit()
        # create links (chunked to keep transactions manageable)
        self.fill_link_points_and_create_links()
        self.create_chunks()
        self.create_links()
        self.fill_links()
        self.conn.commit()
        # update link attributes
        self.update_linktypes()
        self.update_oneway()
        self.update_lanes()
        self.create_slope()
        self.update_speed()
        self.update_time()
        self.create_index()
        self.create_barriers()
        self.conn.commit()
        # prepare the search for accessible links
        self.create_pgrouting_network()
        self.conn.commit()
        # NOTE(review): 'egde' looks like a typo for 'edge', but the name
        # must match the method definition elsewhere in the project
        self.update_egde_table()
        self.conn.commit()
        self.create_topology()
        self.create_edge_reached()
        self.conn.commit()
        # search accessible links including links reached by planned roads
        self.try_startvertices(n=20)
        self.copy_edge_reached_with_planned()
        # then search links accessible only by existing roads
        self.update_edge_table_with_construction()
        self.try_startvertices(n=20)
        # create the final views
        self.create_view_accessible_links()
        self.create_views_roadtypes()
        self.conn.commit()
        self.reset_authorization(self.conn)
def run(self):
    """Run the work steps under session authorization, then commit."""
    with Connection(login=self.login) as conn:
        # bind the connection so the helper methods can use it
        self.conn = conn
        self.set_session_authorization(self.conn)
        self.do_stuff()
        self.conn.commit()
        # restore the original role after the work is done
        self.reset_authorization(self.conn)
def copy(self):
    """Create the export views under session authorization,
    then copy the result to a pbf file."""
    with Connection(login=self.login) as conn:
        # bind the connection so the helper methods can use it
        self.conn = conn
        self.set_session_authorization(self.conn)
        self.create_views()
        self.conn.commit()
        self.reset_authorization(self.conn)
    # the pbf export runs outside the database transaction
    self.copy2pbf()
def final_stuff(self):
    """Create geometry indexes and primary keys for the copied tables.

    Foreign tables do not seem to pass on their primary keys, so the
    pkeys are looked up on the foreign server directly.
    """
    self.logger.info(f'Creating indexes')
    with Connection(login=self.foreign_login) as conn:
        for tn, geom in self.tables.items():
            self.add_geom_index(tn, geom)
            pkey = self.get_primary_key(self.schema, tn, conn=conn)
            # fall back to the 'ags' column when no pkey is defined upstream
            self.add_pkey(tn, pkey if pkey else 'ags')
def convert(self):
    """
    Convert the imported timetable data into GTFS tables.

    The calls form a strictly ordered pipeline; several steps repair or
    deduplicate the raw data before the GTFS tables are filled.
    """
    with Connection(self.login) as conn:
        # preparation: schema setup and (re)creation of target tables
        self.conn = conn
        self.set_search_path()
        self.create_aggregate_functions()
        self.create_timetable_tables()
        self.create_routes_tables()
        self.create_gtfs_tables()
        self.truncate_routes_tables()
        self.truncate_gtfs_tables()
        # data cleaning on the raw timetable
        self.count()
        self.delete_invalid_fahrten()
        self.set_stop_id()
        self.write_calendar()
        # self.workaround_sommerzeit()
        self.test_for_negative_travel_times()
        # shift trips crossing midnight to the previous day, then re-check
        self.set_arrival_to_day_before()
        self.set_departure_to_day_before()
        self.shift_trips()
        self.test_for_negative_travel_times()
        self.set_ein_aus()
        # deduplicate departures
        self.count_multiple_abfahrten()
        # self.show_multiple_trips()
        self.save_haltestellen_id()
        self.delete_multiple_abfahrten()
        # Haltestellen (stops): consolidate stop ids, committing between
        # the steps so each stage is persisted
        # self.update_haltestellen_d()
        self.reset_stop_id()
        self.conn.commit()
        self.set_eindeutige_stop_id()
        self.conn.commit()
        self.update_stop_id_from_database()
        self.conn.commit()
        # self.cleanup_haltestellen()
        self.update_stop_id_from_similar_routes()
        self.intersect_kreise(self.tbl_kreise)
        # GTFS: fill the target tables
        self.fill_gtfs_stops()
        self.identify_kreis(self.tbl_kreise)
        self.line_type()
        self.make_routes()
        self.make_agencies()
        self.make_shapes()
        self.make_stop_times()
        # self.mark_abfahrten_around_kreis()
        self.conn.commit()
def check_if_features(self, layer):
    """
    Return the rowcount (0 or 1) of a LIMIT-1 probe on ``layer``,
    i.e. a truthy value when the layer contains at least one feature.

    Parameters
    ----------
    layer : str
        name of the table/view inside ``self.schema``
    """
    sql = '''
    SELECT * FROM {schema}.{layer} LIMIT 1;
    '''.format(schema=self.schema, layer=layer)
    with Connection(self.login) as conn:
        cur = conn.cursor()
        cur.execute(sql)
        return cur.rowcount
def extract(self):
    """Drop the destination database, connecting via the maintenance db."""
    self.set_pg_path()
    if not self.check_if_database_exists(self.destination_db):
        msg = 'database {} does not exist'
        self.logger.info(msg.format(self.destination_db))
        return
    # connect to the 'postgres' maintenance db — a database cannot be
    # dropped while a session is connected to it
    maintenance_login = deepcopy(self.login)
    maintenance_login.db = 'postgres'
    with Connection(login=maintenance_login) as conn:
        self.drop_database(dbname=self.destination_db, conn=conn)
        conn.commit()
    msg = 'database {} successfully deleted'
    self.logger.info(msg.format(self.destination_db))
def create_views(self):
    """Create the osm views that should be exported"""
    with Connection(login=self.login1) as conn:
        self.conn = conn
        self.create_dest_schema()
        # one view per osm feature class, created in this order
        creators = (self.create_railways,
                    self.create_amenity,
                    self.create_buildings,
                    self.create_leisure,
                    self.create_natural,
                    self.create_waterways,
                    self.create_tourism,
                    self.create_shops)
        for create in creators:
            create()
        self.conn.commit()
def convert(self):
    """
    Fill the GTFS tables from the imported data and commit.

    The calls form an ordered pipeline (stops before routes/shapes/
    stop_times).  The redundant trailing ``pass`` was removed.
    """
    with Connection() as conn:
        self.conn = conn
        self.set_search_path()
        # GTFS
        self.fill_gtfs_stops()
        self.identify_kreis()
        self.line_type()
        self.make_routes()
        self.make_agencies()
        self.make_shapes()
        self.make_stop_times()
        # self.mark_abfahrten_around_kreis()
        self.conn.commit()
def update_metatable(self):
    """
    Update the row in the meta-table of the database.

    The existing row for the project is deleted first, then re-inserted
    with the current option values.

    Fix: the DELETE statement now passes the project name as a bound
    parameter (like the INSERT already did) instead of interpolating it
    with ``format``, which was inconsistent and injectable.
    """
    sql_delete = """
    DELETE FROM meta_master.projekte
    WHERE projektname_kurz = %(destination_db)s;
    """
    sql_insert = """
    INSERT INTO meta_master.projekte (
      projektname_kurz, projektname_lang, projektnummer, bearbeiter,
      srid, "left", "right", "top", "bottom",
      date_areas, date_timetable)
    VALUES (%(destination_db)s, %(name_long)s, %(project_number)s,
            %(bearbeiter)s, %(srid)s,
            %(left)s, %(right)s, %(top)s, %(bottom)s,
            %(date_areas)s, %(date_timetable)s );
    """
    op = self.options
    login = Login(host=op.host, port=op.port, user=op.user, db=op.source_db)
    with Connection(login) as conn:
        self.conn = conn
        cursor = self.conn.cursor()
        # both statements draw their values from the options namespace
        cursor.execute(sql_delete, op.__dict__)
        cursor.execute(sql_insert, op.__dict__)
        self.conn.commit()
def choose_scripts(self):
    """Mark the scripts selected on the command line as 'todo'
    in the target database; unmark all others."""
    with Connection(login=self.login) as conn:
        self.conn = conn
        # reset all todo flags first
        sql = '''
        UPDATE meta.scripts SET todo = False;
        '''
        self.run_query(sql)
        sql = '''
        UPDATE meta.scripts SET todo = True WHERE scriptcode = %(sc)s
        '''
        cursor = self.conn.cursor()
        # then mark each selected script individually
        for script in (self.options.scripts or []):
            cursor.execute(sql, {'sc': script})
        self.conn.commit()
def update_script_ids(self):
    """
    Update the script ids from the master scripts.

    The update runs in two phases: ids are first set to the NEGATED
    master id and only then to the positive master id.  This avoids
    transient collisions while reassigning — presumably ``id`` is
    unique-constrained (TODO confirm against the table definition);
    a direct single-step update could hit a duplicate id mid-statement.
    """
    with Connection(login=self.login) as conn:
        self.conn = conn
        sql = '''
        UPDATE meta.scripts s
        SET id = -m.id
        FROM meta.master_scripts m
        WHERE s.scriptcode = m.scriptcode
        AND s.id != m.id;
        UPDATE meta.scripts s
        SET id = m.id
        FROM meta.master_scripts m
        WHERE s.scriptcode = m.scriptcode
        AND s.id != m.id;
        '''
        self.run_query(sql)
        self.conn.commit()
def pg_raster_to_array(self, schema, tablename, grid_folder='%TEMP%'):
    """
    Read a numpy array from a postgis raster table.

    The union of all raster tiles is rendered to a GTiff by the server,
    loaded through GDAL's in-memory filesystem and wrapped into Grids.
    """
    virtual_path = '/vsimem/from_postgis'
    sql = '''
    SELECT ST_AsGDALRaster(st_union(rast), 'GTiff')
    FROM {schema}.{table};
    '''.format(schema=schema, table=tablename)
    with Connection(self.login1) as conn:
        cur = conn.cursor()
        cur.execute(sql)
        # single row, single column: the GTiff as a binary blob
        raster_blob = bytes(cur.fetchone()[0])
        # expose the blob as a virtual file so GDAL can open it
        gdal.FileFromMemBuffer(virtual_path, raster_blob)
        dataset = gdal.Open(virtual_path)
        self.grids = Grids.from_gdal_virtual_tiff(dataset, grid_folder,
                                                  grid_name=tablename,
                                                  max_rings=10)
        # release the GDAL handle and the in-memory file
        dataset = None
        gdal.Unlink(virtual_path)
        return getattr(self.grids, tablename).array
def export_gtfs(self):
    """
    Export the gtfs_* tables as GTFS text files and zip them.

    Fixes:
    * ``path.replace('~', os.environ['HOME'])`` raised KeyError on
      Windows (no HOME variable) and replaced EVERY tilde in the path;
      ``os.path.expanduser`` is portable and only expands a leading '~'.
    * files are now written with ``encoding='utf-8'`` to match the
      CLIENT_ENCODING UTF8 set on the connection, independent of the
      platform default encoding.
    """
    path = os.path.join(self.base_path,
                        self.destination_db,
                        self.subfolder)
    with Connection(self.login) as conn:
        self.conn = conn
        self.set_search_path()
        cur = self.conn.cursor()
        sql = '''SET CLIENT_ENCODING TO '{encoding}';'''
        encoding = 'UTF8'
        cur.execute(sql.format(encoding=encoding))
        tables = ['stops', 'agency', 'stop_times', 'routes', 'trips',
                  'shapes', 'calendar', 'calendar_dates', 'transfers']
        folder = os.path.expanduser(path)
        self.make_folder(folder)
        zipfilename = os.path.join(folder,
                                   '{}.zip'.format(self.destination_db))
        with zipfile.ZipFile(zipfilename, 'w') as z:
            for table in tables:
                self.logger.info(f'Exporting table {table} to gtfs')
                tn = 'gtfs_{tn}'.format(tn=table)
                tablename = '{tn}.txt'.format(tn=table)
                fn = os.path.join(folder, tablename)
                self.logger.info('write {}'.format(fn))
                with open(fn, 'w', encoding='utf-8') as f:
                    sql = self.conn.copy_sql.format(tn=tn, fn=fn)
                    self.logger.info(sql)
                    cur.copy_expert(sql, f)
                # move the txt file into the zip archive
                z.write(fn, tablename)
                os.remove(fn)
        sql = '''RESET CLIENT_ENCODING;'''
        cur.execute(sql)
def get_foreign_tables(database, schema) -> dict:
    """
    Return ``{table_name: comment}`` for all tables in *schema* of the
    foreign *database* (comment is '' for tables without a description).

    Fix: the schema name is now passed as a bound query parameter
    instead of being interpolated into the SQL via f-string.
    """
    login = create_foreign_login(database)
    sql = """
    SELECT *
    FROM information_schema.tables
    WHERE table_schema = %s
    ORDER BY table_name;
    """
    with Connection(login=login) as conn:
        cursor = conn.cursor()
        cursor.execute(sql, (schema,))
        rows = cursor.fetchall()
        # start with empty descriptions for every table
        tables = {row.table_name: '' for row in rows}
        sql = '''
        SELECT d.description, c.relname
        FROM pg_catalog.pg_description as d
        join pg_catalog.pg_class as c on d.objoid = c.oid
        join pg_catalog.pg_namespace as n on c.relnamespace = n.oid
        where nspname = %s;
        '''
        cursor.execute(sql, (schema,))
        rows = cursor.fetchall()
        # fill in the comments where defined
        for row in rows:
            tables[row.relname] = row.description
    return tables
def scrape(self, destination_table: str, max_distance: int = None):
    """
    Scrape the fastest Deutsche Bahn connection from every Haltestelle
    to every destination within 70 km and store the results in a
    date-stamped target table.

    Parameters
    ----------
    destination_table : str
        'schema.table' (a bare table name defaults to schema 'public')
    max_distance : int, optional
        NOTE(review): this parameter is currently UNUSED — the search
        radius for the closest stop is hard-coded to 2000 m below, and
        the origin/destination radius to 70000 m in the SQL.

    NOTE(review): schema/table/values are interpolated into SQL strings
    directly (f-strings).  Inputs are assumed to be trusted/internal;
    parameterized queries would be safer.
    """
    s = destination_table.split('.')
    schema, table = s if len(s) > 1 else ('public', s[0])
    # one result table per scrape date
    target_table = f"db_fastest_{table}_{self.date.strftime('%Y_%m_%d')}"
    with Connection(login=self.login) as conn:
        self.logger.info(f'Creating target table "{target_table}" '
                         f'in schema "{schema}"')
        pk = self.get_primary_key(schema, table, conn=conn)
        # determine the pk column type so destination_id can mirror it
        sql = f'SELECT pg_typeof({pk}) FROM "{schema}"."{table}" LIMIT 1;'
        cur = conn.cursor()
        cur.execute(sql)
        pk_type = cur.fetchone()[0]
        sql = f'''
        DROP TABLE IF EXISTS "{schema}"."{target_table}";
        CREATE TABLE "{schema}"."{target_table}" (
        "origin_H_ID" bigint NOT NULL,
        destination_id {pk_type} NOT NULL,
        "destination_H_ID" bigint NOT NULL,
        "destination_H_NAME" text,
        modes text,
        changes integer,
        duration integer,
        departure text,
        PRIMARY KEY ("origin_H_ID", destination_id),
        CONSTRAINT "{target_table}_dest_pk" FOREIGN KEY (destination_id)
            REFERENCES "{schema}"."{table}" ({pk}) MATCH SIMPLE
            ON UPDATE NO ACTION ON DELETE NO ACTION,
        CONSTRAINT "{target_table}_orig_pk" FOREIGN KEY ("origin_H_ID")
            REFERENCES "{self.schema}".haltestellen ("H_ID") MATCH SIMPLE
            ON UPDATE NO ACTION ON DELETE NO ACTION
        );
        '''
        self.run_query(sql, conn=conn)
        conn.commit()
        self.logger.info(
            'Fetching combinations of origins and destinations'
            ' within radius')
        # all origin stops paired with destinations within 70 km
        sql = f'''
        SELECT "H_ID", "H_Name", d.{pk} AS dest_id,
        ST_X(ST_TRANSFORM(d.geom, 4326)),
        ST_Y(ST_TRANSFORM(d.geom, 4326))
        FROM "{self.schema}".haltestellen h,
        "{schema}"."{table}" d
        WHERE ST_DWithin(h.geom, d.geom, 70000);
        '''
        cur.execute(sql)
        routes = cur.fetchall()
        # cache the closest DB stop per destination; -1 marks
        # "no stop found nearby" so the lookup is not retried
        closest_stops = {}
        self.logger.info(f'Scraping {len(routes)} Routes '
                         'from Deutsche Bahn')
        for i, (origin_stop_id, origin_stop_name,
                dest_id, dest_x, dest_y) in enumerate(routes):
            closest_stop = closest_stops.get(dest_id)
            if not closest_stop:
                cs_res = self.db_query.stops_near((dest_x, dest_y),
                                                  max_distance=2000)
                closest_stop = cs_res[0] if len(cs_res) > 0 else -1
                closest_stops[dest_id] = closest_stop
            if closest_stop == -1:
                continue
            (duration, departure, changes,
             modes) = self.db_query.fastest_route(origin_stop_name,
                                                  closest_stop['name'],
                                                  self.times,
                                                  max_retries=5)
            # presumably a sentinel for "no route found" — TODO confirm
            if duration > 10000000:
                continue
            # quote textual primary keys for the VALUES clause
            if 'character' in pk_type or 'text' in pk_type:
                dest_id = f"'{dest_id}'"
            sql = f'''
            INSERT INTO "{schema}"."{target_table}"
            ("origin_H_ID", destination_id, "destination_H_ID",
            "destination_H_NAME", modes, changes, duration, departure)
            VALUES ({origin_stop_id}, {dest_id}, {closest_stop['id']},
            '{closest_stop['name']}', '{modes}', {changes}, {duration},
            '{departure}')
            '''
            cur.execute(sql)
            # progress log every 100 routes
            if (i + 1) % 100 == 0:
                self.logger.info(f'{i+1}/{len(routes)} Routes processed')
        conn.commit()
def create_poly_and_multipolygons(self, schema='osm'):
    """
    Create Polygons and Multipolygons for OSM-Data.

    Pipeline (each stage is one SQL batch, executed in order at the end):
    1. simple polygons from single closed ways,
    2. polygons with holes from 'multipolygon' relations (with extensive
       validity classification in ``poly_type``),
    3. multipolygons from multi-linestring outerrings,
    4. removal of simple polygons that are already covered by relations,
    5. tag merging from member ways onto the relations,
    6. indexes and the final ``osm.polygons`` / ``osm.lines`` views.

    NOTE(review): the ``schema`` parameter is never used — every
    statement hard-codes the ``osm`` schema.
    """
    # --- stage 1: polygons made of one single closed linestring --------
    sql_create_simple_polygons = """
    -- -- -- -- -- -- -- POLYGONS - -- -- -- -- -- -- --
    -- CREATE TABLE WITH POLYGONS MADE OF A SINGLE LINESTRING
    DROP TABLE IF EXISTS osm.simple_polys CASCADE;
    CREATE TABLE osm.simple_polys (id bigint PRIMARY KEY,
    geom geometry(MULTIPOLYGON, {srid}));
    INSERT INTO osm.simple_polys
    SELECT w.id, st_multi(ST_MakePolygon(w.linestring)) as geom
    FROM osm.ways w
    WHERE st_IsClosed(w.linestring)
    and st_NPoints(w.linestring) > 3
    ;
    """
    # --- stage 2: relations tagged 'multipolygon' → polygons with holes
    sql_create_polygons_with_holes = """
    -- -- -- -- -- -- -- POLYGONS WITH HOLES - -- -- -- -- -- --
    DROP TABLE IF EXISTS osm.polygon_with_holes CASCADE;
    CREATE TABLE osm.polygon_with_holes (
    relation_id bigint PRIMARY KEY,
    tags hstore,
    outerring_linestring geometry,
    outerring_array bigint[],
    innerring_linestring geometry[],
    polygon geometry(MULTIPOLYGON, {srid}),
    poly_type text NOT NULL DEFAULT 'unknown');
    -- fill the table with all relations tagged "multipolygon"
    INSERT INTO osm.polygon_with_holes (relation_id, tags)
    SELECT r.id, r.tags
    FROM osm.relations r
    WHERE r.tags -> 'type' = 'multipolygon'
    AND r.tags ? 'type';
    -- set array of outerrings
    UPDATE osm.polygon_with_holes r
    SET outerring_array = a.arr,
    outerring_linestring = a.geom
    FROM (
    SELECT r.relation_id,
    array_agg(rm.member_id) as arr,
    ST_LineMerge(ST_Collect(w.linestring)) AS geom
    FROM osm.polygon_with_holes r,
    osm.relation_members rm,
    osm.ways w
    WHERE rm.member_role = 'outer'
    and rm.relation_id = r.relation_id
    and w.id = rm.member_id
    and st_NPoints(w.linestring) > 1
    and st_IsValid(w.linestring)
    GROUP BY r.relation_id) a
    WHERE a.relation_id = r.relation_id;
    -- set innerrings
    UPDATE osm.polygon_with_holes r
    SET innerring_linestring = b.geom_arr
    FROM (
    SELECT a.relation_id, array_agg(geom) AS geom_arr
    FROM (
    SELECT rr.relation_id,
    (st_dump(ST_LineMerge(ST_Collect(w.linestring)))).geom
    FROM osm.relation_members rm,
    osm.ways w,
    osm.polygon_with_holes rr
    WHERE rm.member_role = 'inner'
    and rm.relation_id = rr.relation_id
    and w.id = rm.member_id
    GROUP BY rr.relation_id ) a
    GROUP BY a.relation_id ) b
    WHERE b.relation_id = r.relation_id;
    -- a ring with only 3 points is flat: A-B-A (1st point = 3rd point), hence buggy
    UPDATE osm.polygon_with_holes r
    SET poly_type= 'no valid outerring'
    WHERE st_NPoints(r.outerring_linestring) < 4 -- 5 relations are buggy in italy.osm
    or r.outerring_linestring IS NULL -- about 16000 (relations between simple nodes?)
    ;
    -- the above must be done before what follows, because if less than 3 points, test may crash
    UPDATE osm.polygon_with_holes r
    SET poly_type= 'no valid outerring'
    WHERE r.poly_type = 'unknown'
    and NOT st_IsClosed(r.outerring_linestring); -- 136 are buggy in italy.osm
    UPDATE osm.polygon_with_holes r
    SET poly_type= 'no valid outerring'
    WHERE r.poly_type = 'unknown'
    and NOT st_IsSimple(r.outerring_linestring); -- 102 more are buggy in italy.osm
    -- If (NOT poly_type= 'no valid outerring') after the above,
    -- it means there is a valid outerring.
    -- Now let us see if there is a valid innerring (or several)
    -- if there is no inner line, there is no valid innerring
    UPDATE osm.polygon_with_holes r
    SET poly_type= 'no valid innerring'
    WHERE r.poly_type = 'unknown'
    and r.innerring_linestring IS NULL
    ; -- 3015 more have no valid innerring
    -- innering must be closed
    UPDATE osm.polygon_with_holes r
    SET poly_type= 'no valid innerring'
    WHERE r.poly_type = 'unknown'
    and (NOT st_ISClosed(ST_LineMerge(ST_Collect(r.innerring_linestring))))
    ; -- 44 more are buggy
    -- innering must be big enough
    -- all innerrings must have at least three points
    UPDATE osm.polygon_with_holes r
    SET poly_type= 'no valid innerring'
    FROM (
    SELECT r2.relation_id
    FROM (
    SELECT r.relation_id,
    st_NPoints(unnest(r.innerring_linestring)) n
    FROM osm.polygon_with_holes r ) r2
    GROUP BY r2.relation_id
    HAVING not bool_and(r2.n > 3)) r3
    WHERE r.poly_type = 'unknown'
    AND r3.relation_id = r.relation_id
    ;
    -- check further validity of innerring: closed (multi)linestring?
    UPDATE osm.polygon_with_holes r
    SET poly_type= 'valid innerring'
    WHERE r.poly_type = 'unknown'
    and (st_ISClosed(ST_LineMerge(ST_Collect(r.innerring_linestring))))
    ;
    --finally create the Polygon
    UPDATE osm.polygon_with_holes r
    SET polygon = st_multi(ST_MakePolygon(r.outerring_linestring,
    (r.innerring_linestring )))
    WHERE poly_type= 'valid innerring'
    and GeometryType(r.outerring_linestring) ='LINESTRING';
    """
    # --- stage 3: outerrings that are MULTILINESTRINGs → multipolygons
    sql_create_multipolygons = """
    -- MULTIPOLYGONS
    DROP TABLE IF EXISTS osm.multi_polygons;
    CREATE TABLE osm.multi_polygons (relation_id bigint,
    path integer,
    outerring geometry(LINESTRING, {srid}),
    polygon geometry(POLYGON, {srid}));
    ALTER TABLE osm.multi_polygons ADD PRIMARY KEY (relation_id, path);
    -- fill the multipolygon_table
    INSERT INTO osm.multi_polygons (relation_id, path, outerring)
    SELECT r.relation_id,
    (st_dump(r.outerring_linestring)).path[1],
    (st_dump(r.outerring_linestring)).geom
    FROM osm.polygon_with_holes r
    WHERE geometrytype(r.outerring_linestring) = 'MULTILINESTRING';
    -- build the outerring polygon
    UPDATE osm.multi_polygons m
    SET polygon = st_makepolygon(outerring)
    WHERE st_isclosed(outerring)
    AND st_npoints(outerring) > 3;
    -- create the full polygon
    UPDATE osm.multi_polygons m
    SET polygon = st_makepolygon(m.outerring, i2.geom_agg)
    FROM
    (SELECT i.relation_id, i.path, array_agg(i.geom) AS geom_agg
    FROM
    (SELECT m.relation_id,
    m.path,
    m.polygon,
    -- one row per innerring
    unnest(r.innerring_linestring) AS geom
    FROM osm.polygon_with_holes r,
    osm.multi_polygons m
    WHERE r.relation_id = m.relation_id ) i
    -- check which innerring is in which outer polygon for each relation
    WHERE st_within(i.geom, i.polygon)
    -- only consider valid innerrings
    AND st_isclosed(i.geom)
    AND st_npoints(i.geom) > 3
    -- regroup the valid innerrings to a geometry array
    GROUP BY i.relation_id, i.path) AS i2
    WHERE m.relation_id = i2.relation_id
    AND m.path = i2.path;
    -- and the Multipolygons
    UPDATE osm.polygon_with_holes r
    SET polygon = mm.geom,
    poly_type = 'multipolygon'
    FROM
    (SELECT m.relation_id,
    st_multi(st_collect(m.polygon)) AS geom
    FROM osm.multi_polygons m,
    osm.polygon_with_holes r
    WHERE m.relation_id = r.relation_id
    GROUP BY m.relation_id) mm
    WHERE mm.relation_id = r.relation_id;
    """
    # --- stage 4: drop simple polygons already represented by relations
    sql_delete_unused_simple_polygons = """
    -- the complex polygons that are valid no longer need to be represented with their outerring only
    -- and not deleting those simple_polys will prevent insertion in the final polygon UNION below
    CREATE TABLE IF NOT EXISTS osm.ways_in_poly (id bigint PRIMARY KEY);
    TRUNCATE osm.ways_in_poly;
    INSERT INTO osm.ways_in_poly
    SELECT DISTINCT rm.member_id
    FROM osm.relation_members rm
    WHERE rm.member_role='outer'
    AND rm.member_type = 'W'
    and rm.relation_id IN
    (SELECT r.relation_id FROM osm.polygon_with_holes r
    WHERE ST_IsValid(r.polygon))
    ;
    DELETE FROM osm.simple_polys p
    WHERE p.id IN (
    SELECT rm.member_id
    FROM osm.relation_members rm
    WHERE rm.member_role='outer'
    and rm.relation_id IN
    (SELECT r.relation_id
    FROM osm.polygon_with_holes r
    WHERE ST_IsValid(r.polygon) ) );
    -- Also clean useless innerrings stored as simple_polys or ways
    INSERT INTO osm.ways_in_poly
    SELECT DISTINCT rm.member_id
    FROM osm.relation_members rm, osm.ways w
    WHERE rm.member_id = w.id
    AND rm.member_role='inner'
    AND rm.member_type = 'W'
    AND w.tags = ''
    and rm.relation_id IN
    (SELECT r.relation_id FROM osm.polygon_with_holes r
    WHERE ST_IsValid(r.polygon))
    AND NOT EXISTS (
    SELECT 1 FROM osm.ways_in_poly p WHERE p.id = rm.member_id)
    ;
    DELETE FROM osm.simple_polys p
    WHERE p.id IN (
    SELECT DISTINCT rm.member_id
    FROM osm.relation_members rm
    WHERE rm.member_role='inner'
    and rm.relation_id IN
    (SELECT r.relation_id
    FROM osm.polygon_with_holes r
    WHERE ST_IsValid(r.polygon) ) );
    INSERT INTO osm.ways_in_poly
    SELECT s.id FROM osm.simple_polys s
    WHERE NOT EXISTS(
    SELECT 1 FROM osm.ways_in_poly p WHERE p.id = s.id);
    -- Create simple polygon not having (valid) innering(s)
    UPDATE osm.polygon_with_holes r
    SET polygon = st_multi(st_MakePolygon(r.outerring_linestring))
    WHERE (r.poly_type = 'no valid innerring'
    OR r.poly_type = 'unknown')
    and GeometryType((r.outerring_linestring)) ='LINESTRING'
    ;
    """
    # --- stage 5: merge tags of member ways into the relation tags -----
    sql_update_tags = """
    -- UPDATE tags of the multipolygon with the tags from the polygon
    UPDATE osm.polygon_with_holes r
    SET tags = w2.tags || r.tags
    FROM
    (SELECT wr.relation_id, hstore_sum(w.tags) AS tags
    FROM osm.ways w,
    (SELECT r.relation_id,
    unnest(r.outerring_array) AS wayid
    FROM osm.polygon_with_holes r
    WHERE r.poly_type= 'valid innerring'
    OR r.poly_type = 'multipolygon' ) wr
    WHERE w.id = wr.wayid
    GROUP BY wr.relation_id ) w2
    WHERE r.relation_id = w2.relation_id
    AND w2.tags != ''
    ;
    """
    # --- stage 6a: indexes + analyze ----------------------------------
    sql_create_index = """
    CREATE INDEX poly_tags_idx ON osm.polygon_with_holes USING gist(tags);
    CREATE INDEX poly_geom_idx ON osm.polygon_with_holes USING gist(polygon);
    CREATE INDEX simple_poly_geom_idx ON osm.simple_polys USING gist(geom);
    ANALYZE osm.polygon_with_holes;
    ANALYZE osm.simple_polys;
    """
    # --- stage 6b: final export views ---------------------------------
    sql_create_view = """
    CREATE OR REPLACE VIEW osm.polygons AS
    SELECT p.id,
    p.geom,
    w.tags
    FROM osm.simple_polys p, osm.ways w
    WHERE p.id = w.id
    UNION ALL
    SELECT m.relation_id AS id,
    m.polygon AS geom,
    m.tags
    FROM osm.polygon_with_holes m
    WHERE m.poly_type != 'no valid outerring';
    DROP VIEW IF EXISTS osm.lines CASCADE;
    CREATE OR REPLACE VIEW osm.lines AS
    SELECT w.id, w.linestring::geometry(LINESTRING, {srid}) as geom, w.tags
    FROM osm.ways w
    WHERE NOT EXISTS
    (SELECT 1 FROM osm.ways_in_poly wp WHERE wp.id = w.id);
    """
    # execute the stages in order; only commit when everything succeeded
    with Connection(login=self.login1) as conn:
        self.conn = conn
        self.run_query(
            sql_create_simple_polygons.format(srid=self.target_srid))
        self.run_query(
            sql_create_polygons_with_holes.format(srid=self.target_srid))
        self.run_query(
            sql_create_multipolygons.format(srid=self.target_srid))
        self.run_query(sql_delete_unused_simple_polygons)
        self.run_query(sql_update_tags)
        self.run_query(sql_create_index)
        self.run_query(sql_create_view.format(srid=self.target_srid))
        self.conn.commit()
def run_scripts(self):
    """
    Go through the scripts table and run the scripts marked as todo.

    Each script's state (started/success/times) is tracked in
    ``meta.scripts``; execution stops with a ScriptError on the first
    non-zero return code.
    """
    # scripts to execute, in id order
    sql = """
    SELECT id, scriptcode, scriptname, parameter
    FROM meta.script_view
    WHERE todo
    ORDER BY id
    """
    started_sql = """
    UPDATE meta.scripts
    SET started = True, success=NULL, starttime = %(time)s, endtime=NULL
    WHERE scriptcode = %(sc)s;
    """
    finished_sql = """
    UPDATE meta.scripts
    SET success = True, started=False, endtime = %(time)s, todo = False
    WHERE scriptcode = %(sc)s;
    """
    error_sql = """
    UPDATE meta.scripts
    SET success = False, endtime = %(time)s
    WHERE scriptcode = %(sc)s;
    """
    msg_start = '''run script {name} with parameters {params} at {time}:
    {command}'''
    msg_end = '''script {name} finished at {time} with returncode {ret}'''
    # sourcing .profile first makes the login environment (PATH etc.)
    # available to the script
    cmd = '. ~/.profile; {scriptname} {params}'
    with Connection(login=self.login) as conn:
        self.conn = conn
        cursor = self.conn.cursor()
        cursor.execute(sql)
        rows = cursor.fetchall()
        for row in rows:
            starttime = datetime.now()
            if row.parameter:
                # substitute command-line options into the parameter template
                params = row.parameter.format(**self.options.__dict__)
            else:
                params = ''
            command = cmd.format(scriptname=row.scriptname, params=params)
            self.logger.info(msg_start.format(name=row.scriptname,
                                              params=row.parameter,
                                              time=starttime,
                                              command=command))
            # mark the script as started before launching it
            self.run_query(started_sql, values={'sc': row.scriptcode,
                                                'time': starttime})
            self.conn.commit()
            # NOTE(review): shell execution of an interpolated command
            # string — script names/parameters from the scripts table are
            # assumed trusted
            pipe = subprocess.Popen(command, shell=self.SHELL)
            ret = pipe.wait()
            endtime = datetime.now()
            if ret:
                # record the failure, then abort the whole run
                self.run_query(error_sql, values={'sc': row.scriptcode,
                                                  'time': endtime})
                msg = '{script} returned ErrorCode {code}'
                self.conn.commit()
                raise ScriptError(msg.format(script=command, code=ret))
            self.run_query(finished_sql, values={'sc': row.scriptcode,
                                                 'time': endtime})
            self.conn.commit()
            self.logger.info(msg_end.format(name=row.scriptname,
                                            time=endtime,
                                            ret=ret))