def get_data_from_pg(data_type, insee_com, cadastre_com, local=False, suffixe_data=None):
    # print(data_type,insee_com,cadastre_com,suffixe_data)
    cache_file = get_cache_filename(data_type, insee_com, cadastre_com)
    # print(os.path.abspath(__file__))
    if not use_cache or not os.path.exists(cache_file) or (time.time() - os.path.getmtime(cache_file)) > 86400:
        fq = open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sql/{:s}.sql'.format(data_type)), 'rb')
        str_query = fq.read().replace('__com__', insee_com)
        fq.close()
        if local:
            pgc = get_pgc()
        else:
            pgc = get_pgc_layers()
        if suffixe_data:
            str_query = str_query.replace('__suffixe_data__', suffixe_data)
        cur = pgc.cursor()
        cur.execute(str_query)
        f = open(cache_file, 'w+')
        for lt in cur:
            l = list(lt)
            f.write(str(l) + '\n')
        cur.close()
        f.seek(0)
    else:
        f = open(cache_file, 'r')
    res = []
    for l in f:
        res.append(eval(l))
    f.close()
    return res
def main(args):
    debut_total = time.time()
    usage = 'USAGE : python place_2_db.py <code INSEE>'
    if len(args) < 2:
        print(usage)
        os._exit(0)
    global pgc, dicts, source, code_insee
    pgc = get_pgc()
    source = 'CADASTRE'
    code_insee = args[1]
    code_cadastre = get_code_cadastre_from_insee(code_insee)
    code_dept = get_cadastre_code_dept_from_insee(code_insee)
    batch_id = batch_start_log(source, 'cumulPlaces', code_cadastre)
    global nodes, ways
    nodes = Nodes()
    ways = Ways()
    data = get_data_from_pg('cadastre_2_places', code_insee)
    for d in data:
        d[3] = format_toponyme(d[3])
    nb_rec = load_to_db(data, code_insee, source, code_cadastre, code_dept)
    batch_end_log(nb_rec, batch_id)
def main(args):
    debut_total = time.time()
    usage = 'USAGE : python hameaux_depuis_noms_de_voies.py <code INSEE>'
    if len(args) != 2:
        print(usage)
        os._exit(0)
    global pgc, dicts, adresses, source, code_insee
    pgc = get_pgc()
    source = 'CADASTRE'
    adresses = Adresses()
    code_insee = args[1]
    code_cadastre = get_code_cadastre_from_insee(code_insee)
    code_dept = get_cadastre_code_dept_from_insee(code_insee)
    batch_id = batch_start_log(source, 'detecteHameaux', code_cadastre)
    fnadresses = os.path.join('/data/work/cadastre.openstreetmap.fr/bano_cache', code_dept, code_cadastre, code_cadastre + '-adresses.osm')
    # fnadresses = os.path.join('C:\\Users\\vincent\\Documents\\GitHub',code_cadastre+'-adresses.osm')
    load_hsnr_from_cad_file(fnadresses)
    freq = name_frequency()
    sel = select_street_names_by_name(freq)
    adds = collect_adresses_points(sel)
    nb_rec = load_hameaux_2_db(adds)
    batch_end_log(nb_rec, batch_id)
def main(args): debut_total = time.time() usage = "USAGE : python suffixe_depuis_noms_de_voies.py <code INSEE>" if len(args) != 2: print(usage) os._exit(0) global pgc, dicts, adresses, source, code_insee pgc = get_pgc() source = "CADASTRE" adresses = Adresses() code_insee = args[1] code_cadastre = get_code_cadastre_from_insee(code_insee) code_dept = get_cadastre_code_dept_from_insee(code_insee) batch_id = batch_start_log(source, "detectesuffixe", code_cadastre) fnadresses = os.path.join( "/data/work/cadastre.openstreetmap.fr/bano_cache", code_dept, code_cadastre, code_cadastre + "-adresses.osm" ) load_hsnr_from_cad_file(fnadresses) freq = name_frequency() sel = select_street_names_by_name(freq) adds = collect_adresses_points(sel) nb_rec = load_suffixe_2_db(adds) batch_end_log(nb_rec, batch_id)
def batch_start_log(source, etape, code_cadastre):
    t = time.localtime()
    th = time.strftime('%d-%m-%Y %H:%M:%S', t)
    t = round(time.mktime(t), 0)
    pgc = get_pgc()
    cur = pgc.cursor()
    if len(etape) > 10 and etape[0:10] == 'cache_dept':
        whereclause = 'dept = \'{:s}\' AND etape = \'{:s}\''.format(code_cadastre, etape)
    else:
        whereclause = 'cadastre_com = \'{:s}\' AND source = \'{:s}\' AND etape = \'{:s}\''.format(code_cadastre, source, etape)
    str_query = 'INSERT INTO batch_historique (SELECT * FROM batch WHERE {:s});'.format(whereclause)
    str_query = str_query + 'DELETE FROM batch WHERE {:s};'.format(whereclause)
    if len(etape) > 10 and etape[0:10] == 'cache_dept':
        str_query = str_query + 'INSERT INTO batch (etape,timestamp_debut,date_debut,dept,nombre_adresses) SELECT \'{:s}\',{:f},\'{:s}\',\'{:s}\',0;'.format(etape, t, th, code_cadastre)
    else:
        str_query = str_query + 'INSERT INTO batch (source,etape,timestamp_debut,date_debut,dept,cadastre_com,nom_com,nombre_adresses) SELECT \'{:s}\',\'{:s}\',{:f},\'{:s}\',dept,cadastre_com,nom_com,0 FROM code_cadastre WHERE cadastre_com = \'{:s}\';'.format(source, etape, t, th, code_cadastre)
    str_query = str_query + 'COMMIT;'
    # print(str_query)
    cur.execute(str_query)
    str_query = 'SELECT id_batch::integer FROM batch WHERE {:s};'.format(whereclause)
    cur.execute(str_query)
    c = cur.fetchone()
    return c[0]
def main(args):
    debut_total = time.time()
    usage = 'USAGE : python parcelles_par_noms.py <code INSEE>'
    if len(args) != 2:
        print(usage)
        os._exit(0)
    global pgc, dicts, source, code_insee
    pgc = get_pgc()
    source = 'CADASTRE'
    code_insee = args[1]
    # code_cadastre = get_code_cadastre_from_insee(code_insee)
    # successive hard-coded test values; only the last assignment takes effect
    code_cadastre = '61086'  # KL326' #'W1172'
    code_cadastre = 'KL326'  # 'W1172'
    code_cadastre = 'W1172'
    code_cadastre = 'W1280'
    # code_dept = get_cadastre_code_dept_from_insee(code_insee)
    code_dept = '007'
    code_dept = '050'
    code_dept = '085'
    # batch_id = batch_start_log(source,'agregParcellesNommees',code_cadastre)
    global nodes, ways
    nodes = Nodes()
    ways = Ways()
    # fnparcelles = os.path.join('/data/work/cadastre.openstreetmap.fr/bano_cache',code_dept,code_cadastre,code_cadastre+'-parcelles.osm')
    fnparcelles = os.path.join('C:\\Users\\vincent\\Documents\\GitHub', code_cadastre + '-parcelles.osm')
    executeSQL_INSEE('sql/create_table_parcelles_insee.sql', code_insee)
    load_parcelles(fnparcelles)
    executeSQL_INSEE('sql/agrege_parcelles_par_nom_insee.sql', code_insee)
def get_data_from_pg(data_type, insee_com, cadastre_com, local=False, suffixe_data=None):
    # print(data_type,insee_com,cadastre_com,suffixe_data)
    cache_file = get_cache_filename(data_type, insee_com, cadastre_com)
    # print(cache_file)
    if not os.path.exists(cache_file) or (time.time() - os.path.getmtime(cache_file)) > 86400:
        fq = open("sql/{:s}.sql".format(data_type), "rb")
        str_query = fq.read().replace("__com__", insee_com)
        fq.close()
        if local:
            pgc = get_pgc()
        else:
            pgc = get_pgc_layers()
        if suffixe_data:
            str_query = str_query.replace("__suffixe_data__", suffixe_data)
        cur = pgc.cursor()
        cur.execute(str_query)
        f = open(cache_file, "w+")
        for lt in cur:
            l = list(lt)
            f.write(str(l) + "\n")
        cur.close()
        f.seek(0)
    else:
        f = open(cache_file, "r")
    res = []
    for l in f:
        res.append(eval(l))
    f.close()
    return res
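# Note on the cache logic above: the 86400-second threshold means a cached
# result file is reused for at most one day before the SQL query is re-run.
# A minimal standalone sketch of that freshness test, for illustration only
# (the helper name cache_is_fresh is not part of the original code):
import os
import time

def cache_is_fresh(path, max_age_seconds=86400):
    # True when the file exists and was last written less than max_age_seconds ago.
    return os.path.exists(path) and (time.time() - os.path.getmtime(path)) <= max_age_seconds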
def get_code_insee_from_cadastre(cad):
    str_query = "SELECT insee_com FROM code_cadastre WHERE cadastre_com = '{:s}';".format(cad)
    pgc = get_pgc()
    cur = pgc.cursor()
    cur.execute(str_query)
    for c in cur:
        code_insee = c[0]
    return code_insee
def get_cadastre_etape_timestamp_debut(code_cadastre, etape, source):
    str_query = "SELECT timestamp_debut FROM batch WHERE cadastre_com = '{:s}' AND etape = '{:s}' AND source = '{:s}';".format(code_cadastre, etape, source)
    pgc = get_pgc()
    cur = pgc.cursor()
    cur.execute(str_query)
    for c in cur:
        timestamp_debut = c[0]
    return timestamp_debut
def get_cadastre_format(insee):
    str_query = 'SELECT format_cadastre FROM code_cadastre WHERE insee_com = \'{:s}\';'.format(insee)
    pgc = get_pgc()
    cur = pgc.cursor()
    cur.execute(str_query)
    for c in cur:
        format_cadastre = c[0]
    return format_cadastre
def get_data(data_type):
    fq = open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sql/{:s}.sql'.format(data_type)), 'rb')
    str_query = fq.read()
    fq.close()
    pgc = get_pgc()
    cur = pgc.cursor()
    cur.execute(str_query)
    r = cur.fetchall()
    return r
def batch_end_log(nb, batch_id):
    pgc = get_pgc()
    cur = pgc.cursor()
    t = time.localtime()
    th = time.strftime('%d-%m-%Y %H:%M:%S', t)
    whereclause = 'id_batch = {:d}'.format(batch_id)
    str_query = 'UPDATE batch SET nombre_adresses = {:d},date_fin = \'{:s}\' WHERE {:s};COMMIT;'.format(nb, th, whereclause)
    # print(str_query)
    cur.execute(str_query)
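# Hedged usage sketch: batch_start_log() and batch_end_log() (defined above) are
# meant to bracket one processing step, exactly as the main() functions in this
# file do. The import path is taken from import_cadastre_france.py further down;
# run_step and the callable it wraps are illustrative, not part of the repo.
from outils_de_gestion import batch_start_log, batch_end_log

def run_step(source, etape, code_cadastre, process):
    # process() does the actual work and returns the number of records handled.
    batch_id = batch_start_log(source, etape, code_cadastre)
    nb_rec = process()
    batch_end_log(nb_rec, batch_id)
    return nb_rec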
def age_etape_dept(etape, dept):
    pgc = get_pgc()
    cur = pgc.cursor()
    t = time.localtime()
    t = round(time.mktime(t), 0)
    str_query = 'SELECT timestamp_debut FROM batch WHERE etape = \'{:s}\' AND dept = \'{:s}\' UNION ALL SELECT 0 ORDER BY 1 DESC;'.format(etape, dept)
    cur.execute(str_query)
    c = cur.fetchone()
    return t - c[0]
def main(): print("Content-Type: application/json") print("") pgc = get_pgc() params = cgi.FieldStorage() insee_com = params['insee'].value cadastre_com = get_code_cadastre_from_insee(insee_com) dept = get_code_dept_from_insee(insee_com) labels_statuts_fantoir = get_data_from_pg(pgc, 'labels_statuts_fantoir', '') infos_commune = get_data_from_pg(pgc, 'infos_commune_insee', insee_com) if infos_commune: nom_commune = infos_commune[0][0] lon_commune = infos_commune[0][1] lat_commune = infos_commune[0][2] else: nom_commune = [] lon_commune = None lat_commune = None date_import_cadastre = '' date_fin_cumul = ['',''] if cadastre_com: fin_etape = get_fin_etape(pgc, 'recupCadastre', cadastre_com) if fin_etape: date_import_cadastre = fin_etape fin_etape = get_fin_etape(pgc, 'loadCumul', cadastre_com) if len(fin_etape) == 1: date_fin_cumul = [[], fin_etape[0]] else: date_fin_cumul = fin_etape date_cache_hsnr = get_fin_etape_dept(pgc, 'cache_dept_hsnr_insee', dept)[0] date_cache_highway = get_fin_etape_dept(pgc, 'cache_dept_highway_insee', dept)[0] date_cache_highway_relation = get_fin_etape_dept(pgc, 'cache_dept_highway_relation_insee', dept)[0] data = [ [ nom_commune, date_import_cadastre, date_fin_cumul[0], date_fin_cumul[1], date_cache_hsnr, date_cache_highway, date_cache_highway_relation, lon_commune, lat_commune, labels_statuts_fantoir ], get_data_from_pg(pgc, 'voies_adresses_non_rapprochees_insee', insee_com), get_data_from_pg(pgc, 'voies_adresses_rapprochees_insee', insee_com), get_data_from_pg(pgc, 'voies_seules_non_rapprochees_insee', insee_com), get_data_from_pg(pgc, 'voies_seules_rapprochees_insee', insee_com) ] a = json.JSONEncoder().encode(data) print(a)
def get_code_cadastre_from_insee(insee): str_query = "SELECT cadastre_com FROM code_cadastre WHERE insee_com = '{:s}';".format(insee) code_cadastre = [] pgc = get_pgc() cur = pgc.cursor() cur.execute(str_query) for c in cur: code_cadastre = c[0] return code_cadastre
def main(args):
    global source, batch_id
    global pgc, pgcl
    global code_insee, code_cadastre, code_dept
    global dicts
    global nodes, ways, adresses
    global commune_avec_suffixe
    global geom_suffixe
    debut_total = time.time()
    usage = "USAGE : python addr_cad_2_db.py <code INSEE> <OSM|CADASTRE>"
    if len(args) != 3:
        print(usage)
        os._exit(0)
    source = args[2].upper()
    if source not in ["OSM", "CADASTRE"]:
        print(usage)
        os._exit(0)
    adresses = Adresses()
    pgc = get_pgc()
    pgcl = get_pgc_layers()
    code_insee = args[1]
    code_cadastre = get_code_cadastre_from_insee(code_insee)
    code_dept = get_cadastre_code_dept_from_insee(code_insee)
    batch_id = batch_start_log(source, "loadCumul", code_cadastre)
    dicts = Dicts()
    dicts.load_all(code_insee)
    commune_avec_suffixe = has_addreses_with_suffix(code_insee)
    geom_suffixe = None
    if commune_avec_suffixe:
        geom_suffixe = get_geom_suffixes(code_insee, code_cadastre)
    if source == "CADASTRE":
        fnadresses = os.path.join(
            "/data/work/cadastre.openstreetmap.fr/bano_cache",
            code_dept,
            code_cadastre,
            code_cadastre + "-adresses.osm",
        )
        load_hsnr_from_cad_file(fnadresses, source)
    if source == "OSM":
        load_hsnr_from_pg_osm(code_insee, code_cadastre)
    load_highways_from_pg_osm(code_insee, code_cadastre)
    load_highways_relations_from_pg_osm(code_insee, code_cadastre)
    add_fantoir_to_hsnr()
    load_point_par_rue_from_pg_osm(code_insee, code_cadastre)
    nb_rec = load_to_db(adresses, code_insee, source, code_cadastre, code_dept)
    batch_end_log(nb_rec, batch_id)
    fin_total = time.time()
    print("Execution en " + str(int(fin_total - debut_total)) + " s.")
def load_highway_types(self):
    str_query = '''
        SELECT tag_index, tag_value
        FROM type_voie
        ORDER BY tag_index;'''
    pgc = get_pgc()
    cur_hw = pgc.cursor()
    cur_hw.execute(str_query)
    for c in cur_hw:
        self.highway_types[c[1]] = c[0]
def get_data(data_type, dept):
    fq = open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sql/{:s}.sql'.format(data_type)), 'rb')
    str_query = fq.read().replace('__dept__', dept)
    fq.close()
    pgc = get_pgc()
    cur = pgc.cursor()
    try:
        cur.execute(str_query)
    except psycopg2.Error as e:
        return e.diag.severity
def load_fantoir(insee): pgc = get_pgc() str_query = "SELECT code_insee||id_voie||cle_rivoli,\ TRIM(BOTH FROM nature_voie||' '||libelle_voie),\ ld_bati\ FROM fantoir_voie\ WHERE code_insee = '{:s}' AND\ type_voie = '3' AND\ COALESCE(caractere_annul,'') = '';".format(insee) cur_fantoir = pgc.cursor() cur_fantoir.execute(str_query) for c in cur_fantoir: p = Place(0,0,'',c[1],'','','',c[0],c[2]) places.add_place(p)
def load_fantoir(self, insee):
    str_query = '''
        SELECT code_insee||id_voie||cle_rivoli,
               nature_voie||' '||libelle_voie
        FROM fantoir_voie
        WHERE code_insee = \'''' + insee + '''\' AND
              caractere_annul NOT IN ('O','Q');'''
    pgc = get_pgc()
    cur_fantoir = pgc.cursor()
    cur_fantoir.execute(str_query)
    for c in cur_fantoir:
        self.code_fantoir_vers_nom_fantoir[c[0]] = c[1]
        cle = ' '.join(c[1].replace('-', ' ').split())
        cle = normalize(cle)
        self.fantoir[cle] = c[0]
def load_fantoir(insee): pgc = get_pgc() str_query = "SELECT code_insee||id_voie||cle_rivoli,\ TRIM(BOTH FROM nature_voie||' '||libelle_voie),\ ld_bati\ FROM fantoir_voie\ WHERE code_insee = '{:s}' AND\ type_voie = '3' AND\ COALESCE(caractere_annul,'') = '';".format(insee) cur_fantoir = pgc.cursor() cur_fantoir.execute(str_query) for c in cur_fantoir: p = Place(0, 0, '', c[1], '', '', '', c[0], c[2]) places.add_place(p)
def load_fantoir(self, insee):
    str_query = '''
        SELECT *
        FROM (SELECT code_insee||id_voie||cle_rivoli,
                     nature_voie||' '||libelle_voie,
                     rank() OVER(PARTITION BY nature_voie||' '||libelle_voie ORDER BY type_voie,id_voie,cle_rivoli) rang
              FROM fantoir_voie
              WHERE code_insee = \'''' + insee + '''\' AND
                    caractere_annul NOT IN ('O','Q')) a
        WHERE rang = 1;'''
    pgc = get_pgc()
    cur_fantoir = pgc.cursor()
    cur_fantoir.execute(str_query)
    for c in cur_fantoir:
        self.code_fantoir_vers_nom_fantoir[c[0]] = c[1]
        cle = ' '.join(c[1].replace('-', ' ').split())
        cle = normalize(cle)
        self.fantoir[cle] = c[0]
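# Illustration of the key-building step used by load_fantoir() above: before the
# repo's normalize() is applied, hyphens become spaces and runs of whitespace are
# collapsed. The label below is a made-up example, not data from FANTOIR.
label = "GRANDE-RUE  DU   MOULIN"
cle = ' '.join(label.replace('-', ' ').split())
assert cle == 'GRANDE RUE DU MOULIN'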
def load_fantoir(self, insee): str_query = ("""SELECT * FROM (SELECT code_insee||id_voie||cle_rivoli, nature_voie||' '||libelle_voie, rank() OVER(PARTITION BY nature_voie||' '||libelle_voie ORDER BY type_voie,id_voie,cle_rivoli) rang FROM fantoir_voie WHERE code_insee = '%s' AND caractere_annul NOT IN ('O','Q')) a WHERE rang = 1;""" % insee) pgc = get_pgc() cur_fantoir = pgc.cursor() cur_fantoir.execute(str_query) for c in cur_fantoir: self.code_fantoir_vers_nom_fantoir[c[0]] = c[1] cle = ' '.join(c[1].replace('-', ' ').split()) cle = normalize(cle) self.fantoir[cle] = c[0]
def batch_start_log(source, etape, code_cadastre):
    t = time.localtime()
    th = time.strftime('%d-%m-%Y %H:%M:%S', t)
    t = round(time.mktime(t), 0)
    pgc = get_pgc()
    cur = pgc.cursor()
    whereclause = 'cadastre_com = \'{:s}\' AND source = \'{:s}\' AND etape = \'{:s}\''.format(code_cadastre, source, etape)
    str_query = 'INSERT INTO batch_historique (SELECT * FROM batch WHERE {:s});'.format(whereclause)
    str_query = str_query + 'DELETE FROM batch WHERE {:s};'.format(whereclause)
    str_query = str_query + 'INSERT INTO batch (source,etape,timestamp_debut,date_debut,dept,cadastre_com,nom_com,nombre_adresses) SELECT \'{:s}\',\'{:s}\',{:f},\'{:s}\',dept,cadastre_com,nom_com,0 FROM code_cadastre WHERE cadastre_com = \'{:s}\';'.format(source, etape, t, th, code_cadastre)
    str_query = str_query + 'COMMIT;'
    # print(str_query)
    cur.execute(str_query)
    str_query = 'SELECT id_batch::integer FROM batch WHERE {:s};'.format(whereclause)
    cur.execute(str_query)
    c = cur.fetchone()
    return c[0]
def main(args):
    global source, batch_id
    global pgc, pgcl
    global code_insee, code_cadastre, code_dept
    global dicts
    global nodes, ways, adresses
    debut_total = time.time()
    usage = 'USAGE : python addr_cad_2_db.py <code INSEE> <OSM|CADASTRE>'
    if len(args) != 3:
        print(usage)
        os._exit(0)
    source = args[2].upper()
    if source not in ['OSM', 'CADASTRE']:
        print(usage)
        os._exit(0)
    adresses = Adresses()
    pgc = get_pgc()
    pgcl = get_pgc_layers()
    code_insee = args[1]
    code_cadastre = get_code_cadastre_from_insee(code_insee)
    code_dept = get_cadastre_code_dept_from_insee(code_insee)
    batch_id = batch_start_log(source, 'loadCumul', code_cadastre)
    dicts = Dicts()
    dicts.load_all(code_insee)
    if source == 'CADASTRE':
        fnadresses = os.path.join('/data/work/cadastre.openstreetmap.fr/bano_cache', code_dept, code_cadastre, code_cadastre + '-adresses.osm')
        load_hsnr_from_cad_file(fnadresses, source)
    if source == 'OSM':
        load_hsnr_from_pg_osm(code_insee, code_cadastre)
    load_highways_from_pg_osm(code_insee, code_cadastre)
    load_highways_relations_from_pg_osm(code_insee, code_cadastre)
    add_fantoir_to_hsnr()
    nb_rec = load_to_db(adresses, code_insee, source, code_cadastre, code_dept)
    batch_end_log(nb_rec, batch_id)
    fin_total = time.time()
    print('Execution en ' + str(int(fin_total - debut_total)) + ' s.')
def main(): print "Content-Type: application/json" print "" pgc = get_pgc() params = cgi.FieldStorage() insee_com = params['insee'].value cadastre_com = get_code_cadastre_from_insee(insee_com) dept = get_code_dept_from_insee(insee_com) labels_statuts_fantoir = get_data_from_pg(pgc,'labels_statuts_fantoir','') infos_commune = get_data_from_pg(pgc,'infos_commune_insee',insee_com) if infos_commune: nom_commune = infos_commune[0][0] lon_commune = infos_commune[0][1] lat_commune = infos_commune[0][2] else: nom_commune = [] lon_commune = None lat_commune = None voisins = get_data_from_pg(pgc,'voisins_insee',insee_com) a_voisins = [[v[0],v[1],v[2]] for v in voisins] date_import_cadastre = '' date_fin_cumul = ['',''] if cadastre_com: fin_etape = get_fin_etape(pgc,'recupCadastre',cadastre_com) if fin_etape: date_import_cadastre = fin_etape fin_etape = get_fin_etape(pgc,'loadCumul',cadastre_com) if len(fin_etape) == 1: date_fin_cumul = [[],fin_etape[0]] else: date_fin_cumul = fin_etape date_cache_hsnr = get_fin_etape_dept(pgc,'cache_dept_hsnr_insee',dept)[0] date_cache_highway = get_fin_etape_dept(pgc,'cache_dept_highway_insee',dept)[0] date_cache_highway_relation = get_fin_etape_dept(pgc,'cache_dept_highway_relation_insee',dept)[0] data = [[nom_commune,date_import_cadastre,date_fin_cumul[0],date_fin_cumul[1],date_cache_hsnr,date_cache_highway,date_cache_highway_relation,lon_commune,lat_commune,labels_statuts_fantoir,a_voisins],get_data_from_pg(pgc,'voies_adresses_non_rapprochees_insee',insee_com),get_data_from_pg(pgc,'voies_adresses_rapprochees_insee',insee_com),get_data_from_pg(pgc,'voies_seules_non_rapprochees_insee',insee_com),get_data_from_pg(pgc,'voies_seules_rapprochees_insee',insee_com),get_data_from_pg(pgc,'places_non_rapprochees_insee',insee_com),get_data_from_pg(pgc,'places_rapprochees_insee',insee_com)] a = json.JSONEncoder().encode(data) print(a)
def load_to_db(places):
    table = 'cumul_places'
    sload = "DELETE FROM {:s} WHERE insee_com = '{:s}'".format(table, code_insee)
    pgc = get_pgc()
    cur_insert = pgc.cursor()
    cur_insert.execute(sload)
    a_values_place = []
    sload = 'INSERT INTO {:s} (geometrie,libelle_cadastre,libelle_osm,libelle_fantoir,fantoir,insee_com,cadastre_com,dept,code_postal,source,ld_bati,ld_osm) VALUES'.format(table)
    # a_values = [places.p[a].as_SQL_cadastre_row() for a in places.p]
    a_values = places.as_SQL_Cadastre_array()
    nb_rec = len(a_values)
    if nb_rec > 0:
        cur_insert.execute(sload + ','.join(a_values) + ';COMMIT;')
    # a_values = [places.p[a].as_SQL_osm_row() for a in places.p]
    a_values = places.as_SQL_OSM_array()
    if len(a_values) > 0:
        cur_insert.execute(sload + ','.join(a_values) + ';COMMIT;')
        nb_rec += len(a_values)
    return nb_rec
def main(args):
    debut_total = time.time()
    if len(args) < 2:
        print('USAGE : python addr_cad_2_db.py <code INSEE>')
        os._exit(0)
    global source, batch_id
    global pgc, pgcl
    global code_insee, code_cadastre, code_dept
    global dicts
    global nodes, ways, adresses
    adresses = Adresses()
    source = 'CADASTRE'
    pgc = get_pgc()
    pgcl = get_pgc_layers()
    code_insee = args[1]
    code_cadastre = get_code_cadastre_from_insee(code_insee)
    code_dept = '0' + code_insee[0:2]
    if code_insee[0:2] == '97':
        code_dept = code_insee[0:3]
    batch_id = batch_start_log(source, 'loadCumul', code_cadastre)
    dicts = Dicts()
    dicts.load_all(code_insee)
    fnadresses = os.path.join('/data/work/cadastre.openstreetmap.fr/bano_cache', code_dept, code_cadastre, code_cadastre + '-adresses.osm')
    load_hsnr_from_cad_file(fnadresses, source)
    load_highways_from_pg_osm(code_insee)
    load_highways_relations_from_pg_osm(code_insee)
    add_fantoir_to_hsnr()
    nb_rec = load_to_db(adresses, code_insee, source, code_cadastre, code_dept)
    batch_end_log(nb_rec, batch_id)
    fin_total = time.time()
    print('Execution en ' + str(int(fin_total - debut_total)) + ' s.')
def main(args): global batch_id global pgc global code_insee, code_dept, cadastre_com global nodes, ways nodes = Nodes() ways = Ways() pgc = get_pgc() debut_total = time.time() usage = "USAGE : python parcelles_buildings_2_db.py <code INSEE>" if len(args) != 2: print(usage) os._exit(0) code_insee = args[1] cadastre_com = get_code_cadastre_from_insee(code_insee) # cadastre_com = 'S0335' fn_parcelles = os.path.join(get_cache_directory(code_insee, cadastre_com), cadastre_com + "-parcelles.osm") # fn_parcelles = 'C:\\Users\\vincent\\Documents\\GitHub\\ZA063-parcelles.osm' batch_id = batch_start_log("CADASTRE", "importParcelles", cadastre_com) nb_parcelles = load_parcelles(fn_parcelles) batch_end_log(nb_parcelles, batch_id) a_fn_houses_parts = glob.glob( "{:s}/{:s}-[0-9]-[0-9]-houses.osm".format(get_cache_directory(code_insee, cadastre_com), cadastre_com) ) # a_fn_houses_parts = glob.glob('{:s}\\{:s}-[0-9]-[0-9]-houses.osm'.format('C:\\Users\\vincent\\Documents\\GitHub','ZA063')) create_tmp_building_table() nb_buildings = 0 batch_id = batch_start_log("CADASTRE", "importBuildings", cadastre_com) for h in a_fn_houses_parts: print(h) nodes = Nodes() ways = Ways() nb_buildings += load_tmp_buildings(h) load_buildings() batch_end_log(nb_buildings, batch_id) pgc.close()
def main(args):
    debut_total = time.time()
    usage = 'USAGE : python place_2_db.py <code INSEE> <CADASTRE|OSM>'
    if len(args) < 3:
        print(usage)
        os._exit(0)
    global pgc, pgcl, fantoir, source, code_insee, code_cadastre, code_dept, dicts
    source = args[2].upper()
    if source not in ['OSM', 'CADASTRE']:
        print(usage)
        os._exit(0)
    pgc = get_pgc()
    pgcl = get_pgc_layers()
    code_insee = args[1]
    code_cadastre = get_code_cadastre_from_insee(code_insee)
    code_dept = get_cadastre_code_dept_from_insee(code_insee)
    fantoir = {}
    dicts = Dicts()
    dicts.load_all()
    load_fantoir(code_insee)
    # print(dicts)
    batch_id = batch_start_log(source, 'cumulPlaces', code_cadastre)
    global places
    places = []
    if source == 'CADASTRE':
        data = get_data_from_pg('cadastre_2_places', code_insee)
        # for d in data:
        #     places.append(Place(d[0],d[1],d[2],d[3],d[5],source,d[4]))
    else:
        data = get_data_from_pgl('place_insee', code_insee, code_cadastre)
    for d in data:
        places.append(Place(d[0], d[1], d[2], d[3].decode('utf8'), d[5], source, d[4]))
    nb_rec = load_to_db(places, code_insee, source, code_cadastre, code_dept)
    batch_end_log(nb_rec, batch_id)
def load_to_db(places): table = 'cumul_places' sload = "DELETE FROM {:s} WHERE insee_com = '{:s}'".format( table, code_insee) pgc = get_pgc() cur_insert = pgc.cursor() cur_insert.execute(sload) a_values_place = [] sload = 'INSERT INTO {:s} (geometrie,libelle_cadastre,libelle_osm,libelle_fantoir,fantoir,insee_com,cadastre_com,dept,code_postal,source,ld_bati,ld_osm) VALUES'.format( table) # a_values = [places.p[a].as_SQL_cadastre_row() for a in places.p] a_values = places.as_SQL_Cadastre_array() nb_rec = len(a_values) if nb_rec > 0: cur_insert.execute(sload + ','.join(a_values) + ';COMMIT;') # a_values = [places.p[a].as_SQL_osm_row() for a in places.p] a_values = places.as_SQL_OSM_array() if len(a_values) > 0: cur_insert.execute(sload + ','.join(a_values) + ';COMMIT;') nb_rec += len(a_values) return (nb_rec)
def main(argv): if len(argv) != 5 or argv[1] in ["-h", "-help", "--help"]: print_help() sys.exit() code_departement = argv[1] code_commune = argv[2] # code_insee = get_code_insee_from_cadastre(code_commune) input_filename = argv[3] code_insee = argv[4] if len(code_departement) != 3: command_line_error("le code departement doit avoir 3 chiffres") if len(code_commune) != 5: command_line_error("le code commune doit avoir 5 lettres ou chiffres") osm = OsmParser().parse(input_filename) osm_noms = None osm_noms_filename = code_commune + "-noms.osm" if os.path.exists(osm_noms_filename): print "Charges les noms depuis le fichier " + osm_noms_filename osm_noms = OsmParser().parse(osm_noms_filename) cherche_fantoir_et_osm_highways(code_departement, code_commune, osm, osm_noms) a_values_brut = [] for r in osm.relations.itervalues(): fantoir = "" if "ref:FR:FANTOIR" in r.tags: fantoir = r.tags["ref:FR:FANTOIR"] a_values_brut.append( "('{:s}','{:s}','{:s}')".format(code_insee, r.tags["name"].encode("utf8").replace("'", "''"), fantoir) ) sload = "DELETE FROM cadastre_noms_bruts WHERE insee_com = '{:s}';INSERT INTO cadastre_noms_bruts (insee_com,voie_cadastre,fantoir) VALUES".format( code_insee ) if len(a_values_brut) > 0: pgc = get_pgc() sload = sload + ",".join(a_values_brut) + ";COMMIT;" cur_insert = pgc.cursor() cur_insert.execute(sload)
def main(args):
    debut_total = time.time()
    if len(args) < 2:
        print('USAGE : python addr_osm_2_db.py <code INSEE>')
        os._exit(0)
    global source, batch_id
    global pgc, pgcl
    global code_insee, code_cadastre, code_dept
    global dicts
    global nodes, ways, adresses
    source = 'OSM'
    pgc = get_pgc()
    pgcl = get_pgc_layers()
    code_insee = args[1]
    code_cadastre = get_code_cadastre_from_insee(code_insee)
    code_dept = '0' + code_insee[0:2]
    if code_insee[0:2] == '97':
        code_dept = code_insee[0:3]
    batch_id = batch_start_log(source, 'loadCumul', code_cadastre)
    adresses = Adresses()
    dicts = Dicts()
    dicts.load_all(code_insee)
    load_highways_from_pg_osm(code_insee)
    load_hsnr_from_pg_osm(code_insee)
    add_fantoir_to_hsnr()
    load_to_db(adresses)
    fin_total = time.time()
    print('Execution en ' + str(int(fin_total - debut_total)) + ' s.')
import addr_2_db as a
import sys
import os, os.path
import subprocess
import glob
import time
from outils_de_gestion import batch_start_log
from outils_de_gestion import batch_end_log
from pg_connexion import get_pgc

if len(sys.argv) > 2:
    print('Mauvais nombre d\'arguments')
    print('USAGE : ./import_cadastre_france.py {dept}')
    os._exit(0)
pgc = get_pgc()
if len(sys.argv) == 2:
    str_query = 'SELECT insee_com,cadastre_com,nom_com,cadastre_dept FROM code_cadastre WHERE format_cadastre = \'VECT\' AND dept = \'{:s}\' ORDER BY dept,nom_com;'.format(sys.argv[1])
else:
    str_query = 'SELECT c.insee_com,c.cadastre_com,c.nom_com,c.cadastre_dept FROM code_cadastre c LEFT OUTER JOIN (SELECT cadastre_com FROM batch WHERE etape = \'importQadastre\' AND date_fin IS NOT NULL) b ON c.cadastre_com = b.cadastre_com WHERE b.cadastre_com IS NULL AND c.format_cadastre = \'VECT\' ORDER BY dept,nom_com;'
cur = pgc.cursor()
cur.execute(str_query)
for c in cur:
    print(c[2])
    parts = glob.glob('/data/work/cadastre.openstreetmap.fr/bano_cache/{:s}/{:s}/{:s}-[0-9]-[0-9].pdf'.format(c[3], c[1], c[1]))
    nb_parts = len(parts)
    batch_id = batch_start_log('CADASTRE', 'importQadastre', c[1])
    for p in parts:
        # print(p)
        p_out = p[0:-4] + '-houses.osm'
def main(args):
    if len(args) < 3:
        print('USAGE : python addr_2_db.py <code INSEE> <code Cadastre> <OSM|CADASTRE>')
        os._exit(0)
    global source, batch_id
    source = args[3].upper()
    if source not in ['OSM', 'CADASTRE']:
        print('USAGE : python addr_2_db.py <code INSEE> <code Cadastre> <OSM|CADASTRE>')
        os._exit(0)
    global pgc
    pgc = get_pgc()
    global code_insee, code_cadastre, code_dept
    code_insee = args[1]
    code_cadastre = args[2]
    code_dept = '0' + code_insee[0:2]
    if code_insee[0:2] == '97':
        code_dept = code_insee[0:3]
    batch_id = batch_start_log(source, 'loadCumul', code_cadastre)
    global dicts
    dicts = Dicts()
    dicts.load_all(code_insee)
    dict_objets_pour_output = {'1': {}}
    dict_objets_pour_output['1']['libelle_pour_fichiers'] = 'adresse_point_sur_batiment'
    # dict_objets_pour_output['2']['libelle_pour_fichiers'] = 'adresse_tag_sur_batiment'
    rep_parcelles_adresses = 'parcelles_adresses'
    global root_dir_out
    root_dir_out = 'osm_output'
    if socket.gethostname() == 'osm104':
        hidden_dept_dir = '/data/work/cadastre.openstreetmap.fr/hidden/' + code_dept
        if not os.path.exists(hidden_dept_dir):
            os.mkdir(hidden_dept_dir)
        rep_parcelles_adresses = '/data/work/cadastre.openstreetmap.fr/hidden/' + code_dept + '/' + code_cadastre
        root_dir_out = rep_parcelles_adresses
    # else:
    if not os.path.exists(rep_parcelles_adresses):
        os.mkdir(rep_parcelles_adresses)
    # if not os.path.exists(root_dir_out):
    #     os.mkdir(root_dir_out)
    # fnparcelles = rep_parcelles_adresses+'/'+code_cadastre+'-parcelles.osm'
    # if not os.path.exists(fnparcelles):
    #     download_vector_from_cadastre(code_insee,code_cadastre,fnparcelles,'parcelles')
    if source == 'CADASTRE':
        fnadresses = rep_parcelles_adresses + '/' + code_cadastre + '-adresses.osm'
        if not os.path.exists(fnadresses):
            download_vector_from_cadastre(code_insee, code_cadastre, fnadresses, 'adresses')
    if source == 'OSM':
        fnadresses = rep_parcelles_adresses + '/' + code_cadastre + '-adresses-' + source + '.osm'
        if not os.path.exists(fnadresses):
            download_addresses_from_overpass(fnadresses)
    # building_rep = 'cache_buildings'
    # if not os.path.exists(building_rep):
    #     os.mkdir(building_rep)
    global nodes, ways
    nodes = Nodes()
    ways = Ways()
    adresses = Adresses()
    print('mise en cache des points adresses...')
    print('nodes...')
    sys.stdout.flush()
    xmladresses = ET.parse(fnadresses)
    # nodes referenced by associatedStreet relations
    dict_node_relations = {}
    dict_ways_relations = {}
    for asso in xmladresses.iter('relation'):
        is_name_vide = False
        for t in asso.iter('tag'):
            if t.get('k') == 'name' and len(t.get('v')) < 2:
                is_name_vide = True
                break
        if is_name_vide:
            continue
        is_type_associatedStreet = False
        for t in asso.iter('tag'):
            if t.get('k') == 'type' and t.get('v') == 'associatedStreet':
                is_type_associatedStreet = True
                break
        if not is_type_associatedStreet:
            continue
        for t in asso.iter('tag'):
            if t.get('k') == 'name':
                for n in asso.iter('member'):
                    if n.get('type') == 'node':
                        if not n.get('ref') in dict_node_relations:
                            dict_node_relations[n.get('ref')] = []
                        dict_node_relations[n.get('ref')] = dict_node_relations[n.get('ref')] + [normalize(t.get('v'))]
                    if n.get('type') == 'way':
                        if not n.get('ref') in dict_ways_relations:
                            dict_ways_relations[n.get('ref')] = []
                        dict_ways_relations[n.get('ref')] = dict_ways_relations[n.get('ref')] + [normalize(t.get('v'))]
                dicts.add_voie('adresse', t.get('v'))
    load_nodes_from_xml_parse(xmladresses)
    for n in xmladresses.iter('node'):
        dtags = get_tags(n)
        if 'addr:street' in dtags:
            dicts.add_voie('adresse', dtags['addr:street'])
        n_id = n.get('id')
        nodes.n[n_id].modified = True
        if 'addr:housenumber' in dtags and n_id in dict_node_relations:
            if is_valid_housenumber(dtags['addr:housenumber']):
                for v in dict_node_relations[n_id]:
                    ad = Adresse(nodes.n[n_id], dtags['addr:housenumber'], v)
                    adresses.add_adresse(ad)
            else:
                print('Numero invalide : {:s}'.format(dtags['addr:housenumber'].encode('utf8')))
        if 'addr:housenumber' in dtags and 'addr:street' in dtags:
            if is_valid_housenumber(dtags['addr:housenumber']):
                ad = Adresse(nodes.n[n_id], dtags['addr:housenumber'], normalize(dtags['addr:street']))
                adresses.add_adresse(ad)
            else:
                print('Numero invalide : {:s}'.format(dtags['addr:housenumber'].encode('utf8')))
    load_ways_from_xml_parse(xmladresses, 'centroid_building')
    for n in xmladresses.iter('way'):
        dtags = get_tags(n)
        if 'addr:street' in dtags:
            dicts.add_voie('adresse', dtags['addr:street'])
        n_id = n.get('id')
        if 'addr:housenumber' in dtags and n_id in dict_ways_relations:
            if is_valid_housenumber(dtags['addr:housenumber']):
                for v in dict_ways_relations[n_id]:
                    new_node_geom = ways.w['centroid_building'][n_id].geom.get_centroid()
                    new_node = nodes.add_new_node(new_node_geom[0], new_node_geom[1], {'addr:housenumber': dtags['addr:housenumber']})
                    ad = Adresse(nodes.n[new_node], dtags['addr:housenumber'], v)
                    adresses.add_adresse(ad)
            else:
                print('Numero invalide : {:s}'.format(dtags['addr:housenumber'].encode('utf8')))
        if 'addr:housenumber' in dtags and 'addr:street' in dtags:
            if is_valid_housenumber(dtags['addr:housenumber']):
                # for v in dict_ways_relations[n_id]:
                new_node_geom = ways.w['centroid_building'][n_id].geom.get_centroid()
                new_node = nodes.add_new_node(new_node_geom[0], new_node_geom[1], {'addr:housenumber': dtags['addr:housenumber']})
                ad = Adresse(nodes.n[new_node], dtags['addr:housenumber'], normalize(dtags['addr:street']))
                adresses.add_adresse(ad)
            else:
                print('Numero invalide : {:s}'.format(dtags['addr:housenumber'].encode('utf8')))
    # print('chargement...')
    # sys.stdout.flush()
    # cur_adresses = pgc.cursor()
    # str_query = ""
    # for idx,voie in enumerate(adresses.a):
    #     for num in adresses.a[voie]['numeros']:
    #         ad = adresses.a[voie]['numeros'][num]
    #         str_query = str_query+'''INSERT INTO adresses_'''+code_insee+'''
    #             (SELECT ST_Transform('''+ad.node.get_geom_as_text()+''',
    #             2154),'''+str(ad.node.attribs['id'])+''',\''''+num+'''\',\''''+voie+'''\');'''
    #     if idx%100 == 0:
    #         cur_adresses.execute(str_query.encode('utf8')+"COMMIT;")
    #         str_query = ""
    # if str_query != "":
    #     cur_adresses.execute(str_query.encode('utf8')+"COMMIT;")
    highway_rep = 'cache_highways'
    if not os.path.exists(highway_rep):
        os.mkdir(highway_rep)
    fnhighway = highway_rep + '/highways_' + code_insee + '.osm'
    if not os.path.exists(fnhighway):
        download_ways_from_overpass('highway', fnhighway)
    # print('mise en cache des voies...')
    # sys.stdout.flush()
    xmlways = ET.parse(fnhighway)
    # load_nodes_from_xml_parse(xmlways)
    # load_ways_from_xml_parse(xmlways,'highway')
    for w in xmlways.iter('way'):
        for t in w.iter('tag'):
            if t.get('k') == 'name' and len(t.get('v')) < 2:
                break
            if t.get('k') == 'name':
                name_osm = t.get('v')
                dicts.add_voie('OSM', name_osm)
                name_norm = normalize(name_osm)
                if name_norm not in dicts.ways_osm:
                    dicts.ways_osm[name_norm] = {'name': name_osm, 'ids': []}
                dicts.ways_osm[name_norm]['ids'].append(w.get('id'))
    # print('Traitements PostGIS...')
    # sys.stdout.flush()
    # executeSQL_INSEE('adresses_buildings.sql',code_insee)
    # duplicate the nodes/ways/adresses instances because the edits differ by address style:
    # 1 : addresses as points snapped onto the buildings
    # 2 : addresses as tags on the buildings
    dict_objets_pour_output['1']['nodes'] = nodes
    dict_objets_pour_output['1']['ways'] = ways
    dict_objets_pour_output['1']['adresses'] = adresses
    # dict_objets_pour_output['2']['nodes'] = copy.deepcopy(nodes)
    # dict_objets_pour_output['2']['ways'] = copy.deepcopy(ways)
    # dict_objets_pour_output['2']['adresses'] = copy.deepcopy(adresses)
    for k in dict_objets_pour_output.viewkeys():
        load_to_db(dict_objets_pour_output[k]['nodes'], dict_objets_pour_output[k]['ways'], dict_objets_pour_output[k]['adresses'], dict_objets_pour_output[k]['libelle_pour_fichiers'])
    fin_total = time.time()
    print('Execution en ' + str(int(fin_total - debut_total)) + ' s.')
def main(args):
    global source, batch_id
    global pgc, pgcl
    global code_insee, code_cadastre, code_dept
    global dicts
    global nodes, ways, adresses
    global commune_avec_suffixe
    global geom_suffixe
    global use_cache
    use_cache = True
    debut_total = time.time()
    usage = 'USAGE : python addr_cad_2_db.py <code INSEE> <OSM|CADASTRE> {use_cache=True}'
    if len(args) < 3:
        print(usage)
        os._exit(0)
    if len(args) > 3:
        # use_cache = eval(args[3])
        use_cache = args[3]
    source = args[2].upper()
    if source not in ['OSM', 'CADASTRE']:
        print(usage)
        os._exit(0)
    adresses = Adresses()
    pgc = get_pgc()
    pgcl = get_pgc_layers()
    code_insee = args[1]
    code_cadastre = get_code_cadastre_from_insee(code_insee)
    code_dept = get_cadastre_code_dept_from_insee(code_insee)
    batch_id = batch_start_log(source, 'loadCumul', code_cadastre)
    dicts = Dicts()
    dicts.load_all(code_insee)
    commune_avec_suffixe = has_addreses_with_suffix(code_insee)
    geom_suffixe = None
    if commune_avec_suffixe:
        geom_suffixe = get_geom_suffixes(code_insee, code_cadastre)
    if source == 'CADASTRE':
        fnadresses = os.path.join('/data/work/cadastre.openstreetmap.fr/bano_cache', code_dept, code_cadastre, code_cadastre + '-adresses.osm')
        load_hsnr_from_cad_file(fnadresses, source)
    if source == 'OSM':
        load_hsnr_from_pg_osm(code_insee, code_cadastre)
        load_hsnr_bbox_from_pg_osm(code_insee, code_cadastre)
        load_type_highway_from_pg_osm(code_insee, code_cadastre)
    load_highways_from_pg_osm(code_insee, code_cadastre)
    load_highways_relations_from_pg_osm(code_insee, code_cadastre)
    load_highways_bbox_from_pg_osm(code_insee, code_cadastre)
    load_highways_relations_bbox_from_pg_osm(code_insee, code_cadastre)
    add_fantoir_to_hsnr()
    load_point_par_rue_from_pg_osm(code_insee, code_cadastre)
    load_point_par_rue_complement_from_pg_osm(code_insee, code_cadastre)
    nb_rec = load_to_db(adresses, code_insee, source, code_cadastre, code_dept)
    batch_end_log(nb_rec, batch_id)
    fin_total = time.time()
#!/usr/bin/env python
# coding: UTF-8

import os
import psycopg2
from pg_connexion import get_pgc

pgc = get_pgc()
cur_dept = pgc.cursor()
cur_comm = pgc.cursor()
str_query = 'SELECT DISTINCT dept FROM code_cadastre WHERE dept IS NOT NULL;'
# str_query = 'SELECT DISTINCT dept FROM code_cadastre WHERE dept =\'090\';'
cur_dept.execute(str_query)
for d in cur_dept:
    str_query = 'SELECT cadastre_com,nom_com FROM code_cadastre WHERE dept = \'{:s}\' ORDER BY 1;'.format(d[0])
    cur_comm.execute(str_query)
    fn = 'import_v2_{:s}.sh'.format(d[0])
    f = open(fn, 'wb')
    f.write('#!/bin/bash\n')
    f.write('cd /data/project/cadastre.openstreetmap.fr/export-cadastre/bin/cadastre-housenumber/bano\n')
    for c in cur_comm:
        f.write('./import-bano.sh {:s} {:s} "{:s}" true\n'.format(d[0], c[0], c[1]))
    f.close()
    os.chmod(fn, 0777)
def main(args):
    if len(args) < 2:
        print('USAGE : python addr_fantoir_building.py <code INSEE> <code Cadastre>')
        os._exit(0)
    global pgc
    pgc = get_pgc()
    global code_insee, code_cadastre, code_dept
    code_insee = args[1]
    code_cadastre = args[2]
    code_dept = '0' + code_insee[0:2]
    if code_insee[0:2] == '97':
        code_dept = code_insee[0:3]
    global dicts
    dicts = Dicts()
    dicts.load_all(code_insee)
    dict_objets_pour_output = {'1': {}, '2': {}}
    dict_objets_pour_output['1']['libelle_pour_fichiers'] = 'adresse_point_sur_batiment'
    dict_objets_pour_output['2']['libelle_pour_fichiers'] = 'adresse_tag_sur_batiment'
    rep_parcelles_adresses = 'parcelles_adresses'
    global root_dir_out
    root_dir_out = 'osm_output'
    if socket.gethostname() == 'osm104':
        rep_parcelles_adresses = 'data/' + code_dept + '/' + code_cadastre
        root_dir_out = rep_parcelles_adresses
    else:
        if not os.path.exists(rep_parcelles_adresses):
            os.mkdir(rep_parcelles_adresses)
        if not os.path.exists(root_dir_out):
            os.mkdir(root_dir_out)
    fnparcelles = rep_parcelles_adresses + '/' + code_cadastre + '-parcelles.osm'
    fnadresses = rep_parcelles_adresses + '/' + code_cadastre + '-adresses.osm'
    if not os.path.exists(fnparcelles):
        download_vector_from_cadastre(code_insee, code_cadastre, fnparcelles, 'parcelles')
    if not os.path.exists(fnadresses):
        download_vector_from_cadastre(code_insee, code_cadastre, fnadresses, 'adresses')
    building_rep = 'cache_buildings'
    if not os.path.exists(building_rep):
        os.mkdir(building_rep)
    global nodes, ways
    nodes = Nodes()
    ways = Ways()
    adresses = Adresses()
    fnbuilding = building_rep + '/buildings_' + code_insee + '.osm'
    # if not os.path.exists(fnbuilding):
    download_ways_from_overpass('building', fnbuilding)
    print('mise en cache des buildings...')
    sys.stdout.flush()
    xmlbuldings = ET.parse(fnbuilding)
    print('nodes...')
    sys.stdout.flush()
    load_nodes_from_xml_parse(xmlbuldings)
    print('buildings...')
    sys.stdout.flush()
    load_ways_from_xml_parse(xmlbuldings, 'building')
    del xmlbuldings
    gc.collect()
    executeSQL_INSEE('create_tables__com__.sql', code_insee)
    print('chargement des polygones...')
    sys.stdout.flush()
    cur_buildings = pgc.cursor()
    str_query = ""
    for idx, id in enumerate(ways.w['building']):
        if not ways.w['building'][id].is_valid:
            continue
        str_query = str_query + ways.w['building'][id].get_as_SQL_import_building()
        if idx % 100 == 0 and str_query != "":
            cur_buildings.execute(str_query + "COMMIT;")
            str_query = ""
    if str_query != "":
        cur_buildings.execute(str_query + "COMMIT;")
    print('chargement des segments...')
    sys.stdout.flush()
    str_query = ""
    for idx, id in enumerate(ways.w['building']):
        if not ways.w['building'][id].is_valid:
            continue
        for nn in range(0, len(ways.w['building'][id].geom.a_nodes) - 1):
            str_query = str_query + ways.w['building'][id].get_as_SQL_import_building_segment(nn)
        if idx % 100 == 0 and str_query != "":
            cur_buildings.execute(str_query + "COMMIT;")
            str_query = ""
    if str_query != "":
        cur_buildings.execute(str_query + "COMMIT;")
    str_query = ""
    print('mise en cache des parcelles...')
    print('nodes...')
    sys.stdout.flush()
    xmlparcelles = ET.parse(fnparcelles)
    load_nodes_from_xml_parse(xmlparcelles)
    print('parcelles...')
    sys.stdout.flush()
    load_ways_from_xml_parse(xmlparcelles, 'parcelle')
    del xmlparcelles
    gc.collect()
    print('chargement...')
    sys.stdout.flush()
    cur_parcelles = pgc.cursor()
    str_query = ""
    for idx, id in enumerate(ways.w['parcelle']):
        str_query = str_query + ways.w['parcelle'][id].get_as_SQL_import_parcelle()
        if idx % 100 == 0:
            cur_parcelles.execute(str_query + "COMMIT;")
            str_query = ""
    if str_query != "":
        cur_parcelles.execute(str_query + "COMMIT;")
    print('mise en cache des points adresses...')
    print('nodes...')
    sys.stdout.flush()
    xmladresses = ET.parse(fnadresses)
    dict_node_relations = {}
    for asso in xmladresses.iter('relation'):
        for t in asso.iter('tag'):
            if t.get('k') == 'name':
                for n in asso.iter('member'):
                    if not n.get('ref') in dict_node_relations:
                        dict_node_relations[n.get('ref')] = []
                    dict_node_relations[n.get('ref')] = dict_node_relations[n.get('ref')] + [normalize(t.get('v'))]
                dicts.add_voie('adresse', t.get('v'))
    load_nodes_from_xml_parse(xmladresses)
    for n in xmladresses.iter('node'):
        dtags = get_tags(n)
        n_id = n.get('id')
        nodes.n[n_id].modified = True
        if 'addr:housenumber' in nodes.n[n_id].tags and n_id in dict_node_relations:
            for v in dict_node_relations[n_id]:
                ad = Adresse(nodes.n[n_id], dtags['addr:housenumber'], v)
                adresses.add_adresse(ad)
    print('chargement...')
    sys.stdout.flush()
    cur_adresses = pgc.cursor()
    str_query = ""
    for idx, voie in enumerate(adresses.a):
        for num in adresses.a[voie]['numeros']:
            ad = adresses.a[voie]['numeros'][num]
            str_query = str_query + '''INSERT INTO adresses_''' + code_insee + '''
                (SELECT ST_Transform(''' + ad.node.get_geom_as_text() + ''',
                2154),''' + str(ad.node.attribs['id']) + ''',\'''' + num + '''\',\'''' + voie + '''\');'''
        if idx % 100 == 0:
            cur_adresses.execute(str_query.encode('utf8') + "COMMIT;")
            str_query = ""
    if str_query != "":
        cur_adresses.execute(str_query.encode('utf8') + "COMMIT;")
    highway_rep = 'cache_highways'
    if not os.path.exists(highway_rep):
        os.mkdir(highway_rep)
    fnhighway = highway_rep + '/highways_' + code_insee + '.osm'
    # if not os.path.exists(fnhighway):
    download_ways_from_overpass('highway', fnhighway)
    print('mise en cache des voies...')
    sys.stdout.flush()
    xmlways = ET.parse(fnhighway)
    load_nodes_from_xml_parse(xmlways)
    load_ways_from_xml_parse(xmlways, 'highway')
    for w in xmlways.iter('way'):
        for t in w.iter('tag'):
            if t.get('k') == 'name':
                name_osm = t.get('v')
                dicts.add_voie('OSM', name_osm)
                name_norm = normalize(name_osm)
                if name_norm not in dicts.ways_osm:
                    dicts.ways_osm[name_norm] = {'name': name_osm, 'ids': []}
                dicts.ways_osm[name_norm]['ids'].append(w.get('id'))
    print('Traitements PostGIS...')
    sys.stdout.flush()
    executeSQL_INSEE('adresses_buildings.sql', code_insee)
    # duplicate the nodes/ways/adresses instances because the edits differ by address style:
    # 1 : addresses as points snapped onto the buildings
    # 2 : addresses as tags on the buildings
    dict_objets_pour_output['1']['nodes'] = nodes
    dict_objets_pour_output['1']['ways'] = ways
    dict_objets_pour_output['1']['adresses'] = adresses
    dict_objets_pour_output['2']['nodes'] = copy.deepcopy(nodes)
    dict_objets_pour_output['2']['ways'] = copy.deepcopy(ways)
    dict_objets_pour_output['2']['adresses'] = copy.deepcopy(adresses)
    # tierce == '1':
    print('Report des adresses sur les buildings en tant que nouveaux points...')
    sys.stdout.flush()
    cur_addr_node_building = pgc.cursor()
    str_query = '''SELECT lon, lat, id_building::integer, indice_node_1, numero, voie, id_adresse::integer
                   FROM points_adresse_sur_building_''' + code_insee + ''';'''
    cur_addr_node_building.execute(str_query)
    for c in cur_addr_node_building:
        dict_objets_pour_output['1']['nodes'].n[str(c[6])].move_to(c[0], c[1])
        dict_objets_pour_output['1']['ways'].w['building'][str(c[2])].insert_new_point(str(c[6]), c[3])
        dict_objets_pour_output['1']['adresses'].a[c[5]]['numeros'][c[4]].add_addr_as_node_on_building(str(c[6]))
        dict_objets_pour_output['1']['adresses'].a[c[5]]['numeros'][c[4]].add_building_for_addr_node(str(c[2]))
    # tierce == '2':
    print('Report des adresses sur les buildings en tant que nouveau tag...')
    sys.stdout.flush()
    # modified buildings
    cur_addr_way_building = pgc.cursor()
    str_query = '''SELECT id_building::integer, id_adresse::integer, numero, voie
                   FROM adresse_sur_buildings_''' + code_insee + ''';'''
    cur_addr_way_building.execute(str_query)
    for c in cur_addr_way_building:
        dict_objets_pour_output['2']['ways'].w['building'][str(c[0])].add_tag('addr:housenumber', c[2])
        dict_objets_pour_output['2']['adresses'].a[c[3]]['numeros'][c[2]].add_addr_as_building(str(c[0]))
    print('Ajout des autres buildings de la voie...')
    sys.stdout.flush()
    # other buildings on the parcels of the street
    cur_addr_building_comp = pgc.cursor()
    str_query = '''SELECT id_building::integer, voie
                   FROM buildings_complementaires_''' + code_insee + '''
                   EXCEPT
                   SELECT id_building::integer, voie
                   FROM adresse_sur_buildings_''' + code_insee + ''';'''
    cur_addr_building_comp.execute(str_query)
    for c in cur_addr_building_comp:
        for k in dict_objets_pour_output.viewkeys():
            dict_objets_pour_output[k]['adresses'].add_batiment_complementaire(c[1], str(c[0]))
    for k in dict_objets_pour_output.viewkeys():
        write_output(dict_objets_pour_output[k]['nodes'],
                     dict_objets_pour_output[k]['ways'],
                     dict_objets_pour_output[k]['adresses'],
                     dict_objets_pour_output[k]['libelle_pour_fichiers'])
    # cleanup in production
    if socket.gethostname() == 'osm104':
        purge_pg_tables(code_insee)
    fin_total = time.time()
    print('Execution en ' + str(int(fin_total - debut_total)) + ' s.')