def get_data_from_pg(data_type, insee_com, cadastre_com, local=False, suffixe_data=None):
    """Return rows for one commune, going through a 24h on-disk cache.

    data_type    -- basename of the SQL template in sql/<data_type>.sql
    insee_com    -- INSEE code substituted for the '__com__' placeholder
    cadastre_com -- cadastre code, used only to build the cache filename
    local        -- True: use the get_pgc() connection, else get_pgc_layers()
    suffixe_data -- optional value for the '__suffixe_data__' placeholder

    Returns a list of rows; each row is the list that was written to the
    cache file (one repr per line).
    """
    # print(data_type,insee_com,cadastre_com,suffixe_data)
    cache_file = get_cache_filename(data_type, insee_com, cadastre_com)
    cache_stale = (not use_cache
                   or not os.path.exists(cache_file)
                   or (time.time() - os.path.getmtime(cache_file)) > 86400)
    if cache_stale:
        # Load the SQL template shipped next to this script and substitute
        # the commune code. Opened in text mode so .replace() works with a
        # str argument on both Python 2 and 3 (was 'rb').
        sql_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                'sql/{:s}.sql'.format(data_type))
        with open(sql_path, 'r') as fq:
            str_query = fq.read().replace('__com__', insee_com)
        # print(str_query)
        if local:
            conn = get_pgc()
        else:
            conn = get_pgc_layers()
        if suffixe_data:
            str_query = str_query.replace('__suffixe_data__', suffixe_data)
        cur = conn.cursor()
        cur.execute(str_query)
        # Rewrite the cache file: one repr'd row per line.
        with open(cache_file, 'w') as f:
            for lt in cur:
                f.write(str(list(lt)) + '\n')
        cur.close()
    # print('open cache '+cache_file)
    res = []
    # NOTE(review): eval() is only acceptable because this process wrote the
    # cache file itself; if rows are plain literals, ast.literal_eval would
    # be safer -- TODO confirm row contents.
    with open(cache_file, 'r') as f:
        for line in f:
            res.append(eval(line))
    return res
def get_data_by_dept_from_pg(data_type, dept):
    """Refresh the per-commune cache files for a whole departement.

    Runs the sql/<data_type>.sql template once for the departement (the
    template's exact '=__com__' match is rewritten into a LIKE prefix
    match), splits the rows per INSEE code and writes one cache file per
    commune known in code_cadastre. The refresh is skipped when the last
    run is less than one hour old.
    """
    if dept[0:1] == '0':
        dept = dept[1:]
    etape_dept = 'cache_dept_' + data_type
    # Cache kept for one hour: nothing to do if the last run is recent.
    if o.age_etape_dept(etape_dept, dept) <= 3600:
        return
    print(u'Mise à jour du cache "{:s}"'.format(data_type.upper()))
    batch_id = o.batch_start_log(source, etape_dept, dept)
    # Pad the departement to a 5-char LIKE pattern ('75___', '2A___', ...)
    # so the per-commune equality becomes a departement prefix match.
    str_sql_dept_like = (dept + '___')[0:5]
    with open('sql/{:s}.sql'.format(data_type), 'r') as fq:
        str_query = fq.read().replace(
            '=\'__com__', ' LIKE \'{:s}'.format(str_sql_dept_like))
    cur = pgcl.cursor()
    cur.execute(str_query)
    # Parameterized to avoid SQL injection through dept (assumes a DB-API
    # driver with the %s paramstyle, e.g. psycopg2 -- TODO confirm).
    cur_com = pgc.cursor()
    cur_com.execute(
        "SELECT insee_com,cadastre_com FROM code_cadastre WHERE dept = %s",
        (dept,))
    k_insee = {}
    for c in cur_com:
        k_insee[c[0]] = {'cad': c[1], 'data': []}
    for lt in cur:
        l = list(lt)
        insee = l[-1]
        # Exceptions Paris/Lyon/Marseille (whole-city codes are skipped)
        if insee == '75056' or insee == '13055' or insee == '69123':
            continue
        l = l[0:-1]
        if insee not in k_insee:
            e.write_log_to_file(
                f_log,
                'Commune manquante au Cadastre : INSEE # {:s}'.format(insee))
            continue
        k_insee[insee]['data'].append(str(l))
    # Plain dict iteration works on both Python 2 and 3 (was .iterkeys()).
    for k in k_insee:
        cache_file = a.get_cache_filename(data_type, k, k_insee[k]['cad'])
        with open(cache_file, 'w') as f:
            f.write('\n'.join(k_insee[k]['data']))
    o.batch_end_log(len(k_insee), batch_id)
def load_cadastre():
    """Merge cadastre place rows into the global 'places' collection.

    Keeps the cache file's mtime fresh as long as it is newer than the
    last 'importBuildings' run, then updates matching FANTOIR places or
    creates new Place entries for unmatched rows.
    """
    cache_name = get_cache_filename('cadastre_2_places', code_insee, code_cadastre)
    if os.path.exists(cache_name):
        buildings_ts = get_cadastre_etape_timestamp_debut(
            code_cadastre, 'importBuildings', 'CADASTRE')
        cache_ts = os.path.getmtime(cache_name)
        # print('date_buildings_en_base '+str(buildings_ts))
        # print('date_cache '+str(cache_ts))
        if cache_ts > buildings_ts:
            # Touch the cache so it stays considered up to date.
            os.utime(cache_name, None)
    for row in get_data_from_pg('cadastre_2_places', code_insee, code_cadastre, True):
        matches = places.match_name(row[2], 'FANTOIR')
        if not matches:
            places.add_place(
                Place(row[0], row[1], '', '', row[2], '', '', row[3], -1))
        else:
            for m in matches:
                places.p[m].update_cadastre(row[0], row[1], row[2])
def load_cadastre():
    """Load cadastre place records and fold them into 'places'.

    Refreshes the cache file's mtime while it remains newer than the last
    building import, then either updates existing FANTOIR matches or adds
    a new Place for each unmatched record.
    """
    fname = get_cache_filename('cadastre_2_places', code_insee, code_cadastre)
    if os.path.exists(fname):
        base_ts = get_cadastre_etape_timestamp_debut(
            code_cadastre, 'importBuildings', 'CADASTRE')
        if os.path.getmtime(fname) > base_ts:
            # Cache still newer than the import: mark it as fresh.
            os.utime(fname, None)
    records = get_data_from_pg('cadastre_2_places', code_insee,
                               code_cadastre, True)
    for rec in records:
        hits = places.match_name(rec[2], 'FANTOIR')
        if hits:
            for key in hits:
                places.p[key].update_cadastre(rec[0], rec[1], rec[2])
        else:
            places.add_place(
                Place(rec[0], rec[1], '', '', rec[2], '', '', rec[3], -1))
def get_data_from_pgl(data_type, insee_com, cadastre_com):
    """Return rows for one commune from the pgcl connection, with a 24h cache.

    Like get_data_from_pg but always queries the module-level 'pgcl'
    connection and resolves the SQL template relative to the working
    directory. Returns a list of rows (one cached repr per line).
    """
    cache_file = get_cache_filename(data_type, insee_com, cadastre_com)
    if (not os.path.exists(cache_file)
            or (time.time() - os.path.getmtime(cache_file)) > 86400):
        # Text mode so .replace() takes a str on both Python 2 and 3
        # (was 'rb').
        with open('sql/{:s}.sql'.format(data_type), 'r') as fq:
            str_query = fq.read().replace('__com__', insee_com)
        cur = pgcl.cursor()
        cur.execute(str_query)
        # Rewrite the cache file: one repr'd row per line.
        with open(cache_file, 'w') as f:
            for lt in cur:
                f.write(str(list(lt)) + '\n')
        cur.close()
    res = []
    # NOTE(review): eval() is only acceptable because this process wrote
    # the cache file itself; prefer ast.literal_eval if rows are plain
    # literals -- TODO confirm row contents.
    with open(cache_file, 'r') as f:
        for line in f:
            res.append(eval(line))
    return res
def get_data_by_dept_from_pg(data_type, dept):
    """Rebuild the per-commune cache files for an entire departement.

    The sql/<data_type>.sql template is executed once, with its exact
    '=__com__' match rewritten into a departement-prefix LIKE match; rows
    are then split by INSEE code into one cache file per commune listed in
    code_cadastre. Skipped when the last run is under one hour old.
    """
    if dept[0:1] == '0':
        dept = dept[1:]
    etape_dept = 'cache_dept_' + data_type
    # Cache gardé 1h -- nothing to do while the last run is recent.
    if o.age_etape_dept(etape_dept, dept) <= 3600:
        return
    print(u'Mise à jour du cache "{:s}"'.format(data_type.upper()))
    batch_id = o.batch_start_log(source, etape_dept, dept)
    # '75' -> '75___', '2A' -> '2A___': a 5-char LIKE pattern matching all
    # INSEE codes of the departement.
    str_sql_dept_like = (dept + '___')[0:5]
    with open('sql/{:s}.sql'.format(data_type), 'r') as fq:
        str_query = fq.read().replace(
            '=\'__com__', ' LIKE \'{:s}'.format(str_sql_dept_like))
    cur = pgcl.cursor()
    cur.execute(str_query)
    # Parameterized to avoid SQL injection through dept (assumes a DB-API
    # driver with the %s paramstyle, e.g. psycopg2 -- TODO confirm).
    cur_com = pgc.cursor()
    cur_com.execute(
        "SELECT insee_com,cadastre_com FROM code_cadastre WHERE dept = %s",
        (dept,))
    k_insee = {}
    for c in cur_com:
        k_insee[c[0]] = {'cad': c[1], 'data': []}
    for lt in cur:
        l = list(lt)
        insee = l[-1]
        # Exceptions Paris/Lyon/Marseille (whole-city codes are skipped)
        if insee == '75056' or insee == '13055' or insee == '69123':
            continue
        l = l[0:-1]
        if insee not in k_insee:
            e.write_log_to_file(
                f_log,
                'Commune manquante au Cadastre : INSEE # {:s}'.format(insee))
            continue
        k_insee[insee]['data'].append(str(l))
    # Plain dict iteration works on both Python 2 and 3 (was .iterkeys()).
    for k in k_insee:
        cache_file = a.get_cache_filename(data_type, k, k_insee[k]['cad'])
        with open(cache_file, 'w') as f:
            f.write('\n'.join(k_insee[k]['data']))
    o.batch_end_log(len(k_insee), batch_id)
def get_data_from_pg(data_type, insee_com, cadastre_com, local=False,
                     suffixe_data=None):
    """Return rows for one commune, going through a 24h on-disk cache.

    data_type    -- basename of the SQL template in sql/<data_type>.sql
    insee_com    -- INSEE code substituted for the '__com__' placeholder
    cadastre_com -- cadastre code, used only to build the cache filename
    local        -- True: use the get_pgc() connection, else get_pgc_layers()
    suffixe_data -- optional value for the '__suffixe_data__' placeholder

    Returns a list of rows; each row is the list that was written to the
    cache file (one repr per line).
    """
    # print(data_type,insee_com,cadastre_com,suffixe_data)
    cache_file = get_cache_filename(data_type, insee_com, cadastre_com)
    cache_stale = (not use_cache
                   or not os.path.exists(cache_file)
                   or (time.time() - os.path.getmtime(cache_file)) > 86400)
    if cache_stale:
        # Load the SQL template shipped next to this script and substitute
        # the commune code. Text mode so .replace() takes a str on both
        # Python 2 and 3 (was 'rb').
        sql_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                'sql/{:s}.sql'.format(data_type))
        with open(sql_path, 'r') as fq:
            str_query = fq.read().replace('__com__', insee_com)
        # print(str_query)
        if local:
            conn = get_pgc()
        else:
            conn = get_pgc_layers()
        if suffixe_data:
            str_query = str_query.replace('__suffixe_data__', suffixe_data)
        cur = conn.cursor()
        cur.execute(str_query)
        # Rewrite the cache file: one repr'd row per line.
        with open(cache_file, 'w') as f:
            for lt in cur:
                f.write(str(list(lt)) + '\n')
        cur.close()
    # print('open cache '+cache_file)
    res = []
    # NOTE(review): eval() is only acceptable because this process wrote
    # the cache file itself; prefer ast.literal_eval if rows are plain
    # literals -- TODO confirm row contents.
    with open(cache_file, 'r') as f:
        for line in f:
            res.append(eval(line))
    return res