def aspectRatio(grid_input, grid_output, roads_input, built_input, seg_dist, seg_length, buffer_size, epsg, project_encoding, server_postgis, port_number, user_postgis, password_postgis, database_postgis, schema_postgis, path_time_log, format_vector='ESRI Shapefile', extension_vector=".shp", save_results_intermediate=False, overwrite=True): print(bold + yellow + "Début du calcul de l'indicateur Aspect Ratio." + endC + "\n") timeLine(path_time_log, "Début du calcul de l'indicateur Aspect Ratio : ") if debug >= 3: print(bold + green + "aspectRatio() : Variables dans la fonction" + endC) print(cyan + "aspectRatio() : " + endC + "grid_input : " + str(grid_input) + endC) print(cyan + "aspectRatio() : " + endC + "grid_output : " + str(grid_output) + endC) print(cyan + "aspectRatio() : " + endC + "roads_input : " + str(roads_input) + endC) print(cyan + "aspectRatio() : " + endC + "built_input : " + str(built_input) + endC) print(cyan + "aspectRatio() : " + endC + "seg_dist : " + str(seg_dist) + endC) print(cyan + "aspectRatio() : " + endC + "seg_length : " + str(seg_length) + endC) print(cyan + "aspectRatio() : " + endC + "buffer_size : " + str(buffer_size) + endC) print(cyan + "aspectRatio() : " + endC + "epsg : " + str(epsg) + endC) print(cyan + "aspectRatio() : " + endC + "project_encoding : " + str(project_encoding) + endC) print(cyan + "aspectRatio() : " + endC + "server_postgis : " + str(server_postgis) + endC) print(cyan + "aspectRatio() : " + endC + "port_number : " + str(port_number) + endC) print(cyan + "aspectRatio() : " + endC + "user_postgis : " + str(user_postgis) + endC) print(cyan + "aspectRatio() : " + endC + "password_postgis : " + str(password_postgis) + endC) print(cyan + "aspectRatio() : " + endC + "database_postgis : " + str(database_postgis) + endC) print(cyan + "aspectRatio() : " + endC + "schema_postgis : " + str(schema_postgis) + endC) print(cyan + "aspectRatio() : " + endC + "format_vector : " + str(format_vector) + endC) print(cyan + "aspectRatio() : " + endC + "extension_vector : " + str(extension_vector) + endC) print(cyan + "aspectRatio() : " + endC + "path_time_log : " + str(path_time_log) + endC) print(cyan + "aspectRatio() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC) print(cyan + "aspectRatio() : " + endC + "overwrite : " + str(overwrite) + endC) # Constantes liées aux données PostGIS et GRASS LOCATION = "LOCATION" MAPSET = "MAPSET" SUFFIX_SEGMENTED = "_segmented" if not os.path.exists(grid_output) or overwrite: ############################################ ### Préparation générale des traitements ### ############################################ if os.path.exists(grid_output): removeVectorFile(grid_output) temp_path = os.path.dirname(grid_output) + os.sep + "AspectRatio" file_name = os.path.splitext(os.path.basename(roads_input))[0] roads_segmented = temp_path + os.sep + file_name + SUFFIX_SEGMENTED + extension_vector if os.path.exists(temp_path): shutil.rmtree(temp_path) # Variables d'environnement spécifiques à GRASS gisbase = os.environ['GISBASE'] gisdb = "GRASS_database" # Variables liées à GRASS permettant la construction de 'LOCATION' et 'MAPSET' xmin, xmax, ymin, ymax = getEmpriseFile(roads_input, format_vector) pixel_size_x, pixel_size_y = 1, 1 ##################################### ### Préparation géodatabase GRASS ### ##################################### print(bold + cyan + "Préparation de la géodatabase GRASS :" + endC) timeLine(path_time_log, " Préparation de la géodatabase GRASS : ") # Initialisation de la 
connexion à la géodatabase GRASS gisbase, gisdb, location, mapset = initializeGrass( temp_path, xmin, xmax, ymin, ymax, pixel_size_x, pixel_size_y, epsg, gisbase, gisdb, LOCATION, MAPSET, True, overwrite) ################################################### ### Division des routes en segments de x mètres ### ################################################### print(bold + cyan + "Segmentation des routes avec GRASS :" + endC) timeLine(path_time_log, " Segmentation des routes avec GRASS : ") # Segmentation du jeu de données route en segments de x mètres splitGrass(roads_input, roads_segmented, seg_dist, format_vector, overwrite) cleanGrass(temp_path, gisdb, save_results_intermediate) ########################################### ### Préparation base de données PostGIS ### ########################################### print(bold + cyan + "Préparation de la base de données PostGIS :" + endC) timeLine(path_time_log, " Préparation de la base de données PostGIS : ") # Création de la base de données PostGIS # Conflits avec autres indicateurs (Height of Roughness Elements / Terrain Roughness Class) createDatabase(database_postgis) # Import des fichiers shapes maille, bati et routes segmentées dans la base de données PostGIS table_name_maille = importVectorByOgr2ogr(database_postgis, grid_input, 'ara_maille', user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema_name=schema_postgis, epsg=str(epsg), codage=project_encoding) table_name_bati = importVectorByOgr2ogr(database_postgis, built_input, 'ara_bati', user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema_name=schema_postgis, epsg=str(epsg), codage=project_encoding) table_name_routes_seg = importVectorByOgr2ogr( database_postgis, roads_segmented, 'ara_routes_seg', user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema_name=schema_postgis, epsg=str(epsg), codage=project_encoding) # Connection à la base de données PostGIS et initialisation de 'cursor' (permet de récupérer des résultats de requêtes SQL pour les traiter en Python) connection = openConnection(database_postgis, user_postgis, password_postgis, server_postgis, str(port_number), schema_name=schema_postgis) cursor = connection.cursor() # Requête d'ajout de champ ID segment route dans la table routes_seg et création des index pour les shapes importés query_preparation = """ ALTER TABLE %s ADD COLUMN id_seg serial; --CREATE INDEX IF NOT EXISTS routes_seg_geom_gist ON %s USING GIST (geom); --CREATE INDEX IF NOT EXISTS bati_geom_gist ON %s USING GIST (geom); --CREATE INDEX IF NOT EXISTS maille_geom_gist ON %s USING GIST (geom); """ % (table_name_routes_seg, table_name_routes_seg, table_name_bati, table_name_maille) if debug >= 2: print(query_preparation) executeQuery(connection, query_preparation) ############################################################################## ### Création des segments de y mètres perpendiculaires aux segments routes ### ############################################################################## print(bold + cyan + "Création des segments perpendiculaires aux routes :" + endC) timeLine(path_time_log, " Création des segments perpendiculaires aux routes : ") # Début de la construction de la requête de création des segments perpendiculaires query_seg_perp = "DROP TABLE IF EXISTS ara_seg_perp;\n" query_seg_perp += "CREATE TABLE ara_seg_perp (id_seg text, id_perp text, xR float, yR float, xP float, yP float, 
geom geometry);\n" query_seg_perp += "INSERT INTO ara_seg_perp VALUES\n" # Récupération de la liste des identifiants segments routes cursor.execute( "SELECT id_seg FROM %s GROUP BY id_seg ORDER BY id_seg;" % table_name_routes_seg) id_seg_list = cursor.fetchall() # Boucle sur les segments routes nb_seg = len(id_seg_list) treat_seg = 1 for id_seg in id_seg_list: if debug >= 4: print(bold + " Traitement du segment route : " + endC + str(treat_seg) + "/" + str(nb_seg)) id_seg = id_seg[0] # Table temporaire ne contenant qu'un segment route donné : ST_LineMerge(geom) permet de passer la géométrie de MultiLineString à LineString, pour éviter des problèmes de requêtes spatiales query_temp1_seg = "DROP TABLE IF EXISTS ara_temp1_seg;\n" query_temp1_seg += "CREATE TABLE ara_temp1_seg AS SELECT id_seg as id_seg, ST_LineMerge(geom) as geom FROM %s WHERE id_seg = %s;\n" % ( table_name_routes_seg, id_seg) if debug >= 4: print(query_temp1_seg) executeQuery(connection, query_temp1_seg) # Récupération du nombre de sommets du segment route (utile pour les segments routes en courbe, permet de récupérer le dernier point du segment) cursor.execute("SELECT ST_NPoints(geom) FROM ara_temp1_seg;") nb_points = cursor.fetchone() # Récupération des coordonnées X et Y des points extrémités du segment route query_xR1 = "SELECT ST_X(geom) as X FROM (SELECT ST_AsText(ST_PointN(geom,1)) as geom FROM ara_temp1_seg) as toto;" cursor.execute(query_xR1) xR1 = cursor.fetchone() query_yR1 = "SELECT ST_Y(geom) as Y FROM (SELECT ST_AsText(ST_PointN(geom,1)) as geom FROM ara_temp1_seg) as toto;" cursor.execute(query_yR1) yR1 = cursor.fetchone() query_xR2 = "SELECT ST_X(geom) as X FROM (SELECT ST_AsText(ST_PointN(geom,%s)) as geom FROM ara_temp1_seg) as toto;" % ( nb_points) cursor.execute(query_xR2) xR2 = cursor.fetchone() query_yR2 = "SELECT ST_Y(geom) as Y FROM (SELECT ST_AsText(ST_PointN(geom,%s)) as geom FROM ara_temp1_seg) as toto;" % ( nb_points) cursor.execute(query_yR2) yR2 = cursor.fetchone() # Transformation des coordonnées X et Y des points extrémités du segment route en valeurs numériques xR1 = float(str(xR1)[1:-2]) yR1 = float(str(yR1)[1:-2]) xR2 = float(str(xR2)[1:-2]) yR2 = float(str(yR2)[1:-2]) if debug >= 4: print(" xR1 = " + str(xR1)) print(" yR1 = " + str(yR1)) print(" xR2 = " + str(xR2)) print(" yR2 = " + str(yR2)) # Calcul des delta X et Y entre les points extrémités du segment route dxR = xR1 - xR2 dyR = yR1 - yR2 if debug >= 4: print(" dxR = " + str(dxR)) print(" dyR = " + str(dyR)) print("\n") # Suppression des cas où le script créé des perpendiculaires tous les cm ! 
Bug lié à la segmentation des routes dist_R1_R2 = math.sqrt((abs(dxR)**2) + (abs(dyR)**2)) if dist_R1_R2 >= (seg_dist / 2): # Calcul de l'angle (= gisement) entre le Nord et le segment route if dxR == 0 or dyR == 0: if dxR == 0 and dyR > 0: aR = 0 elif dxR > 0 and dyR == 0: aR = 90 elif dxR == 0 and dyR < 0: aR = 180 elif dxR < 0 and dyR == 0: aR = 270 else: aR = math.degrees(math.atan(dxR / dyR)) if aR < 0: aR = aR + 360 if debug >= 4: print(" aR = " + str(aR)) # Calcul des angles (= gisements) entre le Nord et les 2 segments perpendiculaires au segment route aP1 = aR + 90 if aP1 < 0: aP1 = aP1 + 360 if aP1 >= 360: aP1 = aP1 - 360 aP2 = aR - 90 if aP2 < 0: aP2 = aP2 + 360 if aP2 >= 360: aP2 = aP2 - 360 if debug >= 4: print(" aP1 = " + str(aP1)) print(" aP2 = " + str(aP2)) # Calculs des coordonnées des nouveaux points à l'extrémité de chaque segment perpendiculaire pour le segment route sélectionné xP1 = xR1 + (seg_length * math.sin(math.radians(aP1))) yP1 = yR1 + (seg_length * math.cos(math.radians(aP1))) xP2 = xR1 + (seg_length * math.sin(math.radians(aP2))) yP2 = yR1 + (seg_length * math.cos(math.radians(aP2))) if debug >= 4: print(" xP1 = " + str(xP1)) print(" yP1 = " + str(yP1)) print(" xP2 = " + str(xP2)) print(" yP2 = " + str(yP2)) print("\n") # Construction de la requête de création des 2 segments perpendiculaires pour le segment route sélectionné query_seg_perp += " ('%s', '%s_perp1', %s, %s, %s, %s, 'LINESTRING(%s %s, %s %s)'),\n" % ( str(id_seg), str(id_seg), xR1, yR1, xP1, yP1, xR1, yR1, xP1, yP1) query_seg_perp += " ('%s', '%s_perp2', %s, %s, %s, %s, 'LINESTRING(%s %s, %s %s)'),\n" % ( str(id_seg), str(id_seg), xR1, yR1, xP2, yP2, xR1, yR1, xP2, yP2) treat_seg += 1 # Fin de la construction de la requête de création des segments perpendiculaires et exécution de cette requête query_seg_perp = query_seg_perp[:-2] + ";\n" # Transformer la virgule de la dernière ligne SQL en point-virgule (pour terminer la requête) query_seg_perp += "ALTER TABLE ara_seg_perp ALTER COLUMN geom TYPE geometry(LINESTRING,%s) USING ST_SetSRID(geom,%s);\n" % ( epsg, epsg) # Mise à jour du système de coordonnées query_seg_perp += "CREATE INDEX IF NOT EXISTS seg_perp_geom_gist ON ara_seg_perp USING GIST (geom);\n" if debug >= 2: print(query_seg_perp) executeQuery(connection, query_seg_perp) ################################################### ### Intersect segments perpendiculaires et bâti ### ################################################### print( bold + cyan + "Intersect entre les segments perpendiculaires et les bâtiments :" + endC) timeLine( path_time_log, " Intersect entre les segments perpendiculaires et les bâtiments : " ) # Requête d'intersect entre les segments perpendiculaires et les bâtiments query_intersect = """ DROP TABLE IF EXISTS ara_intersect_bati; CREATE TABLE ara_intersect_bati AS SELECT r.id_seg as id_seg, r.id_perp as id_perp, r.xR as xR, r.yR as yR, b.HAUTEUR as haut_bati, ST_Intersection(r.geom, b.geom) as geom FROM ara_seg_perp as r, %s as b WHERE ST_Intersects(r.geom, b.geom); ALTER TABLE ara_intersect_bati ADD COLUMN id_intersect serial; CREATE INDEX IF NOT EXISTS intersect_bati_geom_gist ON ara_intersect_bati USING GIST (geom); """ % table_name_bati if debug >= 2: print(query_intersect) executeQuery(connection, query_intersect) ################################################################################################################## ### Récupération des demi-largeurs de rue et de la hauteur du 1er bâtiment pour chaque segment perpendiculaire ### 
################################################################################################################## print( bold + cyan + "Récupération des informations nécessaires au calcul du rapport d'aspect :" + endC) timeLine( path_time_log, " Récupération des informations nécessaires au calcul du rapport d'aspect : " ) # Début de la construction de la requête de création des points route, avec infos de demi-largeurs de rue et hauteurs des bâtiments query_pt_route = "DROP TABLE IF EXISTS ara_asp_ratio_by_seg;\n" query_pt_route += "CREATE TABLE ara_asp_ratio_by_seg (id_seg text, xR float, yR float, width1 float, height1 float, width2 float, height2 float, geom geometry);\n" query_pt_route += "INSERT INTO ara_asp_ratio_by_seg VALUES\n" # Récupération de la liste des identifiants segments routes (uniquement ceux qui intersectent du bâti) cursor.execute( "SELECT id_seg FROM ara_intersect_bati GROUP BY id_seg ORDER BY id_seg;" ) id_seg_list = cursor.fetchall() # Boucle sur les segments routes nb_seg = len(id_seg_list) treat_seg = 1 for id_seg in id_seg_list: if debug >= 4: print(bold + " Traitement du segment route : " + endC + str(treat_seg) + "/" + str(nb_seg)) id_seg = id_seg[0] # Table temporaire ne contenant que les intersects d'un segment route donné query_temp2_seg = "DROP TABLE IF EXISTS ara_temp2_seg;\n" query_temp2_seg += "CREATE TABLE ara_temp2_seg AS SELECT id_seg, id_perp, xR, yR, haut_bati, id_intersect, geom FROM ara_intersect_bati WHERE id_seg = '%s';\n" % ( id_seg) if debug >= 4: print(query_temp2_seg) executeQuery(connection, query_temp2_seg) # Récupération des coordonnées X et Y du point route du segment route associé cursor.execute("SELECT xR FROM ara_temp2_seg;") xR = cursor.fetchone() cursor.execute("SELECT yR FROM ara_temp2_seg;") yR = cursor.fetchone() # Transformation des coordonnées X et Y du point route du segment route associé en valeurs numériques xR = float(str(xR)[1:-2]) yR = float(str(yR)[1:-2]) if debug >= 4: print(" xR = " + str(xR)) print(" yR = " + str(yR)) print("\n") # Initialisation des variables demi-largeurs de rue et hauteur du 1er bâtiment intersecté w1 = 0 h1 = 0 w2 = 0 h2 = 0 # Récupération de la liste des identifiants segments perpendiculaires de la table temp2_seg cursor.execute( "SELECT id_perp FROM ara_temp2_seg GROUP BY id_perp ORDER BY id_perp;" ) id_perp_list = cursor.fetchall() # Boucle sur les perpendiculaires (max 2) d'un segment route donné for id_perp in id_perp_list: # Récupération du numéro de perpendiculaire (1 ou 2 ~ droite ou gauche) num_seg = float(str(id_perp)[-4:-3]) # Initialisation de listes contenant les demi-largeurs de rue et les hauteurs des bâtiments intersectés length_list = [] haut_bati_list = [] # Table temporaire ne contenant que les intersects d'un segment perpendiculaire donné query_temp2_perp = "DROP TABLE IF EXISTS ara_temp2_perp;\n" query_temp2_perp += "CREATE TABLE ara_temp2_perp AS SELECT id_seg, id_perp, xR, yR, haut_bati, id_intersect, geom FROM ara_temp2_seg WHERE id_perp = '%s';\n" % ( id_perp) if debug >= 4: print(query_temp2_perp) executeQuery(connection, query_temp2_perp) # Récupération de la liste des identifiants segments intersects de la table temp2_perp cursor.execute( "SELECT id_intersect FROM ara_temp2_perp GROUP BY id_intersect ORDER BY id_intersect;" ) id_intersect_list = cursor.fetchall() # Boucle sur les intersects d'un segment perpendiculaire donné, d'un segment route donné for id_intersect in id_intersect_list: # Récupération des coordonnées X et Y des points extrémités de chaque segment 
intersect query_xI1 = "SELECT ST_X(geom) as X FROM (SELECT ST_AsText(ST_PointN(geom,1)) as geom FROM ara_temp2_perp WHERE id_intersect = '%s') as toto;" % ( id_intersect) cursor.execute(query_xI1) xI1 = cursor.fetchone() query_yI1 = "SELECT ST_Y(geom) as Y FROM (SELECT ST_AsText(ST_PointN(geom,1)) as geom FROM ara_temp2_perp WHERE id_intersect = '%s') as toto;" % ( id_intersect) cursor.execute(query_yI1) yI1 = cursor.fetchone() query_xI2 = "SELECT ST_X(geom) as X FROM (SELECT ST_AsText(ST_PointN(geom,2)) as geom FROM ara_temp2_perp WHERE id_intersect = '%s') as toto;" % ( id_intersect) cursor.execute(query_xI2) xI2 = cursor.fetchone() query_yI2 = "SELECT ST_Y(geom) as Y FROM (SELECT ST_AsText(ST_PointN(geom,2)) as geom FROM ara_temp2_perp WHERE id_intersect = '%s') as toto;" % ( id_intersect) cursor.execute(query_yI2) yI2 = cursor.fetchone() # Transformation des coordonnées X et Y des points extrémités de chaque segment intersect en valeurs numériques try: xI1 = float(str(xI1)[1:-2]) yI1 = float(str(yI1)[1:-2]) xI2 = float(str(xI2)[1:-2]) yI2 = float(str(yI2)[1:-2]) except ValueError: # Python renvoie une valeur Null pour les bâtiments en U (= intersectés à plus de 2 points) xI1 = yI1 = xI2 = yI2 = 0 if debug >= 4: print(" xI1 = " + str(xI1)) print(" yI1 = " + str(yI1)) print(" xI2 = " + str(xI2)) print(" yI2 = " + str(yI2)) # Calcul des distances entre la route et chaque point du segment intersect length_intersect1 = math.sqrt((abs(xR - xI1)**2) + (abs(yR - yI1)**2)) length_intersect2 = math.sqrt((abs(xR - xI2)**2) + (abs(yR - yI2)**2)) if debug >= 4: print(" length_intersect1 = " + str(length_intersect1)) print(" length_intersect2 = " + str(length_intersect2)) # Récupération de la valeur de distance entre la route et le point d'intersect le plus proche (+ ajout dans la liste length_list) length = min(length_intersect1, length_intersect2) length_list.append(length) if debug >= 4: print(" length = " + str(length)) # Récupération de la hauteur du bâtiment correspondant à l'intersect (+ ajout dans la liste haut_bati_list) query_haut_bati = "SELECT haut_bati FROM ara_temp2_perp WHERE id_intersect = '%s';" % ( id_intersect) cursor.execute(query_haut_bati) haut_bati = cursor.fetchone() haut_bati = float(str(haut_bati)[10:-4]) haut_bati_list.append(haut_bati) if debug >= 4: print(" haut_bati = " + str(haut_bati)) print("\n") # Pour un segment perpendiculaire donné, récupération de la distance minimale entre la route et les intersect avec le bâti width = min(length_list) if debug >= 4: print(" width = " + str(width)) # Pour un segment perpendiculaire donné, récupération de la hauteur du bâtiment correspondant au segment intersect le plus proche de la route height_position = length_list.index(width) height = haut_bati_list[height_position] if debug >= 4: print(" height = " + str(height)) print("\n") # MAJ des variables demi-largeurs de rue et hauteur du 1er bâtiment intersecté suivant le côté de la perpendiculaire par rapport à la route if num_seg == 1: w1 = width h1 = height elif num_seg == 2: w2 = width h2 = height # Construction de la requête de création du point route pour le segment route donné query_pt_route += " ('%s', %s, %s, %s, %s, %s, %s, 'POINT(%s %s)'),\n" % ( str(id_seg), xR, yR, w1, h1, w2, h2, xR, yR) treat_seg += 1 # Fin de la construction de la requête de création des points route, avec infos de demi-largeurs de rue et hauteurs des bâtiments query_pt_route = query_pt_route[:-2] + ";\n" # Transformer la virgule de la dernière ligne SQL en point-virgule (pour terminer la requête) 
query_pt_route += "ALTER TABLE ara_asp_ratio_by_seg ALTER COLUMN geom TYPE geometry(POINT,%s) USING ST_SetSRID(geom,%s);\n" % ( epsg, epsg) # Mise à jour du système de coordonnées query_pt_route += "CREATE INDEX IF NOT EXISTS asp_ratio_by_seg_geom_gist ON ara_asp_ratio_by_seg USING GIST (geom);\n" if debug >= 2: print(query_pt_route) executeQuery(connection, query_pt_route) ########################################################### ### Calcul de l'indicateur et export de la table finale ### ########################################################### print(bold + cyan + "Calculs finaux de l'indicateur de rapport d'aspect :" + endC) timeLine(path_time_log, " Calculs finaux de l'indicateur de rapport d'aspect : ") # Requête de calcul d'un rapport d'aspect par segment route query_asp_ratio_by_seg = """ ALTER TABLE ara_asp_ratio_by_seg ADD COLUMN asp_ratio float; UPDATE ara_asp_ratio_by_seg SET asp_ratio = 0; UPDATE ara_asp_ratio_by_seg SET asp_ratio = ((height1 + height2) / 2) / (width1 + width2) WHERE height1 <> 0 AND height2 <> 0 AND width1 <> 0 AND width2 <> 0; """ if debug >= 2: print(query_asp_ratio_by_seg) executeQuery(connection, query_asp_ratio_by_seg) # Requête de bufferisation du fichier maillage query_buffer = """ DROP TABLE IF EXISTS ara_buffer; CREATE TABLE ara_buffer AS SELECT ID as ID, ST_Buffer(geom, %s) as geom FROM %s; CREATE INDEX IF NOT EXISTS buffer_geom_gist ON ara_buffer USING GIST (geom); """ % (buffer_size, table_name_maille) if debug >= 2: print(query_buffer) executeQuery(connection, query_buffer) # Requête de calcul d'un rapport d'aspect par maille (via un intersect sur le maillage bufferisé) query_asp_ratio_temp1 = """ DROP TABLE IF EXISTS ara_asp_ratio_temp1; CREATE TABLE ara_asp_ratio_temp1 AS SELECT m.ID as ID, avg(r.asp_ratio) as asp_ratio, m.geom as geom FROM %s as m, ara_buffer as b, ara_asp_ratio_by_seg as r WHERE ST_Intersects(b.geom, r.geom) AND m.ID = b.ID AND r.asp_ratio > 0 GROUP BY m.ID, m.geom; CREATE INDEX IF NOT EXISTS asp_ratio_temp1_geom_gist ON ara_asp_ratio_temp1 USING GIST (geom); """ % (table_name_maille) if debug >= 2: print(query_asp_ratio_temp1) executeQuery(connection, query_asp_ratio_temp1) # Rapport d'aspect pour les mailles n'intersectant pas de points-route avec un rapport d'aspect query_asp_ratio_temp2 = """ DROP TABLE IF EXISTS ara_asp_ratio_temp2; CREATE TABLE ara_asp_ratio_temp2 AS SELECT DISTINCT ID as ID, geom as geom FROM %s WHERE ID NOT IN (SELECT DISTINCT ID FROM ara_asp_ratio_temp1); ALTER TABLE ara_asp_ratio_temp2 ADD COLUMN asp_ratio float; UPDATE ara_asp_ratio_temp2 SET asp_ratio = 0; CREATE INDEX IF NOT EXISTS asp_ratio_temp2_geom_gist ON ara_asp_ratio_temp2 USING GIST (geom); """ % table_name_maille if debug >= 1: print(query_asp_ratio_temp2) executeQuery(connection, query_asp_ratio_temp2) # Fusion des 2 tables précédentes pour retrouver l'ensemble des mailles de départ query_asp_ratio = """ DROP TABLE IF EXISTS ara_asp_ratio; CREATE TABLE ara_asp_ratio AS SELECT ID, asp_ratio, geom FROM ara_asp_ratio_temp1 UNION SELECT ID, asp_ratio, geom FROM ara_asp_ratio_temp2; ALTER TABLE ara_asp_ratio ALTER COLUMN ID TYPE INTEGER; """ if debug >= 2: print(query_asp_ratio) executeQuery(connection, query_asp_ratio) closeConnection(connection) exportVectorByOgr2ogr(database_postgis, grid_output, 'ara_asp_ratio', user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema_name=schema_postgis, format_type=format_vector) ########################################## ### Nettoyage des fichiers 
temporaires ###
        ##########################################

        if not save_results_intermediate:
            if os.path.exists(temp_path):
                shutil.rmtree(temp_path)
            # ~ dropDatabase(database_postgis) # Conflits avec autres indicateurs (Height of Roughness Elements / Terrain Roughness Class)

    else:
        print(bold + magenta + "Le calcul de Aspect Ratio a déjà eu lieu." + endC)

    print(bold + yellow + "Fin du calcul de l'indicateur Aspect Ratio." + endC + "\n")
    timeLine(path_time_log, "Fin du calcul de l'indicateur Aspect Ratio : ")

    return
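
# --------------------------------------------------------------------------------
# Minimal standalone sketch (not called by aspectRatio()) of the geometry coded
# inline above: bearing ("gisement") of a road segment measured from north, then
# the endpoints of the two perpendicular segments of length seg_length drawn on
# either side of the first road vertex. The helper name is hypothetical; it only
# mirrors the inline computation so it can be read and tested in isolation.
# --------------------------------------------------------------------------------
import math

def sketchPerpendicularEndpoints(xR1, yR1, xR2, yR2, seg_length):
    dxR = xR1 - xR2
    dyR = yR1 - yR2
    if dxR == 0 and dyR == 0:
        # The caller above already filters out segments shorter than seg_dist / 2
        raise ValueError("degenerate road segment (identical endpoints)")

    # Bearing of the road segment from north, in degrees (same special cases as above)
    if dxR == 0 and dyR > 0:
        aR = 0
    elif dxR > 0 and dyR == 0:
        aR = 90
    elif dxR == 0 and dyR < 0:
        aR = 180
    elif dxR < 0 and dyR == 0:
        aR = 270
    else:
        aR = math.degrees(math.atan(dxR / dyR))
        if aR < 0:
            aR += 360

    # Bearings of the two perpendiculars, kept in [0, 360[
    aP1 = (aR + 90) % 360
    aP2 = (aR - 90) % 360

    # Endpoints of the two perpendicular segments, both starting from (xR1, yR1)
    xP1 = xR1 + seg_length * math.sin(math.radians(aP1))
    yP1 = yR1 + seg_length * math.cos(math.radians(aP1))
    xP2 = xR1 + seg_length * math.sin(math.radians(aP2))
    yP2 = yR1 + seg_length * math.cos(math.radians(aP2))
    return (xP1, yP1), (xP2, yP2)

# Example: a road segment running east; the two perpendiculars point north and south.
#     sketchPerpendicularEndpoints(0, 0, 10, 0, 50)  ->  approximately ((0.0, 50.0), (0.0, -50.0))
#
# For reference, the per-segment aspect ratio computed in SQL above is the mean of the
# two building heights divided by the total street width (sum of the two half-widths):
#     asp_ratio = ((height1 + height2) / 2) / (width1 + width2)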
def soilOccupationChange(input_plot_vector, output_plot_vector, footprint_vector, input_tx_files_list, evolutions_list=['0:1:11000:10:50:and', '0:1:12000:10:50:and', '0:1:21000:10:50:and', '0:1:22000:10:50:and', '0:1:23000:10:50:and'], class_label_dico={11000:'Bati', 12000:'Route', 21000:'SolNu', 22000:'Eau', 23000:'Vegetation'}, epsg=2154, no_data_value=0, format_raster='GTiff', format_vector='ESRI Shapefile', extension_raster='.tif', extension_vector='.shp', postgis_ip_host='localhost', postgis_num_port=5432, postgis_user_name='postgres', postgis_password='******', postgis_database_name='database', postgis_schema_name='public', postgis_encoding='latin1', path_time_log='', save_results_intermediate=False, overwrite=True):

    if debug >= 3:
        print('\n' + bold + green + "Evolution de l'OCS par parcelle - Variables dans la fonction :" + endC)
        print(cyan + " soilOccupationChange() : " + endC + "input_plot_vector : " + str(input_plot_vector) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "output_plot_vector : " + str(output_plot_vector) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "footprint_vector : " + str(footprint_vector) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "input_tx_files_list : " + str(input_tx_files_list) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "evolutions_list : " + str(evolutions_list) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "class_label_dico : " + str(class_label_dico) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "no_data_value : " + str(no_data_value) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "format_raster : " + str(format_raster) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "postgis_ip_host : " + str(postgis_ip_host) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "postgis_num_port : " + str(postgis_num_port) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "postgis_user_name : " + str(postgis_user_name) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "postgis_password : " + str(postgis_password) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "postgis_database_name : " + str(postgis_database_name) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "postgis_schema_name : " + str(postgis_schema_name) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "postgis_encoding : " + str(postgis_encoding) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "overwrite : " + str(overwrite) + endC + '\n')

    # Définition des constantes
    EXTENSION_TEXT = '.txt'
    SUFFIX_TEMP = '_temp'
    SUFFIX_CUT = '_cut'
    AREA_FIELD = 'st_area'
    GEOM_FIELD = 'geom'

    # Mise à jour du log
    starting_event = "soilOccupationChange() : Début du traitement : "
    timeLine(path_time_log, starting_event)

    print(cyan + "soilOccupationChange() : " + bold + green + "DEBUT DES TRAITEMENTS" + endC + '\n')

    # Définition des variables 'basename'
    output_plot_basename =
os.path.splitext(os.path.basename(output_plot_vector))[0] # Définition des variables temp temp_directory = os.path.dirname(output_plot_vector) + os.sep + output_plot_basename + SUFFIX_TEMP plot_vector_cut = temp_directory + os.sep + output_plot_basename + SUFFIX_CUT + extension_vector # Définition des variables PostGIS plot_table = output_plot_basename.lower() # Fichier .txt associé au fichier vecteur de sortie, sur la liste des évolutions quantifiées output_evolution_text_file = os.path.splitext(output_plot_vector)[0] + EXTENSION_TEXT # Nettoyage des traitements précédents if debug >= 3: print(cyan + "soilOccupationChange() : " + endC + "Nettoyage des traitements précédents." + endC + '\n') removeVectorFile(output_plot_vector, format_vector=format_vector) removeFile(output_evolution_text_file) cleanTempData(temp_directory) dropDatabase(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name) ############# # Etape 0/2 # Préparation des traitements ############# print(cyan + "soilOccupationChange() : " + bold + green + "ETAPE 0/2 - Début de la préparation des traitements." + endC + '\n') # Découpage du parcellaire à la zone d'étude cutVector(footprint_vector, input_plot_vector, plot_vector_cut, overwrite=overwrite, format_vector=format_vector) # Récupération du nom des champs dans le fichier source (pour isoler les champs nouvellement créés par la suite, et les renommer) attr_names_list_origin = getAttributeNameList(plot_vector_cut, format_vector=format_vector) new_attr_names_list_origin = attr_names_list_origin # Préparation de PostGIS createDatabase(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name) print(cyan + "soilOccupationChange() : " + bold + green + "ETAPE 0/2 - Fin de la préparation des traitements." + endC + '\n') ############# # Etape 1/2 # Calculs des statistiques à tx ############# print(cyan + "soilOccupationChange() : " + bold + green + "ETAPE 1/2 - Début des calculs des statistiques à tx." + endC + '\n') len_tx = len(input_tx_files_list) tx = 0 # Boucle sur les fichiers d'entrés à t0+x for input_tx_file in input_tx_files_list: if debug >= 3: print(cyan + "soilOccupationChange() : " + endC + bold + "Calcul des statistiques à tx %s/%s." 
% (tx+1, len_tx) + endC + '\n') # Statistiques OCS par parcelle statisticsVectorRaster(input_tx_file, plot_vector_cut, "", 1, True, False, False, [], [], class_label_dico, path_time_log, clean_small_polygons=True, format_vector=format_vector, save_results_intermediate=save_results_intermediate, overwrite=overwrite) # Récupération du nom des champs dans le fichier parcellaire (avec les champs créés précédemment dans CVR) attr_names_list_tx = getAttributeNameList(plot_vector_cut, format_vector=format_vector) # Isolement des nouveaux champs issus du CVR fields_name_list = [] for attr_name in attr_names_list_tx: if attr_name not in new_attr_names_list_origin: fields_name_list.append(attr_name) # Gestion des nouveaux noms des champs issus du CVR new_fields_name_list = [] for field_name in fields_name_list: new_field_name = 't%s_' % tx + field_name new_field_name = new_field_name[:10] new_fields_name_list.append(new_field_name) new_attr_names_list_origin.append(new_field_name) # Renommage des champs issus du CVR, pour le relancer par la suite sur d'autres dates renameFieldsVector(plot_vector_cut, fields_name_list, new_fields_name_list, format_vector=format_vector) tx += 1 print(cyan + "soilOccupationChange() : " + bold + green + "ETAPE 1/2 - Fin des calculs des statistiques à tx." + endC + '\n') ############# # Etape 2/2 # Caractérisation des changements ############# print(cyan + "soilOccupationChange() : " + bold + green + "ETAPE 2/2 - Début de la caractérisation des changements." + endC + '\n') # Pré-traitements dans PostGIS plot_table = importVectorByOgr2ogr(postgis_database_name, plot_vector_cut, plot_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, epsg=epsg, codage=postgis_encoding) connection = openConnection(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name) # Requête SQL pour le calcul de la surface des parcelles sql_query = "ALTER TABLE %s ADD COLUMN %s REAL;\n" % (plot_table, AREA_FIELD) sql_query += "UPDATE %s SET %s = ST_Area(%s);\n" % (plot_table, AREA_FIELD, GEOM_FIELD) # Boucle sur les évolutions à quantifier temp_field = 1 for evolution in evolutions_list: evolution_split = evolution.split(':') idx_bef = int(evolution_split[0]) idx_aft = int(evolution_split[1]) label = int(evolution_split[2]) evol = abs(int(evolution_split[3])) evol_s = abs(int(evolution_split[4])) combi = evolution_split[5] class_name = class_label_dico[label] def_evo_field = "def_evo_%s" % str(temp_field) if debug >= 3: print(cyan + "soilOccupationChange() : " + endC + bold + "Caractérisation des changements t%s/t%s pour la classe '%s' (%s)." 
% (idx_bef, idx_aft, class_name, label) + endC + '\n') if evol != 0 or evol_s != 0: # Gestion de l'évolution via le taux evol_str = str(evol) + ' %' evo_field = "evo_%s" % str(temp_field) t0_field = 't%s_' % idx_bef + class_name.lower()[:7] t1_field = 't%s_' % idx_aft + class_name.lower()[:7] # Gestion de l'évolution via la surface evol_s_str = str(evol_s) + ' m²' evo_s_field = "evo_s_%s" % str(temp_field) t0_s_field = 't%s_s_' % idx_bef + class_name.lower()[:5] t1_s_field = 't%s_s_' % idx_aft + class_name.lower()[:5] # Requête SQL pour le calcul brut de l'évolution sql_query += "ALTER TABLE %s ADD COLUMN %s REAL;\n" % (plot_table, evo_field) sql_query += "UPDATE %s SET %s = %s - %s;\n" % (plot_table, evo_field, t1_field, t0_field) sql_query += "ALTER TABLE %s ADD COLUMN %s REAL;\n" % (plot_table, evo_s_field) sql_query += "UPDATE %s SET %s = %s - %s;\n" % (plot_table, evo_s_field, t1_s_field, t0_s_field) sql_query += "ALTER TABLE %s ADD COLUMN %s VARCHAR;\n" % (plot_table, def_evo_field) sql_query += "UPDATE %s SET %s = 't%s a t%s - %s - aucune evolution';\n" % (plot_table, def_evo_field, idx_bef, idx_aft, class_name) # Si évolution à la fois via taux et via surface if evol != 0 and evol_s != 0: text_evol = "taux à %s" % evol_str if combi == 'and': text_evol += " ET " elif combi == 'or': text_evol += " OU " text_evol += "surface à %s" % evol_s_str sql_where_pos = "%s >= %s %s %s >= %s" % (evo_field, evol, combi, evo_s_field, evol_s) sql_where_neg = "%s <= -%s %s %s <= -%s" % (evo_field, evol, combi, evo_s_field, evol_s) # Si évolution uniquement via taux elif evol != 0: text_evol = "taux à %s" % evol_str sql_where_pos = "%s >= %s" % (evo_field, evol) sql_where_neg = "%s <= -%s" % (evo_field, evol) # Si évolution uniquement via surface elif evol_s != 0: text_evol = "surface à %s" % evol_s_str sql_where_pos = "%s >= %s" % (evo_s_field, evol_s) sql_where_neg = "%s <= -%s" % (evo_s_field, evol_s) sql_query += "UPDATE %s SET %s = 't%s a t%s - %s - evolution positive' WHERE %s;\n" % (plot_table, def_evo_field, idx_bef, idx_aft, class_name, sql_where_pos) sql_query += "UPDATE %s SET %s = 't%s a t%s - %s - evolution negative' WHERE %s;\n" % (plot_table, def_evo_field, idx_bef, idx_aft, class_name, sql_where_neg) # Ajout des paramètres de l'évolution quantifiée (temporalités, classe, taux/surface) au fichier texte de sortie text = "%s --> évolution entre t%s et t%s, pour la classe '%s' (label %s) :\n" % (def_evo_field, idx_bef, idx_aft, class_name, label) text += " %s --> taux d'évolution brut" % evo_field + " (%)\n" text += " %s --> surface d'évolution brute" % evo_s_field + " (m²)\n" text += "Evolution quantifiée : %s\n" % text_evol appendTextFileCR(output_evolution_text_file, text) temp_field += 1 # Traitements SQL de l'évolution des classes OCS executeQuery(connection, sql_query) closeConnection(connection) exportVectorByOgr2ogr(postgis_database_name, output_plot_vector, plot_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, format_type=format_vector) print(cyan + "soilOccupationChange() : " + bold + green + "ETAPE 2/2 - Fin de la caractérisation des changements." + endC + '\n') # Suppression des fichiers temporaires if not save_results_intermediate: if debug >= 3: print(cyan + "soilOccupationChange() : " + endC + "Suppression des fichiers temporaires." 
+ endC + '\n')
        deleteDir(temp_directory)
        dropDatabase(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name)

    print(cyan + "soilOccupationChange() : " + bold + green + "FIN DES TRAITEMENTS" + endC + '\n')

    # Mise à jour du log
    ending_event = "soilOccupationChange() : Fin du traitement : "
    timeLine(path_time_log, ending_event)

    return
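
# --------------------------------------------------------------------------------
# Minimal standalone sketch (not used by soilOccupationChange()) of how one entry of
# evolutions_list is interpreted above. Each entry is 'idx_bef:idx_aft:label:evol:evol_s:combi',
# e.g. '0:1:11000:10:50:and' = between t0 and t1, for class 11000, flag an evolution when
# the class rate changes by at least 10 % and the class area changes by at least 50 m².
# The helper name and the returned dictionary keys are hypothetical.
# --------------------------------------------------------------------------------
def sketchParseEvolution(evolution, class_label_dico):
    idx_bef, idx_aft, label, evol, evol_s, combi = evolution.split(':')
    idx_bef, idx_aft, label = int(idx_bef), int(idx_aft), int(label)
    evol, evol_s = abs(int(evol)), abs(int(evol_s))
    class_name = class_label_dico[label]

    # Field names follow the same truncation as above (10-character shapefile field limit)
    t0_field = 't%s_' % idx_bef + class_name.lower()[:7]
    t1_field = 't%s_' % idx_aft + class_name.lower()[:7]

    return {
        'idx_bef': idx_bef,
        'idx_aft': idx_aft,
        'label': label,
        'class_name': class_name,
        'rate_threshold': evol,     # in %
        'area_threshold': evol_s,   # in m²
        'combination': combi,       # 'and' / 'or' between the two thresholds
        't0_field': t0_field,
        't1_field': t1_field,
    }

# Example:
#     sketchParseEvolution('0:1:11000:10:50:and', {11000: 'Bati'})
#     -> {'idx_bef': 0, 'idx_aft': 1, 'label': 11000, 'class_name': 'Bati',
#         'rate_threshold': 10, 'area_threshold': 50, 'combination': 'and',
#         't0_field': 't0_bati', 't1_field': 't1_bati'}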
def verificationCorrection(vector_ref, vectors_input_list, vectors_output_list, epsg, project_encoding, server_postgis, port_number, user_postgis, password_postgis, database_postgis, schema_postgis, path_time_log, save_results_intermediate=False, overwrite=True): # Mise à jour du Log starting_event = "verificationCorrection() : Verifie and correct vector starting : " timeLine(path_time_log, starting_event) print(endC) print(bold + green + "## START : VERIFIE CORRECT VECTORS" + endC) print(endC) if debug >= 2: print(bold + green + "verificationCorrection() : Variables dans la fonction" + endC) print(cyan + "verificationCorrection() : " + endC + "vector_ref : " + str(vector_ref) + endC) print(cyan + "verificationCorrection() : " + endC + "vectors_input_list : " + str(vectors_input_list) + endC) print(cyan + "verificationCorrection() : " + endC + "vectors_output_list : " + str(vectors_output_list) + endC) print(cyan + "verificationCorrection() : " + endC + "epsg : " + str(epsg) + endC) print(cyan + "verificationCorrection() : " + endC + "project_encoding : " + str(project_encoding) + endC) print(cyan + "verificationCorrection() : " + endC + "server_postgis : " + str(server_postgis) + endC) print(cyan + "verificationCorrection() : " + endC + "port_number : " + str(port_number) + endC) print(cyan + "verificationCorrection() : " + endC + "user_postgis : " + str(user_postgis) + endC) print(cyan + "verificationCorrection() : " + endC + "password_postgis : " + str(password_postgis) + endC) print(cyan + "verificationCorrection() : " + endC + "database_postgis : " + str(database_postgis) + endC) print(cyan + "verificationCorrection() : " + endC + "schema_postgis : " + str(schema_postgis) + endC) print(cyan + "verificationCorrection() : " + endC + "path_time_log : " + str(path_time_log) + endC) print(cyan + "verificationCorrection() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC) print(cyan + "verificationCorrection() : " + endC + "overwrite : " + str(overwrite) + endC) # Constante suffix des tables de contrôle en SQL SUFFIX_SUM = '_sum' SUFFIX_100 = '_100' SUFFIX_SURF = '_surf' COLUMN_SUM = "sum" schema_postgis = schema_postgis.lower() table_reference_name = os.path.splitext( os.path.basename(vector_ref))[0].lower() table_reference_complete_name = schema_postgis + '.' 
+ table_reference_name # Préparation de la base de données dropDatabase( database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number) ) # Suppression de la base de données (si elle existe d'un traitement précédent) createDatabase(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number)) # Création de la base de données connection = openConnection(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema='') # Connexion à la base de données createSchema(connection, schema_postgis) # Création du schéma, s'il n'existe pas closeConnection( connection ) # Déconnexion de la base de données (pour éviter les conflits avec les outils d'import de shape) # Monter en base du fichier vecteur de référence table_reference_complete_name = importVectorByOgr2ogr( database_postgis, vector_ref, table_reference_complete_name, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema=schema_postgis, epsg=str(epsg), codage=project_encoding) # Pour tous les fichiers à traiter for idx_vector in range(len(vectors_input_list)): vector_input = vectors_input_list[idx_vector] vector_output = vectors_output_list[idx_vector] table_input_name = schema_postgis + '.' + os.path.splitext( os.path.basename(vector_input))[0].lower() table_output_name = schema_postgis + '.' + os.path.splitext( os.path.basename(vector_output))[0].lower() # Test si le vecteur de sortie existe déjà et si il doit être écrasés check = os.path.isfile(vector_output) if check and not overwrite: # Si le fichier existe déjà et que overwrite n'est pas activé print(bold + yellow + "File vector output : " + vector_output + " already exists and will not be created again." 
+ endC) else: if check: try: removeVectorFile(vector_output) except Exception: pass # si le fichier n'existe pas, il ne peut pas être supprimé : cette étape est ignorée # Monter en base du fichier vecteur d'entrée table_input_name = importVectorByOgr2ogr(database_postgis, vector_input, table_input_name, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema=schema_postgis, epsg=str(epsg), codage=project_encoding) # Traitements SQL ################# connection = openConnection(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema=schema_postgis) # Création de la nouvelle table corrigée query_create_table = """ CREATE TABLE %s AS SELECT CAST(id AS INTEGER) AS id, CAST(label AS INTEGER) AS label, CAST(datemaj AS TEXT) AS datemaj, CAST(srcmaj AS TEXT) AS srcmaj, CAST(round(img_sat) AS INTEGER) AS img_sat, CAST(round(bd_carto) AS INTEGER) AS bd_carto, CAST(round(bd_topo) AS INTEGER) AS bd_topo, CAST(round(bd_foret) AS INTEGER) AS bd_foret, CAST(round(clc) AS INTEGER) AS clc, CAST(round(cvi) AS INTEGER) AS cvi, CAST(round(rpg) AS INTEGER) AS rpg, CAST(round(vrgprunus) AS INTEGER) AS vrgprunus, CAST(round(oss_ign) AS INTEGER) AS oss_ign, geom FROM %s; """ % (table_output_name, table_input_name) executeQuery(connection, query_create_table) # Mise à jour des colonne de la nouvelle table query_update_colum = """ UPDATE %s SET img_sat = 100 - (bd_carto + bd_topo + bd_foret + clc + cvi + rpg + vrgprunus + oss_ign); UPDATE %s SET bd_carto = bd_carto-1, img_sat = 0 WHERE (img_sat=-1) AND (bd_carto = GREATEST(bd_carto, bd_topo, bd_foret, clc, cvi, rpg, vrgprunus, oss_ign)); UPDATE %s SET bd_topo = bd_topo-1, img_sat = 0 WHERE (img_sat=-1) AND (bd_topo = GREATEST(bd_carto, bd_topo, bd_foret, clc, cvi, rpg, vrgprunus, oss_ign)); UPDATE %s SET bd_foret = bd_foret-1, img_sat = 0 WHERE (img_sat=-1) AND (bd_foret = GREATEST(bd_carto, bd_topo, bd_foret, clc, cvi, rpg, vrgprunus, oss_ign)); UPDATE %s SET clc = clc-1, img_sat = 0 WHERE (img_sat=-1) AND (clc = GREATEST(bd_carto, bd_topo, bd_foret, clc, cvi, rpg, vrgprunus, oss_ign)); UPDATE %s SET cvi = cvi-1, img_sat = 0 WHERE (img_sat=-1) AND (cvi = GREATEST(bd_carto, bd_topo, bd_foret, clc, cvi, rpg, vrgprunus, oss_ign)); UPDATE %s SET rpg = rpg-1, img_sat = 0 WHERE (img_sat=-1) AND (rpg = GREATEST(bd_carto, bd_topo, bd_foret, clc, cvi, rpg, vrgprunus, oss_ign)); UPDATE %s SET vrgprunus = vrgprunus-1, img_sat = 0 WHERE (img_sat=-1) AND (vrgprunus = GREATEST(bd_carto, bd_topo, bd_foret, clc, cvi, rpg, vrgprunus, oss_ign)); UPDATE %s SET oss_ign = oss_ign-1, img_sat = 0 WHERE (img_sat=-1) AND (oss_ign = GREATEST(bd_carto, bd_topo, bd_foret, clc, cvi, rpg, vrgprunus, oss_ign)); """ % (table_output_name, table_output_name, table_output_name, table_output_name, table_output_name, table_output_name, table_output_name, table_output_name, table_output_name) executeQuery(connection, query_update_colum) # Vérification de la somme des colonnes pos = table_output_name.find("m2_") - 3 resolution = table_output_name[pos:pos + 3] table_name = schema_postgis + '.' 
+ "controle_" + table_reference_name + "_" + resolution + "m_cor" table_verif_sum_name = table_name + SUFFIX_SUM table_verif_sum100_name = table_name + SUFFIX_SUM + SUFFIX_100 table_verif_surf_name = table_name + SUFFIX_SURF query_verif_sum = """ CREATE TABLE %s AS SELECT id, img_sat, bd_carto + bd_topo + bd_foret + clc + cvi + rpg + vrgprunus + oss_ign AS sum FROM %s; """ % (table_verif_sum_name, table_output_name) executeQuery(connection, query_verif_sum) query_verif_sum100 = """ CREATE TABLE %s AS SELECT id, sum FROM %s WHERE sum != 100 OR img_sat = -1; """ % (table_verif_sum100_name, table_verif_sum_name) executeQuery(connection, query_verif_sum100) data_list = getData(connection, table_verif_sum100_name, COLUMN_SUM) if len(data_list) > 0: print(cyan + "verificationCorrection() : " + bold + red + "Error column sum 100 not empty = " + str(len(data_list)) + endC, file=sys.stderr) # Vérification des aires query_verif_surf = """ CREATE TABLE %s AS SELECT 'OCS_SAT', SUM(st_area(geom)) FROM %s; INSERT INTO %s SELECT '%s', st_area(geom) FROM %s; """ % (table_verif_surf_name, table_output_name, table_verif_surf_name, table_reference_complete_name, table_reference_complete_name) executeQuery(connection, query_verif_surf) delta = 1 data_list = getData(connection, table_verif_surf_name, COLUMN_SUM) data_list0 = float(str(data_list[0])[1:-2]) for data in data_list: data = float(str(data)[1:-2]) data_ref_min = int(data_list0) - delta data_ref_max = int(data_list0) + delta if int(data) < data_ref_min or int(data) > data_ref_max: print(cyan + "verificationCorrection() : " + bold + red + "Error area comparaison, ref = " + str(data_list0) + "m², data = " + str(data) + "m²" + endC, file=sys.stderr) # Correction la géométrie (topologie) topologyCorrections(connection, table_output_name) # Récupération de la base du fichier vecteur de sortie (et déconnexion de la base de données, pour éviter les conflits d'accès) closeConnection(connection) exportVectorByOgr2ogr(database_postgis, vector_output, table_output_name, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema=schema_postgis, format_type='ESRI Shapefile') # Suppression des fichiers intermédiaires if not save_results_intermediate: dropDatabase(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number)) print(endC) print(bold + green + "## END : VERIFIE CORRECT VECTORS" + endC) print(endC) # Mise à jour du Log ending_event = "verificationCorrection() : Verifie and correct vector ending : " timeLine(path_time_log, ending_event) return
def classificationLczSql(input_division, input_hre, input_ocs, output_lcz, id_field='id', epsg=2154, format_vector='ESRI Shapefile', postgis_ip_host='localhost', postgis_num_port=5432, postgis_user_name='postgres', postgis_password='******', postgis_database_name='lcz_db', postgis_schema_name='public', postgis_encoding='UTF-8', path_time_log='', save_results_intermediate=False, overwrite=True):

    if debug >= 3:
        print('\n' + bold + green + "Classification LCZ via SQL - Variables dans la fonction :" + endC)
        print(cyan + " classificationLczSql() : " + endC + "input_division : " + str(input_division) + endC)
        print(cyan + " classificationLczSql() : " + endC + "input_hre : " + str(input_hre) + endC)
        print(cyan + " classificationLczSql() : " + endC + "input_ocs : " + str(input_ocs) + endC)
        print(cyan + " classificationLczSql() : " + endC + "output_lcz : " + str(output_lcz) + endC)
        print(cyan + " classificationLczSql() : " + endC + "id_field : " + str(id_field) + endC)
        print(cyan + " classificationLczSql() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + " classificationLczSql() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + " classificationLczSql() : " + endC + "postgis_ip_host : " + str(postgis_ip_host) + endC)
        print(cyan + " classificationLczSql() : " + endC + "postgis_num_port : " + str(postgis_num_port) + endC)
        print(cyan + " classificationLczSql() : " + endC + "postgis_user_name : " + str(postgis_user_name) + endC)
        print(cyan + " classificationLczSql() : " + endC + "postgis_password : " + str(postgis_password) + endC)
        print(cyan + " classificationLczSql() : " + endC + "postgis_database_name : " + str(postgis_database_name) + endC)
        print(cyan + " classificationLczSql() : " + endC + "postgis_schema_name : " + str(postgis_schema_name) + endC)
        print(cyan + " classificationLczSql() : " + endC + "postgis_encoding : " + str(postgis_encoding) + endC)
        print(cyan + " classificationLczSql() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + " classificationLczSql() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + " classificationLczSql() : " + endC + "overwrite : " + str(overwrite) + endC + '\n')

    # Définition des constantes
    SUFFIX_TEMP = '_temp'

    # Mise à jour du log
    starting_event = "classificationLczSql() : Début du traitement : "
    timeLine(path_time_log, starting_event)

    print(cyan + "classificationLczSql() : " + bold + green + "DEBUT DES TRAITEMENTS" + endC + '\n')

    # Définition des variables
    hre_field = 'mean_h'
    bur_field = 'built'
    ror_field = 'mineral'
    bsr_field = 'baresoil'
    war_field = 'water'
    ver_field = 'veget'
    vhr_field = 'veg_h_rate'
    lcz_field = 'lcz'
    div_table = 'i_div'
    hre_table = 'i_hre'
    ocs_table = 'i_ocs'
    lcz_table = 'o_lcz'

    # Nettoyage des traitements précédents
    if overwrite:
        if debug >= 3:
            print(cyan + "classificationLczSql() : " + endC + "Nettoyage des traitements précédents." + endC + '\n')
        removeVectorFile(output_lcz)
        dropDatabase(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name)
    else:
        if os.path.exists(output_lcz):
            print(cyan + "classificationLczSql() : " + bold + yellow + "Le fichier de sortie existe déjà et ne sera pas regénéré."
+ endC + '\n') raise pass #################################################################### createDatabase(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name) div_table = importVectorByOgr2ogr(postgis_database_name, input_division, div_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, epsg=epsg, codage=postgis_encoding) hre_table = importVectorByOgr2ogr(postgis_database_name, input_hre, hre_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, epsg=epsg, codage=postgis_encoding) ocs_table = importVectorByOgr2ogr(postgis_database_name, input_ocs, ocs_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, epsg=epsg, codage=postgis_encoding) connection = openConnection(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name) query = "DROP TABLE IF EXISTS %s;\n" % lcz_table query += "CREATE TABLE %s AS\n" % lcz_table query += " SELECT d.%s AS %s, h.%s AS hre, o.%s AS bur, o.%s AS ror, o.%s AS bsr, o.%s AS war, o.%s AS ver, o.%s AS vhr, d.geom AS geom\n" % (id_field, id_field, hre_field, bur_field, ror_field, bsr_field, war_field, ver_field, vhr_field) query += " FROM %s AS d, %s AS h, %s AS o\n" % (div_table, hre_table, ocs_table) query += " WHERE d.%s = h.%s AND d.%s = o.%s;\n" % (id_field, id_field, id_field, id_field) query += "ALTER TABLE %s ADD COLUMN %s VARCHAR(8);\n" % (lcz_table, lcz_field) query += "UPDATE %s SET %s = 'urban' WHERE bur > 5;\n" % (lcz_table, lcz_field) query += "UPDATE %s SET %s = 'natural' WHERE bur <= 5;\n" % (lcz_table, lcz_field) query += "UPDATE %s SET %s = 'low_ocs' WHERE %s = 'natural' AND (bur + ror + bsr + war + ver) <= 60;\n" % (lcz_table, lcz_field, lcz_field) query += "UPDATE %s SET %s = '1' WHERE %s = 'urban' AND (hre > 25) AND (bur > 40);\n" % (lcz_table, lcz_field, lcz_field) query += "UPDATE %s SET %s = '4' WHERE %s = 'urban' AND (hre > 25) AND (bur <= 40);\n" % (lcz_table, lcz_field, lcz_field) query += "UPDATE %s SET %s = '2' WHERE %s = 'urban' AND (hre > 10 AND hre <= 25) AND (bur > 40);\n" % (lcz_table, lcz_field, lcz_field) query += "UPDATE %s SET %s = '5' WHERE %s = 'urban' AND (hre > 10 AND hre <= 25) AND (bur <= 40);\n" % (lcz_table, lcz_field, lcz_field) query += "UPDATE %s SET %s = '7' WHERE %s = 'urban' AND (hre <= 10) AND (bur > 70);\n" % (lcz_table, lcz_field, lcz_field) query += "UPDATE %s SET %s = '3' WHERE %s = 'urban' AND (hre <= 10) AND (bur > 40 AND bur <= 70);\n" % (lcz_table, lcz_field, lcz_field) query += "UPDATE %s SET %s = '6' WHERE %s = 'urban' AND (hre <= 10) AND (bur > 20 AND bur <= 40) AND (ver > 10);\n" % (lcz_table, lcz_field, lcz_field) query += "UPDATE %s SET %s = '8' WHERE %s = 'urban' AND (hre <= 10) AND (bur > 20 AND bur <= 40) AND (ver <= 10);\n" % (lcz_table, lcz_field, lcz_field) query += "UPDATE %s SET %s = '9' WHERE %s = 'urban' AND (hre <= 10) AND (bur <= 20);\n" % (lcz_table, lcz_field, lcz_field) query += "UPDATE %s SET %s = 'A' WHERE %s = 'natural' AND ((ver >= ror) AND (ver >= bsr) AND (ver >= war)) AND (vhr > 50);\n" % (lcz_table, lcz_field, lcz_field) query += "UPDATE %s SET %s = 'B' WHERE %s = 'natural' AND 
((ver >= ror) AND (ver >= bsr) AND (ver >= war)) AND (vhr > 25 AND vhr <= 50);\n" % (lcz_table, lcz_field, lcz_field) query += "UPDATE %s SET %s = 'C' WHERE %s = 'natural' AND ((ver >= ror) AND (ver >= bsr) AND (ver >= war)) AND (vhr > 10 AND vhr <= 25);\n" % (lcz_table, lcz_field, lcz_field) query += "UPDATE %s SET %s = 'D' WHERE %s = 'natural' AND ((ver >= ror) AND (ver >= bsr) AND (ver >= war)) AND (vhr <= 10);\n" % (lcz_table, lcz_field, lcz_field) query += "UPDATE %s SET %s = 'E' WHERE %s = 'natural' AND ((ror >= bsr) AND (ror >= war) AND (ror >= ver));\n" % (lcz_table, lcz_field, lcz_field) query += "UPDATE %s SET %s = 'F' WHERE %s = 'natural' AND ((bsr >= ror) AND (bsr >= war) AND (bsr >= ver));\n" % (lcz_table, lcz_field, lcz_field) query += "UPDATE %s SET %s = 'G' WHERE %s = 'natural' AND ((war >= ror) AND (war >= bsr) AND (war >= ver));\n" % (lcz_table, lcz_field, lcz_field) query += "ALTER TABLE %s ALTER COLUMN %s TYPE INTEGER;\n" % (lcz_table, id_field) query += "ALTER TABLE %s ALTER COLUMN hre TYPE NUMERIC(6,2);\n" % lcz_table query += "ALTER TABLE %s ALTER COLUMN bur TYPE NUMERIC(6,2);\n" % lcz_table query += "ALTER TABLE %s ALTER COLUMN ror TYPE NUMERIC(6,2);\n" % lcz_table query += "ALTER TABLE %s ALTER COLUMN bsr TYPE NUMERIC(6,2);\n" % lcz_table query += "ALTER TABLE %s ALTER COLUMN war TYPE NUMERIC(6,2);\n" % lcz_table query += "ALTER TABLE %s ALTER COLUMN ver TYPE NUMERIC(6,2);\n" % lcz_table query += "ALTER TABLE %s ALTER COLUMN vhr TYPE NUMERIC(6,2);\n" % lcz_table if debug >= 3: print('\n' + query) executeQuery(connection, query) closeConnection(connection) exportVectorByOgr2ogr(postgis_database_name, output_lcz, lcz_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, format_type=format_vector) #################################################################### # Suppression des fichiers temporaires if not save_results_intermediate: if debug >= 3: print('\n' + cyan + "classificationLczSql() : " + endC + "Suppression des fichiers temporaires." + endC + '\n') dropDatabase(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name) print(cyan + "classificationLczSql() : " + bold + green + "FIN DES TRAITEMENTS" + endC + '\n') # Mise à jour du log ending_event = "classificationLczSql() : Fin du traitement : " timeLine(path_time_log, ending_event) return 0
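
# --------------------------------------------------------------------------------
# Minimal standalone sketch (not used by classificationLczSql()) of the decision rules
# encoded in the SQL UPDATE statements above, applied to a single spatial unit. Inputs
# are the indicators joined into o_lcz: hre = mean built height (m), bur / ror / bsr /
# war / ver = built / mineral / bare-soil / water / vegetation rates (%), vhr = high
# vegetation rate (%). The function name is hypothetical; the thresholds are copied
# from the queries above, and the comments give the standard LCZ class names.
# --------------------------------------------------------------------------------
def sketchLczLabel(hre, bur, ror, bsr, war, ver, vhr):
    lcz = 'urban' if bur > 5 else 'natural'

    # Units where the mapped land cover is too incomplete to be classified
    if lcz == 'natural' and (bur + ror + bsr + war + ver) <= 60:
        return 'low_ocs'

    if lcz == 'urban':
        if hre > 25:
            return '1' if bur > 40 else '4'      # compact / open high-rise
        if hre > 10:
            return '2' if bur > 40 else '5'      # compact / open mid-rise
        # hre <= 10
        if bur > 70:
            return '7'                            # lightweight low-rise
        if bur > 40:
            return '3'                            # compact low-rise
        if bur > 20:
            return '6' if ver > 10 else '8'       # open low-rise / large low-rise
        return '9'                                # sparsely built

    # Natural classes: the dominant cover decides, with the same overwrite order as the
    # SQL (vegetation first; mineral, bare soil and water take precedence in case of ties)
    if ver >= ror and ver >= bsr and ver >= war:
        if vhr > 50:
            lcz = 'A'    # dense trees
        elif vhr > 25:
            lcz = 'B'    # scattered trees
        elif vhr > 10:
            lcz = 'C'    # bush, scrub
        else:
            lcz = 'D'    # low plants
    if ror >= bsr and ror >= war and ror >= ver:
        lcz = 'E'        # bare rock or paved
    if bsr >= ror and bsr >= war and bsr >= ver:
        lcz = 'F'        # bare soil or sand
    if war >= ror and war >= bsr and war >= ver:
        lcz = 'G'        # water
    return lcz

# Example: a dense mid-rise block classifies as LCZ 2.
#     sketchLczLabel(hre=18, bur=55, ror=30, bsr=0, war=0, ver=15, vhr=5)  ->  '2'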
def heightOfRoughnessElements(grid_input, grid_output, built_input, height_field, id_field, epsg, project_encoding, server_postgis, port_number, user_postgis, password_postgis, database_postgis, schema_postgis, path_time_log, format_vector='ESRI Shapefile', save_results_intermediate=False, overwrite=True): print(bold + yellow + "Début du calcul de l'indicateur Height of Roughness Elements." + endC + "\n") timeLine(path_time_log, "Début du calcul de l'indicateur Height of Roughness Elements : ") if debug >= 3: print(bold + green + "heightOfRoughnessElements() : Variables dans la fonction" + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "grid_input : " + str(grid_input) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "grid_output : " + str(grid_output) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "built_input : " + str(built_input) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "height_field : " + str(height_field) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "id_field : " + str(id_field) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "epsg : " + str(epsg) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "project_encoding : " + str(project_encoding) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "server_postgis : " + str(server_postgis) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "port_number : " + str(port_number) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "user_postgis : " + str(user_postgis) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "password_postgis : " + str(password_postgis) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "database_postgis : " + str(database_postgis) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "schema_postgis : " + str(schema_postgis) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "path_time_log : " + str(path_time_log) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "format_vector : " + str(format_vector) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "overwrite : " + str(overwrite) + endC) print("\n") if not os.path.exists(grid_output) or overwrite: ############################################ ### Préparation générale des traitements ### ############################################ print(bold + cyan + "Préparation au calcul de Height of Roughness Elements :" + endC) timeLine(path_time_log, " Préparation au calcul de Height of Roughness Elements : ") if os.path.exists(grid_output): removeVectorFile(grid_output) # Création de la base de données PostGIS # ~ dropDatabase(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=port_number, schema_name=schema_postgis) # Conflits avec autres indicateurs (Aspect Ratio / Terrain Roughness Class) createDatabase(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=port_number, schema_name=schema_postgis) # Import des fichiers shapes maille et bati dans la base de données PostGIS table_name_maille = importVectorByOgr2ogr(database_postgis, grid_input, 'hre_maille', user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema_name=schema_postgis, epsg=str(epsg), codage=project_encoding) table_name_bati 
= importVectorByOgr2ogr(database_postgis, built_input, 'hre_bati', user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema_name=schema_postgis, epsg=str(epsg), codage=project_encoding) # Gestion de l'ID connection = openConnection(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=port_number, schema_name=schema_postgis) attr_names_list = getAttributeNameList(built_input, format_vector=format_vector) if id_field not in attr_names_list: id_field = 'ogc_fid' # ~ id_query = "ALTER TABLE %s ADD COLUMN %s SERIAL PRIMARY KEY" % (table_name_bati, id_field) # ~ executeQuery(connection, id_query) ############################################### ### Calcul de l'indicateur par requêtes SQL ### ############################################### print(bold + cyan + "Calcul de Height of Roughness Elements :" + endC) timeLine(path_time_log, " Calcul de Height of Roughness Elements : ") # Création des index spatiaux (accélère les requêtes spatiales) query = """ --CREATE INDEX IF NOT EXISTS maille_geom_gist ON %s USING GIST (geom); --CREATE INDEX IF NOT EXISTS bati_geom_gist ON %s USING GIST (geom); """ % (table_name_maille, table_name_bati) # Intersect entre les tables maille et bâti (pour chaque maille, on récupère le bâti qui intersect) query += """ DROP TABLE IF EXISTS hre_decoup; CREATE TABLE hre_decoup AS SELECT b.%s as ID, b.%s as hauteur, ST_Intersection(b.geom, m.geom) as geom FROM %s as b, %s as m WHERE ST_Intersects(b.geom, m.geom) AND (ST_GeometryType(b.geom) = 'ST_Polygon' OR ST_GeometryType(b.geom) = 'ST_MultiPolygon') AND (ST_GeometryType(m.geom) = 'ST_Polygon' OR ST_GeometryType(m.geom) = 'ST_MultiPolygon'); CREATE INDEX IF NOT EXISTS decoup_geom_gist ON hre_decoup USING GIST (geom); """ % (id_field, height_field, table_name_bati, table_name_maille) # Table intermédiaire de calculs d'indicateurs secondaires query += """ DROP TABLE IF EXISTS hre_temp; CREATE TABLE hre_temp AS SELECT d.ID, st_area(d.geom) as surface, (st_area(d.geom) * d.hauteur) as volume, d.geom as geom FROM hre_decoup as d; CREATE INDEX IF NOT EXISTS temp_geom_gist ON hre_temp USING GIST (geom); """ # Table intermédiaire de calcul de mean_h seulement pour les mailles intersectant du bâti query += """ DROP TABLE IF EXISTS hre_maille_bis; CREATE TABLE hre_maille_bis AS SELECT m.ID as ID, ((sum(t.volume) / count(t.geom)) / (sum(t.surface) / count(t.geom))) as mean_h, m.geom as geom FROM %s as m, hre_temp as t WHERE ST_Intersects(m.geom, t.geom) AND (ST_GeometryType(m.geom) = 'ST_Polygon' OR ST_GeometryType(m.geom) = 'ST_MultiPolygon') AND (ST_GeometryType(t.geom) = 'ST_Polygon' OR ST_GeometryType(t.geom) = 'ST_MultiPolygon') GROUP BY m.ID, m.geom; CREATE INDEX IF NOT EXISTS maille_bis_geom_gist ON hre_maille_bis USING GIST (geom); """ % (table_name_maille) # Table intermédiaire seulement pour les mailles n'intersectant pas de bâti (par défaut, mean_h = 0) query += """ DROP TABLE IF EXISTS hre_maille_ter; CREATE TABLE hre_maille_ter AS SELECT DISTINCT ID as ID, geom as geom FROM %s WHERE ID NOT IN (SELECT DISTINCT ID FROM hre_maille_bis); ALTER TABLE hre_maille_ter ADD mean_h DOUBLE PRECISION; UPDATE hre_maille_ter SET mean_h = 0; CREATE INDEX IF NOT EXISTS maille_ter_geom_gist ON hre_maille_ter USING GIST (geom); """ % (table_name_maille) # Union des 2 tables précédentes pour récupérer l'ensemble des polygones maille de départ query += """ DROP TABLE IF EXISTS hre_height; CREATE TABLE hre_height AS SELECT ID, mean_h, geom FROM 
hre_maille_bis UNION SELECT ID, mean_h, geom FROM hre_maille_ter; ALTER TABLE hre_height ALTER COLUMN ID TYPE INTEGER; """ # Exécution de la requête SQL if debug >= 1: print(query) executeQuery(connection, query) closeConnection(connection) # Export en shape de la table contenant l'indicateur calculé exportVectorByOgr2ogr(database_postgis, grid_output, 'hre_height', user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema_name=schema_postgis, format_type=format_vector) ########################################## ### Nettoyage des fichiers temporaires ### ########################################## if not save_results_intermediate: # ~ dropDatabase(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=port_number, schema_name=schema_postgis) # Conflits avec autres indicateurs (Aspect Ratio / Terrain Roughness Class) pass else: print(bold + magenta + "Le calcul de Height of Roughness Elements a déjà eu lieu." + endC) print(bold + yellow + "Fin du calcul de l'indicateur Height of Roughness Elements." + endC + "\n") timeLine(path_time_log, "Fin du calcul de l'indicateur Height of Roughness Elements : ") return
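
# ------------------------------------------------------------------------------------------------------------
# Illustrative sketch (not part of the processing chain, added for clarity): the hre_maille_bis query above
# computes, per grid cell, (sum(volume)/count) / (sum(surface)/count), which reduces to sum(volume)/sum(surface),
# i.e. an area-weighted mean building height. The helper below reproduces that arithmetic in plain Python;
# its name and the (surface, hauteur) tuple layout are assumptions made for this example only.
def _mean_roughness_height_sketch(built_parts):
    # built_parts: list of (surface_m2, hauteur_m) tuples for the building pieces clipped to one grid cell
    total_volume = sum(surface * hauteur for surface, hauteur in built_parts)   # equivalent of sum(t.volume)
    total_surface = sum(surface for surface, _ in built_parts)                  # equivalent of sum(t.surface)
    return total_volume / total_surface if total_surface > 0 else 0.0           # mean_h (0 when no built area)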
def populationVulnerability(input_division, input_footprint, input_population, input_built, output_vulnerability, id_div='id', id_pop='IdINSPIRE', id_blt='ID', stake_field='Ind', health_vuln_field_list=['Ind_0_3', 'Ind_4_5', 'Ind_65_79', 'Ind_80p'], social_vuln_field_list=['Men_pauv'], height_field='HAUTEUR', built_sql_filter="NATURE LIKE 'Indiff%renci%e' AND (USAGE1 LIKE 'Indiff%renci%e' OR USAGE1 LIKE 'R%sidentiel' OR USAGE2 LIKE 'R%sidentiel' OR USAGE2 IS NULL) AND HAUTEUR IS NOT NULL AND ST_Area(geom) >= 20", epsg=2154, format_vector='ESRI Shapefile', postgis_ip_host='localhost', postgis_num_port=5432, postgis_user_name='postgres', postgis_password='******', postgis_database_name='uhi_vuln', postgis_schema_name='public', postgis_encoding='UTF-8', path_time_log='', save_results_intermediate=False, overwrite=True):

    if debug >= 3:
        print('\n' + bold + green + "Vulnérabilité des populations - Variables dans la fonction :" + endC)
        print(cyan + " populationVulnerability() : " + endC + "input_division : " + str(input_division) + endC)
        print(cyan + " populationVulnerability() : " + endC + "input_footprint : " + str(input_footprint) + endC)
        print(cyan + " populationVulnerability() : " + endC + "input_population : " + str(input_population) + endC)
        print(cyan + " populationVulnerability() : " + endC + "input_built : " + str(input_built) + endC)
        print(cyan + " populationVulnerability() : " + endC + "output_vulnerability : " + str(output_vulnerability) + endC)
        print(cyan + " populationVulnerability() : " + endC + "id_div : " + str(id_div) + endC)
        print(cyan + " populationVulnerability() : " + endC + "id_pop : " + str(id_pop) + endC)
        print(cyan + " populationVulnerability() : " + endC + "id_blt : " + str(id_blt) + endC)
        print(cyan + " populationVulnerability() : " + endC + "stake_field : " + str(stake_field) + endC)
        print(cyan + " populationVulnerability() : " + endC + "health_vuln_field_list : " + str(health_vuln_field_list) + endC)
        print(cyan + " populationVulnerability() : " + endC + "social_vuln_field_list : " + str(social_vuln_field_list) + endC)
        print(cyan + " populationVulnerability() : " + endC + "height_field : " + str(height_field) + endC)
        print(cyan + " populationVulnerability() : " + endC + "built_sql_filter : " + str(built_sql_filter) + endC)
        print(cyan + " populationVulnerability() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + " populationVulnerability() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + " populationVulnerability() : " + endC + "postgis_ip_host : " + str(postgis_ip_host) + endC)
        print(cyan + " populationVulnerability() : " + endC + "postgis_num_port : " + str(postgis_num_port) + endC)
        print(cyan + " populationVulnerability() : " + endC + "postgis_user_name : " + str(postgis_user_name) + endC)
        print(cyan + " populationVulnerability() : " + endC + "postgis_password : " + str(postgis_password) + endC)
        print(cyan + " populationVulnerability() : " + endC + "postgis_database_name : " + str(postgis_database_name) + endC)
        print(cyan + " populationVulnerability() : " + endC + "postgis_schema_name : " + str(postgis_schema_name) + endC)
        print(cyan + " populationVulnerability() : " + endC + "postgis_encoding : " + str(postgis_encoding) + endC)
        print(cyan + " populationVulnerability() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + " populationVulnerability() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + " populationVulnerability() : " + endC + "overwrite : " + str(overwrite) + endC + '\n')

    # Définition des constantes
    PREFIX_STAND = 'S_'
    STAKE_IND_FIELD = 'enjeu'
    HEALTH_VULN_IND_FIELD = 'vuln_san'
    SOCIAL_VULN_IND_FIELD = 'vuln_soc'
    GLOBAL_VULN_IND_FIELD = 'vuln_glo'

    # Mise à jour du log
    starting_event = "populationVulnerability() : Début du traitement : "
    timeLine(path_time_log, starting_event)
    print(cyan + "populationVulnerability() : " + bold + green + "DEBUT DES TRAITEMENTS" + endC + '\n')

    # Définition des variables tables/champs PostGIS
    div_table = 'i_division'
    ftp_table = 'i_footprint'
    pop_table = 'i_population'
    blt_table = 'i_built'
    vuln_table = 'o_vulnerability'
    clean_pop_table = 't_clean_pop'
    clean_blt_table = 't_clean_blt'
    inter_pop_blt_table = 't_inter_pop_blt'
    inter_popblt_div_table = 't_inter_popblt_div'
    div_no_pop_table = 't_div_with_no_pop'
    built_area_field = 'surf_bati'
    floor_area_field = 'surf_habit'
    inter_area_field = 'surf_inter'

    # Définition de variables diverses
    pop_fields_to_treat = [stake_field] + health_vuln_field_list + social_vuln_field_list
    pop_fields_to_keep = [id_pop] + pop_fields_to_treat
    blt_fields_to_keep = [id_blt] + [height_field]
    pop_fields_to_treat_str = stake_field
    for health_vuln_field in health_vuln_field_list:
        pop_fields_to_treat_str += ', ' + health_vuln_field
    for social_vuln_field in social_vuln_field_list:
        pop_fields_to_treat_str += ', ' + social_vuln_field

    # Nettoyage des traitements précédents
    if overwrite:
        if debug >= 3:
            print(cyan + "populationVulnerability() : " + endC + "Nettoyage des traitements précédents." + endC + '\n')
        removeVectorFile(output_vulnerability, format_vector=format_vector)
        dropDatabase(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name)
    else:
        if os.path.exists(output_vulnerability):
            print(cyan + "populationVulnerability() : " + bold + yellow + "Le fichier de sortie existe déjà et ne sera pas régénéré." + endC + '\n')
            raise
        pass

    ####################################################################

    #############
    # Etape 1/5 # Préparation de la base de données PostGIS
    #############

    print(cyan + "populationVulnerability() : " + bold + green + "ETAPE 1/5 - Début de la préparation de la base de données PostGIS."
+ endC + '\n') createDatabase(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name) div_table = importVectorByOgr2ogr(postgis_database_name, input_division, div_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, epsg=epsg, codage=postgis_encoding) ftp_table = importVectorByOgr2ogr(postgis_database_name, input_footprint, ftp_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, epsg=epsg, codage=postgis_encoding) pop_table = importVectorByOgr2ogr(postgis_database_name, input_population, pop_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, epsg=epsg, codage=postgis_encoding) blt_table = importVectorByOgr2ogr(postgis_database_name, input_built, blt_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, epsg=epsg, codage=postgis_encoding) connection = openConnection(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name) cursor = connection.cursor() query = "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % ( div_table, div_table) query += "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % ( ftp_table, ftp_table) query += "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % ( pop_table, pop_table) query += "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % ( blt_table, blt_table) if debug >= 3: print(query) executeQuery(connection, query) print(cyan + "populationVulnerability() : " + bold + green + "ETAPE 1/5 - Fin de la préparation de la base de données PostGIS." + endC + '\n') ############# # Etape 2/5 # Préparation des données ############# print(cyan + "populationVulnerability() : " + bold + green + "ETAPE 2/5 - Début de la préparation des données." 
+ endC + '\n') # Préparation donnée bâti = sélection à la zone d'étude + suppression du bâti non-habitation + MAJ champ hauteur + ajout champ surface habitable select_query = "" for blt_field in blt_fields_to_keep: select_query += "b.%s, " % blt_field query = "DROP TABLE IF EXISTS %s;\n" % clean_blt_table query += "CREATE TABLE %s AS\n" % clean_blt_table query += " SELECT %s, b.geom\n" % select_query[:-2] query += " FROM (\n" query += " SELECT *\n" query += " FROM %s\n" % blt_table query += " WHERE %s\n" % built_sql_filter query += " ) AS b, %s AS f\n" % ftp_table query += " WHERE ST_Intersects(b.geom, f.geom);\n" query += "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % ( clean_blt_table, clean_blt_table) query += "UPDATE %s SET %s = 3 WHERE %s < 3;\n" % ( clean_blt_table, height_field, height_field) query += "ALTER TABLE %s ADD COLUMN %s NUMERIC(12,6);\n" % ( clean_blt_table, built_area_field) query += "UPDATE %s SET %s = ST_Area(geom);\n" % (clean_blt_table, built_area_field) query += "ALTER TABLE %s ADD COLUMN %s NUMERIC(12,6);\n" % ( clean_blt_table, floor_area_field) query += "UPDATE %s SET %s = ((%s * %s) / 3);\n" % ( clean_blt_table, floor_area_field, height_field, built_area_field) if debug >= 3: print(query) executeQuery(connection, query) blt_fields_to_keep.append(built_area_field) blt_fields_to_keep.append(floor_area_field) # Préparation donnée population = sélection à la zone d'étude + nettoyage de champs select_query = "" for pop_field in pop_fields_to_keep: select_query += "p.%s, " % pop_field query = "DROP TABLE IF EXISTS %s;\n" % clean_pop_table query += "CREATE TABLE %s AS\n" % clean_pop_table query += " SELECT %s, p.geom\n" % select_query[:-2] query += " FROM %s AS p, %s AS f\n" % (pop_table, ftp_table) query += " WHERE ST_Intersects(p.geom, f.geom);\n" query += "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % ( clean_pop_table, clean_pop_table) if debug >= 3: print(query) executeQuery(connection, query) print(cyan + "populationVulnerability() : " + bold + green + "ETAPE 2/5 - Fin de la préparation des données." + endC + '\n') ############# # Etape 3/5 # Intersect de données ############# print(cyan + "populationVulnerability() : " + bold + green + "ETAPE 3/5 - Début de l'intersect de données." 
+ endC + '\n') # Calcul du prorata de population par bâtiment select_query = "" for blt_field in blt_fields_to_keep: select_query += "b.%s, " % blt_field for pop_field in pop_fields_to_keep: select_query += "p.%s, " % pop_field query = "DROP TABLE IF EXISTS %s;\n" % inter_pop_blt_table query += "CREATE TABLE %s AS\n" % inter_pop_blt_table query += " SELECT %s, ST_Intersection(b.geom, p.geom) AS geom\n" % select_query[: -2] query += " FROM %s AS b, %s AS p\n" % (clean_blt_table, clean_pop_table) query += " WHERE ST_Intersects(b.geom, p.geom);\n" query += "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % ( inter_pop_blt_table, inter_pop_blt_table) query += "ALTER TABLE %s ADD COLUMN %s NUMERIC(12,6);\n" % ( inter_pop_blt_table, inter_area_field) query += "UPDATE %s SET %s = ST_Area(geom);\n" % (inter_pop_blt_table, inter_area_field) for pop_field in pop_fields_to_treat: query += "UPDATE %s SET %s = (%s * (%s / %s));\n" % ( inter_pop_blt_table, pop_field, pop_field, inter_area_field, built_area_field) if debug >= 3: print(query) executeQuery(connection, query) # Somme de population par polygone division select_query = "SUM(i.%s) AS %s, " % (floor_area_field, floor_area_field) for pop_field in pop_fields_to_treat: select_query += "SUM(i.%s) AS %s, " % (pop_field, pop_field) query = "DROP TABLE IF EXISTS %s;\n" % inter_popblt_div_table query += "CREATE TABLE %s AS\n" % inter_popblt_div_table query += " SELECT d.%s, %s, d.geom\n" % (id_div, select_query[:-2]) query += " FROM %s AS d, %s AS i\n" % (div_table, inter_pop_blt_table) query += " WHERE ST_Intersects(d.geom, i.geom)\n" query += " GROUP BY d.%s, d.geom;\n" % id_div query += "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % ( inter_popblt_div_table, inter_popblt_div_table) if debug >= 3: print(query) executeQuery(connection, query) print(cyan + "populationVulnerability() : " + bold + green + "ETAPE 3/5 - Fin de l'intersect de données." + endC + '\n') ############# # Etape 4/5 # Préparation du fichier final ############# print(cyan + "populationVulnerability() : " + bold + green + "ETAPE 4/5 - Début de la préparation du fichier final." 
+ endC + '\n') # Nouvelle table avec les géométries sans données population query = "DROP TABLE IF EXISTS %s;\n" % div_no_pop_table query += "CREATE TABLE %s AS\n" % div_no_pop_table query += " SELECT DISTINCT %s, geom\n" % id_div query += " FROM %s\n" % div_table query += " WHERE %s NOT IN\n" % id_div query += " (SELECT DISTINCT %s\n" % id_div query += " FROM %s);\n" % inter_popblt_div_table query += "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % ( div_no_pop_table, div_no_pop_table) query += "ALTER TABLE %s ADD COLUMN %s NUMERIC(12,6) DEFAULT 0;\n" % ( div_no_pop_table, floor_area_field) for pop_field in pop_fields_to_treat: query += "ALTER TABLE %s ADD COLUMN %s NUMERIC(8,6) DEFAULT 0;\n" % ( div_no_pop_table, pop_field) if debug >= 3: print(query) executeQuery(connection, query) # Union de tables pour reconstituer la table finale query = "DROP TABLE IF EXISTS %s;\n" % vuln_table query += "CREATE TABLE %s AS\n" % vuln_table query += " SELECT %s, %s, %s, geom\n" % (id_div, floor_area_field, pop_fields_to_treat_str) query += " FROM %s\n" % inter_popblt_div_table query += " UNION\n" query += " SELECT %s, %s, %s, geom\n" % (id_div, floor_area_field, pop_fields_to_treat_str) query += " FROM %s;\n" % div_no_pop_table query += "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % ( vuln_table, vuln_table) if debug >= 3: print(query) executeQuery(connection, query) # Standardisation des données for pop_field in pop_fields_to_treat: pop_stand_field = PREFIX_STAND + pop_field[:8] cursor.execute("SELECT min(%s) FROM %s;" % (pop_field, vuln_table)) min_value = cursor.fetchone() cursor.execute("SELECT max(%s) FROM %s;" % (pop_field, vuln_table)) max_value = cursor.fetchone() pop_stand_calc = "((%s - %s) / (%s - %s))" % ( pop_field, min_value[0], max_value[0], min_value[0]) query = "ALTER TABLE %s ADD COLUMN %s NUMERIC(8,6);\n" % ( vuln_table, pop_stand_field) query += "UPDATE %s SET %s = %s;\n" % (vuln_table, pop_stand_field, pop_stand_calc) if debug >= 3: print(query) executeQuery(connection, query) print(cyan + "populationVulnerability() : " + bold + green + "ETAPE 4/5 - préparation du fichier final." + endC + '\n') ############# # Etape 5/5 # Calcul des indicateurs ############# print(cyan + "populationVulnerability() : " + bold + green + "ETAPE 5/5 - Début du calcul des indicateurs." 
+ endC + '\n') # Calcul indice enjeu stake_stand_field = PREFIX_STAND + stake_field[:8] cursor.execute("SELECT max(%s) FROM %s;" % (stake_stand_field, vuln_table)) max_enjeu = cursor.fetchone() query = "ALTER TABLE %s ADD COLUMN %s NUMERIC(8,6);\n" % (vuln_table, STAKE_IND_FIELD) query += "UPDATE %s SET %s = (%s / %s);\n" % ( vuln_table, STAKE_IND_FIELD, stake_stand_field, max_enjeu[0]) if debug >= 3: print(query) executeQuery(connection, query) # Indice de vulnérabilité sanitaire health_vuln_pop_sum = "" for health_vuln_field in health_vuln_field_list: pop_stand_field = PREFIX_STAND + health_vuln_field[:8] health_vuln_pop_sum += "%s + " % pop_stand_field health_vuln_pop_sum = health_vuln_pop_sum[:-3] cursor.execute("SELECT max(%s) FROM %s;" % (health_vuln_pop_sum, vuln_table)) max_health_vuln_pop_sum = cursor.fetchone() query = "ALTER TABLE %s ADD COLUMN %s NUMERIC(8,6);\n" % ( vuln_table, HEALTH_VULN_IND_FIELD) query += "UPDATE %s SET %s = ((%s) / %s);\n" % ( vuln_table, HEALTH_VULN_IND_FIELD, health_vuln_pop_sum, max_health_vuln_pop_sum[0]) if debug >= 3: print(query) executeQuery(connection, query) # Indice de vulnérabilité sociale social_vuln_pop_sum = "" for social_vuln_field in social_vuln_field_list: pop_stand_field = PREFIX_STAND + social_vuln_field[:8] social_vuln_pop_sum += "%s + " % pop_stand_field social_vuln_pop_sum = social_vuln_pop_sum[:-3] cursor.execute("SELECT max(%s) FROM %s;" % (social_vuln_pop_sum, vuln_table)) max_social_vuln_pop_sum = cursor.fetchone() query = "ALTER TABLE %s ADD COLUMN %s NUMERIC(8,6);\n" % ( vuln_table, SOCIAL_VULN_IND_FIELD) query += "UPDATE %s SET %s = ((%s) / %s);\n" % ( vuln_table, SOCIAL_VULN_IND_FIELD, social_vuln_pop_sum, max_social_vuln_pop_sum[0]) if debug >= 3: print(query) executeQuery(connection, query) # Indice de vulnérabilité globale cursor.execute("SELECT max(%s + %s) FROM %s;" % (HEALTH_VULN_IND_FIELD, SOCIAL_VULN_IND_FIELD, vuln_table)) max_pop_vuln_glo_sum = cursor.fetchone() query = "ALTER TABLE %s ADD COLUMN %s NUMERIC(8,6);\n" % ( vuln_table, GLOBAL_VULN_IND_FIELD) query += "UPDATE %s SET %s = ((%s + %s) / %s);\n" % ( vuln_table, GLOBAL_VULN_IND_FIELD, HEALTH_VULN_IND_FIELD, SOCIAL_VULN_IND_FIELD, max_pop_vuln_glo_sum[0]) if debug >= 3: print(query) executeQuery(connection, query) print(cyan + "populationVulnerability() : " + bold + green + "ETAPE 5/5 - Fin du calcul des indicateurs." + endC + '\n') closeConnection(connection) exportVectorByOgr2ogr(postgis_database_name, output_vulnerability, vuln_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, format_type=format_vector) #################################################################### # Suppression des fichiers temporaires if not save_results_intermediate: if debug >= 3: print(cyan + "populationVulnerability() : " + endC + "Suppression des fichiers temporaires." + endC + '\n') dropDatabase(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name) print(cyan + "populationVulnerability() : " + bold + green + "FIN DES TRAITEMENTS" + endC + '\n') # Mise à jour du log ending_event = "populationVulnerability() : Fin du traitement : " timeLine(path_time_log, ending_event) return 0
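
# ------------------------------------------------------------------------------------------------------------
# Illustrative sketch (not part of the processing chain, added for clarity): the standardisation applied in
# step 4/5 is a min-max rescaling of each population field, and each vulnerability index in step 5/5 divides
# the (sum of) standardized fields by its maximum over the study area. The function names and the list-based
# inputs are assumptions made for this example only.
def _min_max_standardize_sketch(values):
    # Mirrors the SQL "((x - min) / (max - min))" UPDATE used for the S_* fields.
    v_min, v_max = min(values), max(values)
    if v_max == v_min:
        return [0.0 for _ in values]
    return [(v - v_min) / (v_max - v_min) for v in values]

def _vulnerability_index_sketch(standardized_sums):
    # Mirrors the SQL "UPDATE ... SET vuln_x = (sum_of_standardized_fields / max_over_area)".
    max_sum = max(standardized_sums)
    return [s / max_sum if max_sum > 0 else 0.0 for s in standardized_sums]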
def traitementsPostGIS(urbanatlas_input, ucz_output, emprise_file, mask_file, enter_with_mask, image_file, mnh_file, built_files_list, hydrography_file, roads_files_list, rpg_file, indicators_method, ucz_method, dbms_choice, threshold_ndvi, threshold_ndvi_water, threshold_ndwi2, threshold_bi_bottom, threshold_bi_top, path_time_log, temp_directory): ############################################################ ### Fin des calculs des indicateurs, à partir de PostGIS ### ############################################################ print(bold + yellow + "Début de l'étape finale de calcul des indicateurs." + endC) step = " Début de l'étape finale de calcul des indicateurs : " timeLine(path_time_log, step) database = os.path.splitext( os.path.basename(ucz_output))[0].lower() # Base de données PostGIS grid_ready_cleaned = temp_directory + os.sep + os.path.splitext( os.path.basename(urbanatlas_input) )[0] + "_cut_cleaned.shp" # Fichier shape maillage built_shape = temp_directory + os.sep + "bati.shp" # Fichier shape bâti grid_table = ucz_method.lower( ) + ".maille" # Table PostGIS pour le maillage built_table = ucz_method.lower() + ".bati" # Table PostGIS pour le bâti grid_codage = "latin1" # Encodage du fichier shape de maillage. Par défaut : latin1. if indicators_method == "Resultats_classif": built_codage = "utf-8" # Encodage du fichier shape de bâti issu du traitement de la classif supervisée. Par défaut : utf-8 built_height = "(mean/100)" # Nom du champ du fichier shape de bâti issu du traitement de la classif supervisée contenant l'information de hauteur. Par défaut : mean ('/100' ajouté pour retrouver une hauteur en mètres dans les calculs) else: built_codage = "latin1" # Encodage du fichier shape de bâti issu de la BD TOPO. Par défaut : latin1 built_height = "hauteur" # Nom du champ du fichier shape de bâti issu de la BD TOPO contenant l'information de hauteur. 
Par défaut : hauteur ### Mise en place de la base de données PostGIS et fin des calculs des indicateurs ### createDatabase( database, user_name='postgres', password='******', ip_host='localhost', num_port='5432') # Création de la base de données si elle n'existe pas connection = openConnection(database, user_name='postgres', password='******', ip_host='localhost', num_port='5432', schema_name='') createSchema(connection, ucz_method.lower()) # Création du schéma s'il n'existe pas closeConnection(connection) print(bold + cyan + " Import du fichier shape de maillage :" + endC) grid_table = importShape(database, grid_ready_cleaned, grid_table, user_name='postgres', password='******', ip_host='localhost', num_port='5432', schema_name=ucz_method.lower(), epsg='2154', codage=grid_codage) print("\n") print(bold + cyan + " Import du fichier shape du bâti :" + endC) built_table = importShape(database, built_shape, built_table, user_name='postgres', password='******', ip_host='localhost', num_port='5432', schema_name=ucz_method.lower(), epsg='2154', codage=built_codage) print("\n") connection = openConnection(database, user_name='postgres', password='******', ip_host='localhost', num_port='5432', schema_name=ucz_method.lower() ) # Connection à la base de données PostGIS # Calcul du périmètre et de la surface de façades de chaque bâtiment print( bold + cyan + " Calcul du périmètre et de la surface de façades de chaque bâtiment :" + endC) query = """ ALTER TABLE %s.bati DROP IF EXISTS perimeter; ALTER TABLE %s.bati ADD perimeter NUMERIC(10,2); UPDATE %s.bati SET perimeter = st_perimeter(geom); ALTER TABLE %s.bati DROP IF EXISTS surf_fac; ALTER TABLE %s.bati ADD surf_fac NUMERIC(10,2); UPDATE %s.bati SET surf_fac = %s * perimeter; """ % (ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), built_height) print(query + "\n") executeQuery(connection, query) # Calcul de la somme de surface de façades des bâtiments par maille print( bold + cyan + " Calcul de la somme de surface de façades des bâtiments par maille :" + endC) query = """ DROP TABLE IF EXISTS %s.temp0; CREATE TABLE %s.temp0 AS SELECT m.ID AS ID, sum(b.surf_fac) AS surf_fac FROM %s.bati AS b, %s.maille AS m WHERE st_intersects(b.geom, m.geom) GROUP BY m.ID; """ % (ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower()) print(query + "\n") executeQuery(connection, query) # Création d'une nouvelle table avec seulement les mailles qui intersectent du bâti print( bold + cyan + " Création d'une nouvelle table avec seulement les mailles qui intersectent du bâti :" + endC) query = """ DROP TABLE IF EXISTS %s.temp; CREATE TABLE %s.temp AS SELECT m.ID AS ID, m.imperm AS SI, m.s_nonbati AS surf_nonba, t.surf_fac AS surf_fac, (m.mean / 100) AS z0, m.geom AS geom FROM %s.maille AS m, %s.temp0 AS t WHERE t.ID = m.ID; """ % (ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower()) print(query + "\n") executeQuery(connection, query) # Fin du calcul du rapport d'aspect print(bold + cyan + " Fin du calcul du rapport d'aspect :" + endC) query = """ ALTER TABLE %s.temp DROP IF EXISTS RA; ALTER TABLE %s.temp ADD RA NUMERIC(10,2); UPDATE %s.temp SET RA = (0.5 * (surf_fac / (surf_nonba + 0.0001))); """ % (ucz_method.lower(), ucz_method.lower(), ucz_method.lower()) print(query + "\n") executeQuery(connection, query) # Fin du calcul de la classe de rugosité print(bold + cyan + " Fin du calcul de la classe de rugosité :" + endC) query = """ ALTER TABLE 
%s.temp DROP IF EXISTS Rug; ALTER TABLE %s.temp ADD Rug INTEGER; UPDATE %s.temp SET Rug = 1 WHERE z0 < 0.005; UPDATE %s.temp SET Rug = 2 WHERE z0 >= 0.005 AND z0 < 0.03; UPDATE %s.temp SET Rug = 3 WHERE z0 >= 0.03 AND z0 < 0.1; UPDATE %s.temp SET Rug = 4 WHERE z0 >= 0.1 AND z0 < 0.25; UPDATE %s.temp SET Rug = 5 WHERE z0 >= 0.25 AND z0 < 0.5; UPDATE %s.temp SET Rug = 6 WHERE z0 >= 0.5 AND z0 < 1; UPDATE %s.temp SET Rug = 7 WHERE z0 >= 1 AND z0 < 2; UPDATE %s.temp SET Rug = 8 WHERE z0 >= 2; """ % (ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower()) print(query + "\n") executeQuery(connection, query) # Création d'une nouvelle table avec les mailles qui n'intersectent pas de bâtiment, complémentaire de la précédente print( bold + cyan + " Création d'une nouvelle table avec les mailles qui n'intersectent pas de bâtiment, complémentaire de la précédente :" + endC) query = """ DROP TABLE IF EXISTS %s.temp_bis; CREATE TABLE %s.temp_bis AS SELECT DISTINCT ID, imperm AS SI, geom FROM %s.maille WHERE ID NOT IN (SELECT DISTINCT ID FROM %s.temp); """ % (ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower()) print(query + "\n") executeQuery(connection, query) # Ajout + MAJ par défaut des champs 'rapport d'aspect' (à 0) et 'classe de rugosité' (à 1) dans cette nouvelle table print( bold + cyan + " Ajout (et MAJ par défaut) des champs 'rapport d'aspect' (à 0) et 'classe de rugosité' (à 1) dans cette nouvelle table :" + endC) query = """ ALTER TABLE %s.temp_bis DROP IF EXISTS RA; ALTER TABLE %s.temp_bis ADD RA NUMERIC(10,2); UPDATE %s.temp_bis SET RA = 0; ALTER TABLE %s.temp_bis DROP IF EXISTS Rug; ALTER TABLE %s.temp_bis ADD Rug INTEGER; UPDATE %s.temp_bis SET Rug = 1; """ % (ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower()) print(query + "\n") executeQuery(connection, query) closeConnection( connection) # Fermeture de la connexion à la base de données PostGIS step = " Fin de l'étape finale de calcul des indicateurs : " timeLine(path_time_log, step) print(bold + yellow + "Fin de l'étape finale de calcul des indicateurs." + endC) print("\n") ####################################################################################### ### Étape finale de cartographie en Zones Climatiques Urbaines, à partir de PostGIS ### ####################################################################################### print( bold + yellow + "Début de l'étape finale de cartographie en Zones Climatiques Urbaines." 
+ endC) step = " Début de l'étape finale de cartographie en Zones Climatiques Urbaines : " timeLine(path_time_log, step) ucz_table = ucz_method.lower() + ".ucz" # Table PostGIS ### Tous les indicateurs sont calculés dans la base de données : étape finale d'attribution d'une classe d'UCZ à chaque polygone du maillage ### connection = openConnection(database, user_name='postgres', password='******', ip_host='localhost', num_port='5432', schema_name=ucz_method.lower() ) # Connection à la base de données PostGIS # Création de la table finale où seront attribuées les classes d'UCZ print( bold + cyan + " Création de la table finale où seront attribuées les classes d'UCZ :" + endC) query = """ DROP TABLE IF EXISTS %s.ucz; CREATE TABLE %s.ucz AS SELECT ID, SI, RA, Rug, geom FROM %s.temp UNION SELECT ID, SI, RA, Rug, geom FROM %s.temp_bis; ALTER TABLE %s.ucz DROP IF EXISTS UCZ; ALTER TABLE %s.ucz ADD UCZ INTEGER; ALTER TABLE %s.ucz ALTER COLUMN ID TYPE INTEGER; """ % (ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower(), ucz_method.lower()) print(query + "\n") executeQuery(connection, query) # Exécution des requêtes SQL d'attribution des classes d'UCZ print(bold + cyan + " Exécution des requêtes SQL d'attribution des classes d'UCZ :" + endC) query_ucz = choixSeuilsUCZ(ucz_method, 'PostGIS') print(query_ucz + "\n") executeQuery(connection, query_ucz) closeConnection( connection) # Fermeture de la connexion à la base de données PostGIS # Export de la table UCZ en fichier shape print(bold + cyan + " Export de la table 'UCZ' en fichier shape '%s' :" % (ucz_output) + endC) exportShape(database, ucz_output, ucz_table, user_name='postgres', password='******', ip_host='localhost', num_port='5432', schema_name=ucz_method.lower()) step = " Fin de l'étape finale de cartographie en Zones Climatiques Urbaines : " timeLine(path_time_log, step) print( bold + yellow + "Fin de l'étape finale de cartographie en Zones Climatiques Urbaines." + endC) print("\n") return
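
# ------------------------------------------------------------------------------------------------------------
# Illustrative sketch (not part of the processing chain, added for clarity): per grid cell, the SQL above
# derives the aspect ratio as RA = 0.5 * (surface de façades / surface non bâtie) and assigns an 8-level
# roughness class from z0. The helper below reproduces those two formulas with the same thresholds; its name
# and arguments are assumptions made for this example only.
def _aspect_ratio_and_rugosity_sketch(surf_fac, surf_nonbati, z0):
    ra = 0.5 * (surf_fac / (surf_nonbati + 0.0001))     # same +0.0001 guard against division by zero as the SQL
    bounds = [0.005, 0.03, 0.1, 0.25, 0.5, 1.0, 2.0]    # class limits used in the "UPDATE ... SET Rug" queries
    rug = 1 + sum(1 for b in bounds if z0 >= b)         # classes 1 (z0 < 0.005) to 8 (z0 >= 2)
    return ra, rug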
def terrainRoughnessClass(grid_input, grid_output, built_input, distance_lines, epsg, project_encoding, server_postgis, port_number, user_postgis, password_postgis, database_postgis, schema_postgis, path_time_log, format_vector='ESRI Shapefile', save_results_intermediate=False, overwrite=True): print(bold + yellow + "Début du calcul de l'indicateur Terrain Roughness Class." + endC + "\n") timeLine(path_time_log, "Début du calcul de l'indicateur Terrain Roughness Class : ") if debug >= 3: print(bold + green + "terrainRoughnessClass() : Variables dans la fonction" + endC) print(cyan + "terrainRoughnessClass() : " + endC + "grid_input : " + str(grid_input) + endC) print(cyan + "terrainRoughnessClass() : " + endC + "grid_output : " + str(grid_output) + endC) print(cyan + "terrainRoughnessClass() : " + endC + "built_input : " + str(built_input) + endC) print(cyan + "terrainRoughnessClass() : " + endC + "distance_lines : " + str(distance_lines) + endC) print(cyan + "terrainRoughnessClass() : " + endC + "epsg : " + str(epsg) + endC) print(cyan + "terrainRoughnessClass() : " + endC + "project_encoding : " + str(project_encoding) + endC) print(cyan + "terrainRoughnessClass() : " + endC + "server_postgis : " + str(server_postgis) + endC) print(cyan + "terrainRoughnessClass() : " + endC + "port_number : " + str(port_number) + endC) print(cyan + "terrainRoughnessClass() : " + endC + "user_postgis : " + str(user_postgis) + endC) print(cyan + "terrainRoughnessClass() : " + endC + "password_postgis : " + str(password_postgis) + endC) print(cyan + "terrainRoughnessClass() : " + endC + "database_postgis : " + str(database_postgis) + endC) print(cyan + "terrainRoughnessClass() : " + endC + "schema_postgis : " + str(schema_postgis) + endC) print(cyan + "terrainRoughnessClass() : " + endC + "path_time_log : " + str(path_time_log) + endC) print(cyan + "terrainRoughnessClass() : " + endC + "format_vector : " + str(format_vector) + endC) print(cyan + "terrainRoughnessClass() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC) print(cyan + "terrainRoughnessClass() : " + endC + "overwrite : " + str(overwrite) + endC) print("\n") if not os.path.exists(grid_output) or overwrite: ############################################ ### Préparation générale des traitements ### ############################################ print(bold + cyan + "Préparation au calcul de Terrain Roughness Class :" + endC) timeLine(path_time_log, " Préparation au calcul de Terrain Roughness Class : ") if os.path.exists(grid_output): removeVectorFile(grid_output) # Création de la base de données PostGIS # ~ dropDatabase(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=port_number, schema_name=schema_postgis) # Conflits avec autres indicateurs (Aspect Ratio / Height of Roughness Elements) createDatabase(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=port_number, schema_name=schema_postgis) # Import des fichiers shapes maille et bati dans la base de données PostGIS table_name_maille = importVectorByOgr2ogr(database_postgis, grid_input, 'trc_maille', user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema_name=schema_postgis, epsg=str(epsg), codage=project_encoding) table_name_bati = importVectorByOgr2ogr(database_postgis, built_input, 'trc_bati', user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), 
schema_name=schema_postgis, epsg=str(epsg), codage=project_encoding) # Récupération de l'emprise de la zone d'étude, définie par le fichier maillage d'entrée xmin,xmax,ymin,ymax = getEmpriseFile(grid_input, format_vector) if debug >= 1: print(bold + "Emprise du fichier '%s' :" % (grid_input) + endC) print(" xmin = " + str(xmin)) print(" xmax = " + str(xmax)) print(" ymin = " + str(ymin)) print(" ymax = " + str(ymax)) # Création de la liste des valeurs de x à entrer dans la requêtes SQL de création de lignes x_list = [xmin] # Initialisation de la liste x = xmin # Définition de la valeur du 1er x à entrer dans la boucle while x < (xmax - distance_lines): # On boucle tant que la valeur de x ne dépasse pas le xmax du fichier maillage en entrée x = x + distance_lines x_list.append(x) # Ajout de la nouvelle valeur de x dans la liste if debug >= 2: print(bold + "x_list : " + endC + str(x_list) + "\n") # Création de la liste des valeurs de y à entrer dans la requêtes SQL de création de lignes y_list = [ymax] # Initialisation de la liste y = ymax # Définition de la valeur du 1er y à entrer dans la boucle while y > (ymin + distance_lines): # On boucle tant que la valeur de y ne descend pas en-dessous du ymin du fichier maillage en entrée y = y - distance_lines y_list.append(y) # Ajout de la nouvelle valeur de y dans la liste if debug >= 2: print(bold + "y_list : " + endC + str(y_list) + "\n") ################################################# ### Création des lignes parallèles N-S et W-E ### ################################################# print(bold + cyan + "Création des lignes parallèles N-S et W-E :" + endC) timeLine(path_time_log, " Création des lignes parallèles N-S et W-E : ") connection = openConnection(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=port_number, schema_name=schema_postgis) # Construction de la requête de création des lignes parallèles N-S query_lines_NS = "DROP TABLE IF EXISTS trc_lines_NS;\n" query_lines_NS += "CREATE TABLE trc_lines_NS (line text, geom geometry);\n" query_lines_NS += "INSERT INTO trc_lines_NS VALUES\n" # Boucle sur les valeurs de x dans la liste associée, pour construire la requête de création des lignes parallèles N-S count_NS = 0 for x in x_list: count_NS += 1 query_lines_NS += " ('line_NS_%s', 'LINESTRING(%s %s, %s %s)'),\n" % (count_NS, x, ymax, x, ymin) # Fin de la requête de création des lignes parallèles N-S et exécution de cette requête query_lines_NS = query_lines_NS[:-2] + ";\n" # Transformer la virgule de la dernière ligne SQL en point-virgule (pour terminer la requête) query_lines_NS += "ALTER TABLE trc_lines_NS ALTER COLUMN geom TYPE geometry(LINESTRING,%s) USING ST_SetSRID(geom,%s);\n" % (epsg,epsg) # Mise à jour du système de coordonnées if debug >= 1: print(query_lines_NS) executeQuery(connection, query_lines_NS) # Construction de la requête de création des lignes parallèles W-E query_lines_WE = "DROP TABLE IF EXISTS trc_lines_WE;\n" query_lines_WE += "CREATE TABLE trc_lines_WE (line text, geom geometry);\n" query_lines_WE += "INSERT INTO trc_lines_WE VALUES\n" # Boucle sur les valeurs de y dans la liste associée, pour construire la requête de création des lignes parallèles W-E count_WE = 0 for y in y_list: count_WE += 1 query_lines_WE += " ('line_WE_%s', 'LINESTRING(%s %s, %s %s)'),\n" % (count_WE, xmin, y, xmax, y) # Fin de la requête de création des lignes parallèles W-E et exécution de cette requête query_lines_WE = query_lines_WE[:-2] + ";\n" # Transformer la virgule de la dernière 
ligne SQL en point-virgule (pour terminer la requête) query_lines_WE += "ALTER TABLE trc_lines_WE ALTER COLUMN geom TYPE geometry(LINESTRING,%s) USING ST_SetSRID(geom,%s);\n" % (epsg,epsg) # Mise à jour du système de coordonnées if debug >= 1: print(query_lines_WE) executeQuery(connection, query_lines_WE) ##################################################################################################### ### Découpage des bâtiments, et des lignes N-S et W-E à cheval sur plusieurs mailles (intersects) ### ##################################################################################################### print(bold + cyan + "Lancement des requêtes d'intersect :" + endC) timeLine(path_time_log, " Lancement des requêtes d'intersect : ") query_intersect = """ --CREATE INDEX IF NOT EXISTS maille_geom_gist ON %s USING GIST (geom); --CREATE INDEX IF NOT EXISTS bati_geom_gist ON %s USING GIST (geom); CREATE INDEX IF NOT EXISTS lines_NS_geom_gist ON trc_lines_NS USING GIST (geom); CREATE INDEX IF NOT EXISTS lines_WE_geom_gist ON trc_lines_WE USING GIST (geom); DROP TABLE IF EXISTS trc_decoup; CREATE TABLE trc_decoup AS SELECT b.ID as ID, b.HAUTEUR as hauteur, ST_Intersection(b.geom, m.geom) as geom FROM %s as b, %s as m WHERE ST_Intersects(b.geom, m.geom); CREATE INDEX IF NOT EXISTS decoup_geom_gist ON trc_decoup USING GIST (geom); DROP TABLE IF EXISTS trc_decoupNS; CREATE TABLE trc_decoupNS AS SELECT m.ID as ID, l.line as line, ST_Intersection(l.geom, m.geom) as geom FROM trc_lines_NS as l, %s as m WHERE ST_Intersects(l.geom, m.geom); CREATE INDEX IF NOT EXISTS decoupNS_geom_gist ON trc_decoupNS USING GIST (geom); DROP TABLE IF EXISTS trc_decoupWE; CREATE TABLE trc_decoupWE AS SELECT m.ID as ID, l.line as line, ST_Intersection(l.geom, m.geom) as geom FROM trc_lines_WE as l, %s as m WHERE ST_Intersects(l.geom, m.geom); CREATE INDEX IF NOT EXISTS decoupWE_geom_gist ON trc_decoupWE USING GIST (geom); """ % (table_name_maille, table_name_bati, table_name_bati, table_name_maille, table_name_maille, table_name_maille) if debug >= 1: print(query_intersect) executeQuery(connection, query_intersect) ####################################################################################################################################################### ### Calculs pour l'obtention des sous-indicateurs liés à l'intersect des bâtiments dans chaque maille : h, plan area ratio, hauteur de déplacement ### ####################################################################################################################################################### print(bold + cyan + "Calcul des indicateurs secondaires liés aux bâtiments intersectant chaque maille :" + endC) timeLine(path_time_log, " Calcul des indicateurs secondaires liés aux bâtiments intersectant chaque maille : ") query_bati = """ DROP TABLE IF EXISTS trc_temp_a; CREATE TABLE trc_temp_a AS SELECT ID, hauteur, st_area(geom) as surface, geom FROM trc_decoup; ALTER TABLE trc_temp_a ADD volume DOUBLE PRECISION; UPDATE trc_temp_a SET volume = (surface * hauteur); ALTER TABLE trc_temp_a ADD VxH DOUBLE PRECISION; UPDATE trc_temp_a SET VxH = (volume * hauteur); CREATE INDEX IF NOT EXISTS temp_a_geom_gist ON trc_temp_a USING GIST (geom); DROP TABLE IF EXISTS trc_temp_b; CREATE TABLE trc_temp_b AS SELECT m.ID as ID, (sum(a.VxH) / sum(a.volume)) as h, (sum(a.surface) / st_area(m.geom)) as PAR, m.geom as geom FROM %s as m, trc_temp_a as a WHERE ST_Intersects(m.geom, a.geom) GROUP BY m.ID, m.geom; ALTER TABLE trc_temp_b ADD zd DOUBLE PRECISION; UPDATE trc_temp_b 
SET zd = (h * (PAR ^ 0.6)); CREATE INDEX IF NOT EXISTS temp_b_geom_gist ON trc_temp_b USING GIST (geom); DROP TABLE IF EXISTS trc_temp_c; CREATE TABLE trc_temp_c AS SELECT DISTINCT ID, geom FROM %s WHERE ID NOT IN (SELECT DISTINCT ID FROM trc_temp_b); ALTER TABLE trc_temp_c ADD h DOUBLE PRECISION; UPDATE trc_temp_c SET h = 0; ALTER TABLE trc_temp_c ADD PAR DOUBLE PRECISION; UPDATE trc_temp_c SET PAR = 0; ALTER TABLE trc_temp_c ADD zd DOUBLE PRECISION; UPDATE trc_temp_c SET zd = 0; CREATE INDEX IF NOT EXISTS temp_c_geom_gist ON trc_temp_c USING GIST (geom); DROP TABLE IF EXISTS trc_temp; CREATE TABLE trc_temp AS SELECT ID, h, PAR, zd, geom FROM trc_temp_b UNION SELECT ID, h, PAR, zd, geom FROM trc_temp_c; ALTER TABLE trc_temp ALTER COLUMN ID TYPE INTEGER; CREATE INDEX IF NOT EXISTS temp_geom_gist ON trc_temp USING GIST (geom); """ % (table_name_maille, table_name_maille) if debug >= 1: print(query_bati) executeQuery(connection, query_bati) ##################################################################################################################################################### ### Calculs pour l'obtention des sous-indicateurs liés à l'intersect des bâtiments avec chaque ligne N-S, dans chaque maille : frontal area ratio ### ##################################################################################################################################################### print(bold + cyan + "Calcul des indicateurs secondaires liés aux bâtiments intersectant chaque ligne N-S, intersectant chaque maille :" + endC) timeLine(path_time_log, " Calcul des indicateurs secondaires liés aux bâtiments intersectant chaque ligne N-S, intersectant chaque maille : ") query_NS = """ DROP TABLE IF EXISTS trc_tempNS_a; CREATE TABLE trc_tempNS_a AS SELECT ns.ID as ID, ns.line as line, max(d.hauteur) as max_haut, (%s * max(d.hauteur)) as FA, ns.geom as geom FROM trc_decoup as d, trc_decoupNS as ns WHERE ST_Intersects(d.geom, ns.geom) GROUP BY ns.ID, ns.line, ns.geom; CREATE INDEX IF NOT EXISTS tempNS_a_geom_gist ON trc_tempNS_a USING GIST (geom); DROP TABLE IF EXISTS trc_tempNS_b; CREATE TABLE trc_tempNS_b AS SELECT m.ID as ID, (sum(a.FA) / st_area(m.geom)) as FAR, m.geom as geom FROM trc_tempNS_a as a, %s as m WHERE ST_Intersects(a.geom, m.geom) GROUP BY m.ID, m.geom; CREATE INDEX IF NOT EXISTS tempNS_b_geom_gist ON trc_tempNS_b USING GIST (geom); DROP TABLE IF EXISTS trc_tempNS_c; CREATE TABLE trc_tempNS_c AS SELECT DISTINCT ID, geom FROM %s WHERE ID NOT IN (SELECT DISTINCT ID FROM trc_tempNS_b); ALTER TABLE trc_tempNS_c ADD FAR DOUBLE PRECISION; UPDATE trc_tempNS_c SET FAR = 0; CREATE INDEX IF NOT EXISTS tempNS_c_geom_gist ON trc_tempNS_c USING GIST (geom); DROP TABLE IF EXISTS trc_tempNS; CREATE TABLE trc_tempNS AS SELECT ID, FAR, geom FROM trc_tempNS_b UNION SELECT ID, FAR, geom FROM trc_tempNS_c; ALTER TABLE trc_tempNS ALTER COLUMN ID TYPE INTEGER; CREATE INDEX IF NOT EXISTS tempNS_geom_gist ON trc_tempNS USING GIST (geom); """ % (distance_lines, table_name_maille, table_name_maille) if debug >= 1: print(query_NS) executeQuery(connection, query_NS) ##################################################################################################################################################### ### Calculs pour l'obtention des sous-indicateurs liés à l'intersect des bâtiments avec chaque ligne W-E, dans chaque maille : frontal area ratio ### ##################################################################################################################################################### 
print(bold + cyan + "Calcul des indicateurs secondaires liés aux bâtiments intersectant chaque ligne W-E, intersectant chaque maille :" + endC) timeLine(path_time_log, " Calcul des indicateurs secondaires liés aux bâtiments intersectant chaque ligne W-E, intersectant chaque maille : ") query_WE = """ DROP TABLE IF EXISTS trc_tempWE_a; CREATE TABLE trc_tempWE_a AS SELECT we.ID as ID, we.line as line, max(d.hauteur) as max_haut, (%s * max(d.hauteur)) as FA, we.geom as geom FROM trc_decoup as d, trc_decoupWE as we WHERE ST_Intersects(d.geom, we.geom) GROUP BY we.ID, we.line, we.geom; CREATE INDEX IF NOT EXISTS tempWE_a_geom_gist ON trc_tempWE_a USING GIST (geom); DROP TABLE IF EXISTS trc_tempWE_b; CREATE TABLE trc_tempWE_b AS SELECT m.ID as ID, (sum(a.FA) / st_area(m.geom)) as FAR, m.geom as geom FROM trc_tempWE_a as a, %s as m WHERE ST_Intersects(a.geom, m.geom) GROUP BY m.ID, m.geom; CREATE INDEX IF NOT EXISTS tempWE_b_geom_gist ON trc_tempWE_b USING GIST (geom); DROP TABLE IF EXISTS trc_tempWE_c; CREATE TABLE trc_tempWE_c AS SELECT DISTINCT ID, geom FROM %s WHERE ID NOT IN (SELECT DISTINCT ID FROM trc_tempWE_b); ALTER TABLE trc_tempWE_c ADD FAR DOUBLE PRECISION; UPDATE trc_tempWE_c SET FAR = 0; CREATE INDEX IF NOT EXISTS tempWE_c_geom_gist ON trc_tempWE_c USING GIST (geom); DROP TABLE IF EXISTS trc_tempWE; CREATE TABLE trc_tempWE AS SELECT ID, FAR, geom FROM trc_tempWE_b UNION SELECT ID, FAR, geom FROM trc_tempWE_c; ALTER TABLE trc_tempWE ALTER COLUMN ID TYPE INTEGER; CREATE INDEX IF NOT EXISTS tempWE_geom_gist ON trc_tempWE USING GIST (geom); """ % (distance_lines, table_name_maille, table_name_maille) if debug >= 1: print(query_WE) executeQuery(connection, query_WE) ######################################################################################################################## ### Calculs finaux pour l'obtention de l'indicateur de classe de rugosité : longueur de rugosité, classe de rugosité ### ######################################################################################################################## print(bold + cyan + "Calculs finaux de l'indicateur de classe de rugosité :" + endC) timeLine(path_time_log, " Calculs finaux de l'indicateur de classe de rugosité : ") query_rugo = """ DROP TABLE IF EXISTS trc_rugo; CREATE TABLE trc_rugo AS SELECT t.ID as ID, t.h as h, t.PAR as PAR, t.zd as zd, ns.FAR as FAR_NS, we.FAR as FAR_WE, t.geom as geom FROM trc_temp as t, trc_tempNS as ns, trc_tempWE as we WHERE t.ID = ns.ID and ns.ID = WE.ID; ALTER TABLE trc_rugo ALTER COLUMN ID TYPE INTEGER; ALTER TABLE trc_rugo ADD COLUMN z0_NS DOUBLE PRECISION; UPDATE trc_rugo SET z0_NS = ((h - zd) * exp(-sqrt(0.4 / FAR_NS))) WHERE FAR_NS > 0; UPDATE trc_rugo SET z0_NS = 0 WHERE FAR_NS = 0; ALTER TABLE trc_rugo ADD COLUMN z0_WE DOUBLE PRECISION; UPDATE trc_rugo SET z0_WE = ((h - zd) * exp(-sqrt(0.4 / FAR_WE))) WHERE FAR_WE > 0; UPDATE trc_rugo SET z0_WE = 0 WHERE FAR_WE = 0; ALTER TABLE trc_rugo ADD COLUMN mean_z0 DOUBLE PRECISION; UPDATE trc_rugo SET mean_z0 = (z0_NS + z0_WE) / 2; ALTER TABLE trc_rugo ADD COLUMN cl_rugo integer; UPDATE trc_rugo SET cl_rugo = 1 WHERE mean_z0 < 0.0025; UPDATE trc_rugo SET cl_rugo = 2 WHERE mean_z0 >= 0.0025 and mean_z0 < 0.0175; UPDATE trc_rugo SET cl_rugo = 3 WHERE mean_z0 >= 0.0175 and mean_z0 < 0.065; UPDATE trc_rugo SET cl_rugo = 4 WHERE mean_z0 >= 0.065 and mean_z0 < 0.175; UPDATE trc_rugo SET cl_rugo = 5 WHERE mean_z0 >= 0.175 and mean_z0 < 0.375; UPDATE trc_rugo SET cl_rugo = 6 WHERE mean_z0 >= 0.375 and mean_z0 < 0.75; UPDATE trc_rugo SET cl_rugo = 
7 WHERE mean_z0 >= 0.75 and mean_z0 < 1.5; UPDATE trc_rugo SET cl_rugo = 8 WHERE mean_z0 >= 1.5; """ if debug >= 1: print(query_rugo) executeQuery(connection, query_rugo) closeConnection(connection) exportVectorByOgr2ogr(database_postgis, grid_output, 'trc_rugo', user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema_name=schema_postgis, format_type=format_vector) ########################################## ### Nettoyage des fichiers temporaires ### ########################################## if not save_results_intermediate: # ~ dropDatabase(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=port_number, schema_name=schema_postgis) # Conflits avec autres indicateurs (Aspect Ratio / Height of Roughness Elements) pass else: print(bold + magenta + "Le calcul de Terrain Roughness Class a déjà eu lieu." + endC) print(bold + yellow + "Fin du calcul de l'indicateur Terrain Roughness Class." + endC + "\n") timeLine(path_time_log, "Fin du calcul de l'indicateur Terrain Roughness Class : ") return
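
# ------------------------------------------------------------------------------------------------------------
# Illustrative sketch (not part of the processing chain, added for clarity): the trc_rugo queries follow a
# morphometric approach: zd = h * PAR^0.6, then z0 = (h - zd) * exp(-sqrt(0.4 / FAR)) for each direction
# (0.4 being, presumably, the von Karman constant), the mean of the N-S and W-E values, and finally the same
# class bounds as the "cl_rugo" UPDATEs. The helper below is an assumption-labelled rewrite of that chain.
from math import exp, sqrt

def _terrain_roughness_class_sketch(h, plan_area_ratio, far_ns, far_we):
    zd = h * plan_area_ratio ** 0.6                                          # hauteur de déplacement
    z0_ns = (h - zd) * exp(-sqrt(0.4 / far_ns)) if far_ns > 0 else 0.0       # longueur de rugosité, lignes N-S
    z0_we = (h - zd) * exp(-sqrt(0.4 / far_we)) if far_we > 0 else 0.0       # longueur de rugosité, lignes W-E
    mean_z0 = (z0_ns + z0_we) / 2.0
    bounds = [0.0025, 0.0175, 0.065, 0.175, 0.375, 0.75, 1.5]                # class limits of the cl_rugo UPDATEs
    return mean_z0, 1 + sum(1 for b in bounds if mean_z0 >= b)               # classe de rugosité 1..8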